Skip to content

Commit 990affd

Browse files
HyukjinKwonyaooqinn
andcommitted
[SPARK-44290][CONNECT][FOLLOW-UP] Skip flaky tests, and fix a typo in session UUID together
### What changes were proposed in this pull request? This PR is a followup of #41495 that skips a couple of flaky tests. In addition, this PR fixes a typo together. ### Why are the changes needed? To keep the tests green. In order to reenable the tests, it needs other fixes together that might refactor the whole test cases which takes a while. I will followup and fix them in SPARK-44348 ### Does this PR introduce _any_ user-facing change? No, the feature is not released to end users yet. ### How was this patch tested? Unittests skipped for now. Closes #41913 from HyukjinKwon/SPARK-44290-followup. Lead-authored-by: Hyukjin Kwon <[email protected]> Co-authored-by: Kent Yao <[email protected]> Signed-off-by: Hyukjin Kwon <[email protected]>
1 parent 81ef113 commit 990affd

File tree

2 files changed

+6
-2
lines changed

2 files changed

+6
-2
lines changed

core/src/main/scala/org/apache/spark/api/python/PythonWorkerFactory.scala

+2-2
Original file line numberDiff line numberDiff line change
@@ -157,7 +157,7 @@ private[spark] class PythonWorkerFactory(pythonExec: String, envVars: Map[String
157157

158158
// Create and start the worker
159159
val pb = new ProcessBuilder(Arrays.asList(pythonExec, "-m", workerModule))
160-
val sessionId = envVars.getOrElse("SPARK_CONNECT_SESSION_UUID", "deafult")
160+
val sessionId = envVars.getOrElse("SPARK_CONNECT_SESSION_UUID", "default")
161161
if (sessionId != "default") {
162162
pb.directory(new File(SparkFiles.getRootDirectory(), sessionId))
163163
}
@@ -214,7 +214,7 @@ private[spark] class PythonWorkerFactory(pythonExec: String, envVars: Map[String
214214
// Create and start the daemon
215215
val command = Arrays.asList(pythonExec, "-m", daemonModule)
216216
val pb = new ProcessBuilder(command)
217-
val sessionId = envVars.getOrElse("SPARK_CONNECT_SESSION_UUID", "deafult")
217+
val sessionId = envVars.getOrElse("SPARK_CONNECT_SESSION_UUID", "default")
218218
if (sessionId != "default") {
219219
pb.directory(new File(SparkFiles.getRootDirectory(), sessionId))
220220
}

python/pyspark/sql/tests/connect/client/test_artifact.py

+4
Original file line numberDiff line numberDiff line change
@@ -245,6 +245,7 @@ def func(x):
245245
spark_session.addArtifacts(pyfile_path, pyfile=True)
246246
self.assertEqual(spark_session.range(1).select(func("id")).first()[0], 10)
247247

248+
@unittest.skip("SPARK-44348: Reenable Session-based artifact test cases")
248249
def test_add_pyfile(self):
249250
self.check_add_pyfile(self.spark)
250251

@@ -272,6 +273,7 @@ def func(x):
272273
spark_session.addArtifacts(f"{package_path}.zip", pyfile=True)
273274
self.assertEqual(spark_session.range(1).select(func("id")).first()[0], 5)
274275

276+
@unittest.skip("SPARK-44348: Reenable Session-based artifact test cases")
275277
def test_add_zipped_package(self):
276278
self.check_add_zipped_package(self.spark)
277279

@@ -303,6 +305,7 @@ def func(x):
303305
spark_session.addArtifacts(f"{archive_path}.zip#my_files", archive=True)
304306
self.assertEqual(spark_session.range(1).select(func("id")).first()[0], "hello world!")
305307

308+
@unittest.skip("SPARK-44348: Reenable Session-based artifact test cases")
306309
def test_add_archive(self):
307310
self.check_add_archive(self.spark)
308311

@@ -328,6 +331,7 @@ def func(x):
328331
spark_session.addArtifacts(file_path, file=True)
329332
self.assertEqual(spark_session.range(1).select(func("id")).first()[0], "Hello world!!")
330333

334+
@unittest.skip("SPARK-44348: Reenable Session-based artifact test cases")
331335
def test_add_file(self):
332336
self.check_add_file(self.spark)
333337

0 commit comments

Comments
 (0)