Skip to content

Commit

Permalink
[SPARK-48088][PYTHON][CONNECT][TESTS] Prepare backward compatibility …
Browse files Browse the repository at this point in the history
…test 4.0 <> above

### What changes were proposed in this pull request?

This PR forward-ports #46334 to reduce merge conflicts.

### Why are the changes needed?

To reduce merge conflicts against branch-3.5, and to prepare the backward-compatibility test between Spark 4.0 and later versions.

### Does this PR introduce _any_ user-facing change?

No, dev-only.

### How was this patch tested?

CI in this PR should verify them.

### Was this patch authored or co-authored using generative AI tooling?

No.

Closes #46358 from HyukjinKwon/SPARK-48088-40.

Authored-by: Hyukjin Kwon <[email protected]>
Signed-off-by: Dongjoon Hyun <[email protected]>
  • Loading branch information
HyukjinKwon authored and dongjoon-hyun committed May 3, 2024
1 parent aaf3995 commit cd789ac
Show file tree
Hide file tree
Showing 2 changed files with 14 additions and 7 deletions.
3 changes: 3 additions & 0 deletions python/pyspark/util.py
Original file line number Diff line number Diff line change
Expand Up @@ -747,6 +747,9 @@ def is_remote_only() -> bool:
"""
global _is_remote_only

if "SPARK_SKIP_CONNECT_COMPAT_TESTS" in os.environ:
return True

if _is_remote_only is not None:
return _is_remote_only
try:
Expand Down
18 changes: 11 additions & 7 deletions python/run-tests.py
Original file line number Diff line number Diff line change
Expand Up @@ -62,13 +62,15 @@ def get_valid_filename(s):

# Find out where the assembly jars are located.
# TODO: revisit for Scala 2.13
for scala in ["2.13"]:
build_dir = os.path.join(SPARK_HOME, "assembly", "target", "scala-" + scala)
if os.path.isdir(build_dir):
SPARK_DIST_CLASSPATH = os.path.join(build_dir, "jars", "*")
break
else:
raise RuntimeError("Cannot find assembly build directory, please build Spark first.")
SPARK_DIST_CLASSPATH = ""
if "SPARK_SKIP_CONNECT_COMPAT_TESTS" not in os.environ:
for scala in ["2.13"]:
build_dir = os.path.join(SPARK_HOME, "assembly", "target", "scala-" + scala)
if os.path.isdir(build_dir):
SPARK_DIST_CLASSPATH = os.path.join(build_dir, "jars", "*")
break
else:
raise RuntimeError("Cannot find assembly build directory, please build Spark first.")


def run_individual_python_test(target_dir, test_name, pyspark_python, keep_test_output):
Expand Down Expand Up @@ -100,6 +102,8 @@ def run_individual_python_test(target_dir, test_name, pyspark_python, keep_test_

if "SPARK_CONNECT_TESTING_REMOTE" in os.environ:
env.update({"SPARK_CONNECT_TESTING_REMOTE": os.environ["SPARK_CONNECT_TESTING_REMOTE"]})
if "SPARK_SKIP_CONNECT_COMPAT_TESTS" in os.environ:
env.update({"SPARK_SKIP_JVM_REQUIRED_TESTS": os.environ["SPARK_SKIP_CONNECT_COMPAT_TESTS"]})

# Create a unique temp directory under 'target/' for each run. The TMPDIR variable is
# recognized by the tempfile module to override the default system temp directory.
Expand Down

0 comments on commit cd789ac

Please sign in to comment.