From a380df0c5497771ecca73ed7f0b5a4977f281fa3 Mon Sep 17 00:00:00 2001
From: Raza Jafri
Date: Mon, 30 Aug 2021 17:47:09 -0700
Subject: [PATCH] Revert "Temporarily disable cache test except for spark
 3.1.1 (#3319)"

This reverts commit 97baf1f4888d235d1c5dca3c0aad8dd63fa269d5.

Signed-off-by: Raza Jafri
---
 jenkins/databricks/test.sh | 18 ++++++++----------
 jenkins/spark-tests.sh     |  8 ++------
 2 files changed, 10 insertions(+), 16 deletions(-)

diff --git a/jenkins/databricks/test.sh b/jenkins/databricks/test.sh
index 2e7e68e3d52..513d4f24484 100755
--- a/jenkins/databricks/test.sh
+++ b/jenkins/databricks/test.sh
@@ -64,12 +64,11 @@ if [ -d "$LOCAL_JAR_PATH" ]; then
     ## Run tests with jars in the LOCAL_JAR_PATH dir downloading from the denpedency repo
     LOCAL_JAR_PATH=$LOCAL_JAR_PATH bash $LOCAL_JAR_PATH/integration_tests/run_pyspark_from_build.sh --runtime_env="databricks" --test_type=$TEST_TYPE
 
-    # Temporarily only run on Spark 3.1.1 (https://github.com/NVIDIA/spark-rapids/issues/3311)
     ## Run cache tests
-    #if [[ "$IS_SPARK_311_OR_LATER" -eq "1" ]]; then
-    #    PYSP_TEST_spark_sql_cache_serializer=${PCBS_CONF} \
-    #    LOCAL_JAR_PATH=$LOCAL_JAR_PATH bash $LOCAL_JAR_PATH/integration_tests/run_pyspark_from_build.sh --runtime_env="databricks" --test_type=$TEST_TYPE -k cache_test
-    #fi
+    if [[ "$IS_SPARK_311_OR_LATER" -eq "1" ]]; then
+        PYSP_TEST_spark_sql_cache_serializer=${PCBS_CONF} \
+        LOCAL_JAR_PATH=$LOCAL_JAR_PATH bash $LOCAL_JAR_PATH/integration_tests/run_pyspark_from_build.sh --runtime_env="databricks" --test_type=$TEST_TYPE -k cache_test
+    fi
 
     ## Run cudf-udf tests
     CUDF_UDF_TEST_ARGS="$CUDF_UDF_TEST_ARGS --conf spark.executorEnv.PYTHONPATH=`ls $LOCAL_JAR_PATH/rapids-4-spark_*.jar | grep -v 'tests.jar'`"
@@ -80,12 +79,11 @@ else
     ## Run tests with jars building from the spark-rapids source code
     bash /home/ubuntu/spark-rapids/integration_tests/run_pyspark_from_build.sh --runtime_env="databricks" --test_type=$TEST_TYPE
 
-    # Temporarily only run on Spark 3.1.1 (https://github.com/NVIDIA/spark-rapids/issues/3311)
     ## Run cache tests
-    #if [[ "$IS_SPARK_311_OR_LATER" -eq "1" ]]; then
-    #    PYSP_TEST_spark_sql_cache_serializer=${PCBS_CONF} \
-    #    bash /home/ubuntu/spark-rapids/integration_tests/run_pyspark_from_build.sh --runtime_env="databricks" --test_type=$TEST_TYPE -k cache_test
-    #fi
+    if [[ "$IS_SPARK_311_OR_LATER" -eq "1" ]]; then
+        PYSP_TEST_spark_sql_cache_serializer=${PCBS_CONF} \
+        bash /home/ubuntu/spark-rapids/integration_tests/run_pyspark_from_build.sh --runtime_env="databricks" --test_type=$TEST_TYPE -k cache_test
+    fi
 
     ## Run cudf-udf tests
     CUDF_UDF_TEST_ARGS="$CUDF_UDF_TEST_ARGS --conf spark.executorEnv.PYTHONPATH=`ls /home/ubuntu/spark-rapids/dist/target/rapids-4-spark_*.jar | grep -v 'tests.jar'`"
diff --git a/jenkins/spark-tests.sh b/jenkins/spark-tests.sh
index d59f6c87bd7..49857b358ff 100755
--- a/jenkins/spark-tests.sh
+++ b/jenkins/spark-tests.sh
@@ -66,9 +66,6 @@ IS_SPARK_311_OR_LATER=0
 export SPARK_TASK_MAXFAILURES=1
 [[ "$IS_SPARK_311_OR_LATER" -eq "0" ]] && SPARK_TASK_MAXFAILURES=4
 
-IS_SPARK_311=0
-[[ "$SPARK_VER" == "3.1.1" ]] && IS_SPARK_311=1
-
 export PATH="$SPARK_HOME/bin:$SPARK_HOME/sbin:$PATH"
 
 #stop and restart SPARK ETL
@@ -174,9 +171,8 @@ else
 fi
 # cudf_udf_test
 run_test cudf_udf_test
-
-# Temporarily only run on Spark 3.1.1 (https://github.com/NVIDIA/spark-rapids/issues/3311)
-if [[ "$IS_SPARK_311" -eq "1" ]]; then
+# only run cache tests with our serializer in nightly test for Spark version >= 3.1.1
+if [[ "$IS_SPARK_311_OR_LATER" -eq "1" ]]; then
   run_test cache_serializer
 fi
 
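--
Note: the restored gate depends on IS_SPARK_311_OR_LATER, which is set
earlier in jenkins/spark-tests.sh (visible in the first hunk's context).
As a minimal sketch, assuming SPARK_VER holds a dotted version string such
as "3.1.2", the flag could be derived as below; version_ge is a
hypothetical helper for illustration, not something these scripts define:

    # Hypothetical helper: true when dotted version $1 >= $2.
    # sort -V orders version strings ascending; if the smaller of the
    # pair is the threshold, then $1 is at or above it.
    version_ge() {
        [ "$(printf '%s\n%s\n' "$2" "$1" | sort -V | head -n1)" = "$2" ]
    }

    IS_SPARK_311_OR_LATER=0
    version_ge "$SPARK_VER" "3.1.1" && IS_SPARK_311_OR_LATER=1

Under the integration-test convention that run_pyspark_from_build.sh maps
environment variables prefixed with PYSP_TEST_ to Spark configs (with
underscores becoming dots), PYSP_TEST_spark_sql_cache_serializer=${PCBS_CONF}
sets spark.sql.cache.serializer for the cache_test run; PCBS_CONF is
presumably the class name of the RAPIDS ParquetCachedBatchSerializer. The
Spark 3.1.1+ gate matches the fact that the pluggable cache serializer API
was introduced in the Spark 3.1 line.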