addressed review comments
Signed-off-by: Raza Jafri <[email protected]>
razajafri committed Sep 3, 2020
1 parent 557df87 commit 6472e92
Showing 1 changed file with 16 additions and 12 deletions.
28 changes: 16 additions & 12 deletions integration_tests/src/main/python/cache_test.py
@@ -18,8 +18,7 @@
 from data_gen import *
 from datetime import date
 import pyspark.sql.functions as f
-from spark_session import with_cpu_session, with_gpu_session
-from spark_init_internal import spark_version
+from spark_session import with_cpu_session, with_gpu_session, with_spark_session
 from join_test import create_df
 from generate_expr_test import four_op_df
 from marks import incompat, allow_non_gpu, ignore_order
@@ -62,8 +61,9 @@ def test_passing_gpuExpr_as_Expr():
 @pytest.mark.parametrize('join_type', ['Left', 'Right', 'Inner', 'LeftSemi', 'LeftAnti'], ids=idfn)
 @ignore_order
 def test_cache_join(data_gen, join_type):
-    if spark_version() == "3.0.0" and data_gen.data_type == BooleanType():
-        pytest.xfail("https://github.com/NVIDIA/spark-rapids/issues/350")
+    if with_spark_session(lambda spark : spark.sparkContext.version == "3.0.0") \
+            and data_gen.data_type == BooleanType():
+        pytest.xfail("https://issues.apache.org/jira/browse/SPARK-32672")
 
     def do_join(spark):
         left, right = create_df(spark, data_gen, 500, 500)
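
The same version gate recurs in every test touched below. As a minimal standalone sketch of the pattern, assuming only what the diff itself shows: with_spark_session (imported above from spark_session) applies the given function to a live SparkSession and returns its result, and spark.sparkContext.version is the running Spark version string. The helper name running_spark_300 is hypothetical, not part of this commit:

    from spark_session import with_spark_session

    def running_spark_300():
        # Probe the active session; sparkContext.version is the Spark
        # version string, e.g. "3.0.0". Assumes with_spark_session(fn)
        # returns fn's result, as the call sites in this diff imply.
        return with_spark_session(
            lambda spark: spark.sparkContext.version == "3.0.0")

Probing the live session rather than calling a module-level spark_version() means the check reflects the cluster the tests actually run against.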
@@ -82,8 +82,9 @@ def do_join(spark):
 @ignore_order
 def test_cached_join_filter(data_gen, join_type):
     data, filter = data_gen
-    if spark_version() == "3.0.0" and data.data_type == BooleanType():
-        pytest.xfail("https://github.com/NVIDIA/spark-rapids/issues/350")
+    if with_spark_session(lambda spark : spark.sparkContext.version == "3.0.0") \
+            and data.data_type == BooleanType():
+        pytest.xfail("https://issues.apache.org/jira/browse/SPARK-32672")
 
     def do_join(spark):
         left, right = create_df(spark, data, 500, 500)
@@ -97,8 +98,9 @@ def do_join(spark):
 @pytest.mark.parametrize('join_type', ['Left', 'Right', 'Inner', 'LeftSemi', 'LeftAnti'], ids=idfn)
 @ignore_order
 def test_cache_broadcast_hash_join(data_gen, join_type):
-    if spark_version() == "3.0.0" and data_gen.data_type == BooleanType():
-        pytest.xfail("https://github.com/NVIDIA/spark-rapids/issues/350")
+    if with_spark_session(lambda spark : spark.sparkContext.version == "3.0.0") \
+            and data_gen.data_type == BooleanType():
+        pytest.xfail("https://issues.apache.org/jira/browse/SPARK-32672")
 
     def do_join(spark):
         left, right = create_df(spark, data_gen, 500, 500)
@@ -117,8 +119,9 @@ def do_join(spark):
 @pytest.mark.parametrize('join_type', ['Left', 'Right', 'Inner', 'LeftSemi', 'LeftAnti'], ids=idfn)
 @ignore_order
 def test_cache_shuffled_hash_join(data_gen, join_type):
-    if spark_version() == "3.0.0" and data_gen.data_type == BooleanType():
-        pytest.xfail("https://github.com/NVIDIA/spark-rapids/issues/350")
+    if with_spark_session(lambda spark : spark.sparkContext.version == "3.0.0") \
+            and data_gen.data_type == BooleanType():
+        pytest.xfail("https://issues.apache.org/jira/browse/SPARK-32672")
 
     def do_join(spark):
         left, right = create_df(spark, data_gen, 50, 500)
@@ -152,8 +155,9 @@ def do_join(spark):
 @pytest.mark.parametrize('data_gen', all_gen_restricting_dates, ids=idfn)
 @allow_non_gpu('InMemoryTableScanExec', 'DataWritingCommandExec')
 def test_cache_posexplode_makearray(spark_tmp_path, data_gen):
-    if spark_version() == "3.0.0" and data_gen.data_type == BooleanType():
-        pytest.xfail("https://github.com/NVIDIA/spark-rapids/issues/350")
+    if with_spark_session(lambda spark : spark.sparkContext.version == "3.0.0") \
+            and data_gen.data_type == BooleanType():
+        pytest.xfail("https://issues.apache.org/jira/browse/SPARK-32672")
     data_path_cpu = spark_tmp_path + '/PARQUET_DATA_CPU'
     data_path_gpu = spark_tmp_path + '/PARQUET_DATA_GPU'
     def write_posExplode(data_path):
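
Since the identical three-line gate now appears in five tests, it could be hoisted into a single helper. A sketch under the same assumptions as above; the name xfail_bools_on_spark_300 is hypothetical and not part of this commit:

    import pytest
    from pyspark.sql.types import BooleanType
    from spark_session import with_spark_session

    def xfail_bools_on_spark_300(data_gen):
        # Cached boolean columns hit SPARK-32672 on Spark 3.0.0, so mark
        # the test as an expected failure for that one configuration.
        if with_spark_session(lambda spark: spark.sparkContext.version == "3.0.0") \
                and data_gen.data_type == BooleanType():
            pytest.xfail("https://issues.apache.org/jira/browse/SPARK-32672")

Each test body would then open with a single xfail_bools_on_spark_300(data_gen) call instead of repeating the condition.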
