From 0a7fa52dc06681a9ef8f1da6b36ed35ac2be79dc Mon Sep 17 00:00:00 2001
From: Raza Jafri
Date: Sat, 11 Nov 2023 21:03:46 -0800
Subject: [PATCH] xfail approximate percentile test

---
 integration_tests/src/main/python/hash_aggregate_test.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/integration_tests/src/main/python/hash_aggregate_test.py b/integration_tests/src/main/python/hash_aggregate_test.py
index a9300a51c79..ba8b8bf8f48 100644
--- a/integration_tests/src/main/python/hash_aggregate_test.py
+++ b/integration_tests/src/main/python/hash_aggregate_test.py
@@ -25,7 +25,7 @@ from pyspark.sql.types import *
 from marks import *
 import pyspark.sql.functions as f
-from spark_session import is_databricks104_or_later, with_cpu_session, is_before_spark_330
+from spark_session import is_databricks104_or_later, with_cpu_session, is_before_spark_330, is_databricks_runtime, is_spark_340_or_later

 pytestmark = pytest.mark.nightly_resource_consuming_test

@@ -1652,6 +1652,7 @@ def test_hash_groupby_approx_percentile_double_single(aqe_enabled):
 @ignore_order(local=True)
 @allow_non_gpu('TakeOrderedAndProjectExec', 'Alias', 'Cast', 'ObjectHashAggregateExec', 'AggregateExpression',
     'ApproximatePercentile', 'Literal', 'ShuffleExchangeExec', 'HashPartitioning', 'CollectLimitExec')
+@pytest.mark.xfail(condition=is_spark_340_or_later() and is_databricks_runtime(), reason="https://github.com/NVIDIA/spark-rapids/issues/9493")
 def test_hash_groupby_approx_percentile_partial_fallback_to_cpu(aqe_enabled):
     conf = {
         'spark.rapids.sql.hashAgg.replaceMode': 'partial',