diff --git a/integration_tests/src/main/python/arithmetic_ops_test.py b/integration_tests/src/main/python/arithmetic_ops_test.py
index e1d4f8c3c24..a674f1b2cfb 100644
--- a/integration_tests/src/main/python/arithmetic_ops_test.py
+++ b/integration_tests/src/main/python/arithmetic_ops_test.py
@@ -19,7 +19,8 @@
 from marks import ignore_order, incompat, approximate_float, allow_non_gpu
 from pyspark.sql.types import *
 from pyspark.sql.types import IntegralType
-from spark_session import with_cpu_session, with_gpu_session, with_spark_session, is_before_spark_320, is_before_spark_330, is_databricks91_or_later
+from spark_session import with_cpu_session, with_gpu_session, with_spark_session, is_before_spark_320, \
+    is_before_spark_330, is_databricks91_or_later, is_spark_330_or_later
 import pyspark.sql.functions as f
 from datetime import timedelta
 
@@ -780,6 +781,7 @@ def _get_div_overflow_df(spark, expr):
 # Only run this test for Spark v3.2.0 and later to verify IntegralDivide will
 # throw exceptions for overflow when ANSI mode is enabled.
 @pytest.mark.skipif(is_before_spark_320(), reason='https://github.com/apache/spark/pull/32260')
+@pytest.mark.skipif(is_spark_330_or_later(), reason='https://github.com/NVIDIA/spark-rapids/issues/5182')
 @pytest.mark.parametrize('expr', div_overflow_exprs)
 @pytest.mark.parametrize('ansi_enabled', ['false', 'true'])
 def test_div_overflow_exception_when_ansi(expr, ansi_enabled):