Skip to content

Commit

Permalink
Fixed failing tests
Browse files Browse the repository at this point in the history
  • Loading branch information
razajafri committed Jan 31, 2023
1 parent 518051a commit ae52717
Show file tree
Hide file tree
Showing 2 changed files with 29 additions and 4 deletions.
27 changes: 23 additions & 4 deletions integration_tests/src/main/python/arithmetic_ops_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -272,6 +272,7 @@ def test_int_division_mixed(lhs, rhs):
'a DIV b'))

@pytest.mark.parametrize('data_gen', _arith_data_gens, ids=idfn)
@pytest.mark.skipif(not is_before_spark_340() or is_databricks113_or_later())
def test_mod(data_gen):
data_type = data_gen.data_type
assert_gpu_and_cpu_are_equal_collect(
Expand All @@ -282,6 +283,20 @@ def test_mod(data_gen):
f.col('b') % f.lit(None).cast(data_type),
f.col('a') % f.col('b')))

# This test exists only because test_mod is skipped for Spark 3.4 and Databricks 11.3
# due to https://github.com/NVIDIA/spark-rapids/issues/7595 (decimal Remainder not supported
# there yet). It runs the same expressions as test_mod on exactly the platforms test_mod skips.
# Once that issue is resolved, remove this test and stop skipping test_mod on Spark 3.4 / DB 11.3.
@pytest.mark.parametrize('data_gen', _arith_data_gens, ids=idfn)
# BUGFIX: pytest.mark.skipif with a boolean condition requires a reason= keyword;
# without it pytest raises an error at collection time.
@pytest.mark.skipif(is_before_spark_340() and not is_databricks113_or_later(),
    reason='Complement of test_mod: only meaningful on Spark 3.4+ or Databricks 11.3+, '
           'see https://github.com/NVIDIA/spark-rapids/issues/7595')
def test_mod_db11_3(data_gen):
    data_type = data_gen.data_type
    # Exercise % with column/literal, literal/column, NULL literal on both sides,
    # and column/column — mirrors the body of test_mod.
    assert_gpu_and_cpu_are_equal_collect(
        lambda spark : binary_op_df(spark, data_gen).select(
            f.col('a') % f.lit(100).cast(data_type),
            f.lit(-12).cast(data_type) % f.col('b'),
            f.lit(None).cast(data_type) % f.col('a'),
            f.col('b') % f.lit(None).cast(data_type),
            f.col('a') % f.col('b')))

# pmod currently falls back to CPU for Decimal(precision=38)
# https://github.com/NVIDIA/spark-rapids/issues/6336
_pmod_gens = numeric_gens + [ decimal_gen_32bit, decimal_gen_64bit, _decimal_gen_18_0, decimal_gen_128bit,
Expand Down Expand Up @@ -372,11 +387,15 @@ def test_mod_pmod_by_zero_not_ansi(data_gen):
assert_gpu_and_cpu_are_equal_collect(
lambda spark : unary_op_df(spark, data_gen).selectExpr(
'pmod(a, cast(0 as {}))'.format(string_type),
'pmod(cast(-12 as {}), cast(0 as {}))'.format(string_type, string_type),
'a % (cast(0 as {}))'.format(string_type),
'cast(-12 as {}) % cast(0 as {})'.format(string_type, string_type)),
'pmod(cast(-12 as {}), cast(0 as {}))'.format(string_type, string_type)),
{'spark.sql.ansi.enabled': 'false'})

# Skip decimal tests for mod on spark 3.4 and databricks 11.3, reason=https://github.com/NVIDIA/spark-rapids/issues/7595
if is_before_spark_340() or not is_databricks113_or_later():
assert_gpu_and_cpu_are_equal_collect(
lambda spark : unary_op_df(spark, data_gen).selectExpr(
'a % (cast(0 as {}))'.format(string_type),
'cast(-12 as {}) % cast(0 as {})'.format(string_type, string_type)),
{'spark.sql.ansi.enabled': 'false'})

mod_mixed_decimals_lhs = [DecimalGen(6, 5), DecimalGen(6, 4), DecimalGen(5, 4), DecimalGen(5, 3), DecimalGen(4, 2),
DecimalGen(3, -2), DecimalGen(16, 7), DecimalGen(19, 0), DecimalGen(30, 10)]
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -94,6 +94,12 @@ object DecimalArithmeticOverrides {
GpuIntegralDivide(lhs, rhs)
}
}),

/**
 * Because of https://github.com/NVIDIA/spark-rapids/issues/7595 we are not supporting Decimals
 * for Spark 3.4 and Databricks 11.3. Once we do, we should revert the changes made to the
 * following tests: test_mod, test_mod_mixed and test_mod_pmod_by_zero_not_ansi — or simply
 * revert this commit.
 */
expr[Remainder](
"Remainder or modulo",
ExprChecks.binaryProject(
Expand Down

0 comments on commit ae52717

Please sign in to comment.