[REVIEW] Timesub tests (NVIDIA#365)
* timesub tests

* addressed review comments

* TimeSub should be able to subtract negative interval

Co-authored-by: Raza Jafri <[email protected]>
razajafri authored Jul 17, 2020
1 parent 69fd5d5 commit b09775a
Showing 3 changed files with 16 additions and 15 deletions.
14 changes: 12 additions & 2 deletions integration_tests/src/main/python/date_time_test.py
@@ -13,14 +13,24 @@
# limitations under the License.

import pytest

from asserts import assert_gpu_and_cpu_are_equal_collect
from data_gen import *
-from datetime import date, datetime, timedelta, timezone
+from datetime import date, datetime, timezone
from marks import incompat
from pyspark.sql.types import *
import pyspark.sql.functions as f

+# We only support literal intervals for TimeSub
+vals = [(-584, 1563), (1943, 1101), (2693, 2167), (2729, 0), (44, 1534), (2635, 3319),
+        (1885, -2828), (0, 2463), (932, 2286), (0, 0)]
+@pytest.mark.parametrize('data_gen', vals, ids=idfn)
+def test_timesub(data_gen):
+    days, seconds = data_gen
+    assert_gpu_and_cpu_are_equal_collect(
+        # We are starting at year 0015 to make sure we don't go before year 0001 while doing TimeSub
+        lambda spark: unary_op_df(spark, TimestampGen(start=datetime(15, 1, 1, tzinfo=timezone.utc)), seed=1)
+            .selectExpr("a - (interval {} days {} seconds)".format(days, seconds)))

@pytest.mark.parametrize('data_gen', date_gens, ids=idfn)
def test_datediff(data_gen):
    assert_gpu_and_cpu_are_equal_collect(
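For reference, here is a hedged sketch (not part of this commit) of the CPU-side semantics the new parametrized test checks the GPU against. It uses the mixed-sign pair (1885, -2828) from `vals`: subtracting an interval whose seconds component is negative must add those 2828 seconds back, which is the negative-interval behaviour the GpuTimeSub change further down enables. The object name, app name, and timestamp literal below are arbitrary choices for illustration.

// Illustrative only, not from the commit: run one of the test's (days, seconds)
// pairs through plain CPU Spark to see the expected result of TimeSub.
import org.apache.spark.sql.SparkSession

object TimeSubCpuCheck {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder().master("local[*]").appName("timesub-cpu-check").getOrCreate()
    // A negative seconds component means those 2828 seconds are added back to the result.
    spark.sql(
      "SELECT timestamp'0015-03-01 00:00:00' - interval 1885 days -2828 seconds AS result"
    ).show(false)
    spark.stop()
  }
}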
@@ -32,15 +32,4 @@ class TimeOperatorsSuite extends SparkQueryCompareTestSuite {
    frame => frame.select(from_unixtime(col("dates"),"dd/LL/yy HH:mm:ss.SSSSSS"))
  }

-  testSparkResultsAreEqual("Test timesub - 4000 seconds", epochDf) {
-    frame => frame.selectExpr("cast(dates as timestamp) - (interval 40000 seconds)")
-  }
-
-  testSparkResultsAreEqual("Test timesub - 4 day", epochDf) {
-    frame => frame.selectExpr("cast(dates as timestamp) - (interval 4 days)")
-  }
-
-  testSparkResultsAreEqual("Test timesub - 4 day 1000 seconds", epochDf) {
-    frame => frame.selectExpr("cast(dates as timestamp) - (interval 4 days 1000 seconds)")
-  }
}
@@ -152,15 +152,17 @@ case class GpuTimeSub(
      if (intvl.months != 0) {
        throw new UnsupportedOperationException("Months aren't supported at the moment")
      }
-      val usToSub = intvl.days * 24 * 60 * 60 * 1000 * 1000L + intvl.microseconds
-      if (usToSub > 0) {
+      val usToSub = intvl.days.toLong * 24 * 60 * 60 * 1000 * 1000 + intvl.microseconds
+      if (usToSub != 0) {
        withResource(Scalar.fromLong(usToSub)) { us_s =>
          withResource(l.getBase.castTo(DType.INT64)) { us =>
            withResource(us.sub(us_s)) {longResult =>
              GpuColumnVector.from(longResult.castTo(DType.TIMESTAMP_MICROSECONDS))
            }
          }
        }
      } else {
        l.incRefCount()
      }
    case _ =>
      throw new UnsupportedOperationException("GpuTimeSub takes column and interval as an " +
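A note on the `val usToSub` change above, sketched rather than asserted: Scala multiplies left to right, so with `intvl.days` being an Int the partial product `days * 24 * 60 * 60 * 1000` can wrap around before the trailing `1000L` widens anything to Long, whereas starting from `intvl.days.toLong` keeps the whole conversion in Long. Combined with relaxing `> 0` to `!= 0`, large and negative intervals now take the subtraction path, matching the commit message's "TimeSub should be able to subtract negative interval". The standalone snippet below (object name and the 25-day value are purely illustrative) shows the wrap-around.

// Illustrative only, not from the commit: why widening to Long up front matters
// when converting interval days to microseconds.
object TimeSubOverflowSketch {
  def main(args: Array[String]): Unit = {
    val days = 25  // anything above 24 days pushes the Int partial product past Int.MaxValue
    val oldStyle = days * 24 * 60 * 60 * 1000 * 1000L        // Int math wraps first, then widens
    val newStyle = days.toLong * 24 * 60 * 60 * 1000 * 1000  // Long math throughout
    println(s"oldStyle = $oldStyle")  // -2134967296000 (wrapped partial product)
    println(s"newStyle = $newStyle")  // 2160000000000 microseconds, the intended value
  }
}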
