Handle warnings from encoding time in CF
pnuu committed Dec 15, 2023
1 parent 8ea9e30 commit 612e927
Showing 1 changed file with 14 additions and 6 deletions.
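
The change addresses xarray's "Times can't be serialized faithfully to int64 with requested units" warning in two ways: most tests now hand the CF writer an explicit time encoding, while test_bounds keeps the default encoding and filters the warning instead. A minimal, self-contained sketch of the first approach (the Scene contents and the output filename example_cf.nc are illustrative, not taken from the commit):

    from datetime import datetime

    import numpy as np
    import xarray as xr
    from satpy import Scene

    # Build a tiny Scene with a single time coordinate, mirroring the test setup.
    scn = Scene()
    scn["test-array"] = xr.DataArray(
        np.array([[1, 2], [3, 4]]),
        dims=["x", "y"],
        coords={"time": np.datetime64("2018-05-30T10:05:00", "ns")},
        attrs=dict(start_time=datetime(2018, 5, 30, 10, 0),
                   end_time=datetime(2018, 5, 30, 10, 15)),
    )

    # An explicit "units" entry lets xarray encode the time coordinate exactly,
    # so no serialization warning is emitted.
    scn.save_datasets(filename="example_cf.nc", writer="cf",
                      encoding={"time": {"units": "seconds since 2018-01-01"}})
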
satpy/tests/writer_tests/test_cf.py (14 additions, 6 deletions)
@@ -247,7 +247,7 @@ def test_single_time_value(self):
attrs=dict(start_time=start_time,
end_time=end_time))
with TempFile() as filename:
- scn.save_datasets(filename=filename, writer="cf")
+ scn.save_datasets(filename=filename, writer="cf", encoding={"time": {"units": "seconds since 2018-01-01"}})
with xr.open_dataset(filename, decode_cf=True) as f:
np.testing.assert_array_equal(f["time"], scn["test-array"]["time"])
bounds_exp = np.array([[start_time, end_time]], dtype="datetime64[m]")
@@ -264,7 +264,8 @@ def test_time_coordinate_on_a_swath(self):
coords={"time": ("y", times)},
attrs=dict(start_time=times[0], end_time=times[-1]))
with TempFile() as filename:
- scn.save_datasets(filename=filename, writer="cf", pretty=True)
+ scn.save_datasets(filename=filename, writer="cf", pretty=True,
+                   encoding={"time": {"units": "seconds since 2018-01-01"}})
with xr.open_dataset(filename, decode_cf=True) as f:
np.testing.assert_array_equal(f["time"], scn["test-array"]["time"])

@@ -280,7 +281,11 @@ def test_bounds(self):
attrs=dict(start_time=start_time,
end_time=end_time))
with TempFile() as filename:
- scn.save_datasets(filename=filename, writer="cf")
+ with warnings.catch_warnings():
+     # The purpose is to use the default time encoding, so just silence the warning
+     warnings.filterwarnings("ignore", category=UserWarning,
+                             message=r"Times can't be serialized faithfully to int64 with requested units")
+     scn.save_datasets(filename=filename, writer="cf")
# Check decoded time coordinates & bounds
with xr.open_dataset(filename, decode_cf=True) as f:
bounds_exp = np.array([[start_time, end_time]], dtype="datetime64[m]")
@@ -319,7 +324,8 @@ def test_bounds_minimum(self):
attrs=dict(start_time=start_timeB,
end_time=end_timeB))
with TempFile() as filename:
- scn.save_datasets(filename=filename, writer="cf")
+ scn.save_datasets(filename=filename, writer="cf",
+                   encoding={"time": {"units": "seconds since 2018-01-01"}})
with xr.open_dataset(filename, decode_cf=True) as f:
bounds_exp = np.array([[start_timeA, end_timeB]], dtype="datetime64[m]")
np.testing.assert_array_equal(f["time_bnds"], bounds_exp)
@@ -340,7 +346,8 @@ def test_bounds_missing_time_info(self):
dims=["x", "y", "time"],
coords={"time": [np.datetime64("2018-05-30T10:05:00", "ns")]})
with TempFile() as filename:
- scn.save_datasets(filename=filename, writer="cf")
+ scn.save_datasets(filename=filename, writer="cf",
+                   encoding={"time": {"units": "seconds since 2018-01-01"}})
with xr.open_dataset(filename, decode_cf=True) as f:
bounds_exp = np.array([[start_timeA, end_timeA]], dtype="datetime64[m]")
np.testing.assert_array_equal(f["time_bnds"], bounds_exp)
@@ -357,7 +364,8 @@ def test_unlimited_dims_kwarg(self):
attrs=dict(start_time=start_time,
end_time=end_time))
with TempFile() as filename:
- scn.save_datasets(filename=filename, writer="cf", unlimited_dims=["time"])
+ scn.save_datasets(filename=filename, writer="cf", unlimited_dims=["time"],
+                   encoding={"time": {"units": "seconds since 2018-01-01"}})
with xr.open_dataset(filename) as f:
assert set(f.encoding["unlimited_dims"]) == {"time"}

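
For test_bounds the diff takes the second route: it keeps the default time encoding and only suppresses the specific warning while writing. A sketch of that pattern, reusing the illustrative scn and output filename from the sketch above:

    import warnings

    with warnings.catch_warnings():
        # Keep the default time encoding; suppress only xarray's known warning
        # about lossy int64 serialization while the file is written.
        warnings.filterwarnings("ignore", category=UserWarning,
                                message=r"Times can't be serialized faithfully to int64 with requested units")
        scn.save_datasets(filename="example_cf.nc", writer="cf")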