From c52f7a074c8ed107e784b21204086c16dde048d3 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?J=C4=81nis=20Gailis?=
Date: Thu, 27 Sep 2018 13:32:28 +0200
Subject: [PATCH] Make sure integer data arrays can be coregistered

There was a numba typing issue in the 'gridtools' code we had incorporated
that prevented the downsampling code from compiling when a non-float
datatype was used. Fix borrowed from upstream.

Closes #770
---
 CHANGES.md                      |  2 +
 cate/ops/resampling.py          |  2 +-
 test/ops/test_coregistration.py | 68 +++++++++++++++++++++++++++++++++
 3 files changed, 71 insertions(+), 1 deletion(-)

diff --git a/CHANGES.md b/CHANGES.md
index d307caf10..af166e986 100644
--- a/CHANGES.md
+++ b/CHANGES.md
@@ -1,5 +1,7 @@
 ## Version 2.0.0.dev21 (in development)
 
+* Make sure integer data variables can be coregistered [#770](https://github.com/CCI-Tools/cate/issues/770)
+
 ## Version 2.0.0.dev20
 
 * Changed order and names of new `data_frame_subset` operation inputs.

diff --git a/cate/ops/resampling.py b/cate/ops/resampling.py
index ac3af3d50..86663f5e4 100644
--- a/cate/ops/resampling.py
+++ b/cate/ops/resampling.py
@@ -529,7 +529,7 @@ def _downsample_2d(src, mask, use_mask, method, fill_value, mode_rank, out):
                 else:
                     out[out_y, out_x] = (wvv_sum * w_sum - wv_sum * wv_sum) / w_sum / w_sum
         if method == DS_STD:
-            out = np.sqrt(out)
+            out = np.sqrt(out).astype(out.dtype)
     else:
         raise ValueError('invalid downsampling method')
 

diff --git a/test/ops/test_coregistration.py b/test/ops/test_coregistration.py
index e25ca33e5..3247735e6 100644
--- a/test/ops/test_coregistration.py
+++ b/test/ops/test_coregistration.py
@@ -674,3 +674,71 @@ def test_2D(self):
 
         ds_coarse_resampled = coregister(ds_fine, ds_coarse)
         assert_almost_equal(ds_coarse_resampled['first'].values, slice_exp)
+
+    def test_int_array(self):
+        """
+        Test coregistration on integer arrays
+        """
+        ds_fine = xr.Dataset({
+            'first': (['time', 'lat', 'lon'], np.array([np.eye(4, 8), np.eye(4, 8)], dtype='int32')),
+            'second': (['time', 'lat', 'lon'], np.array([np.eye(4, 8), np.eye(4, 8)])),
+            'lat': np.linspace(-67.5, 67.5, 4),
+            'lon': np.linspace(-157.5, 157.5, 8),
+            'time': np.array([1, 2])}).chunk(chunks={'lat': 2, 'lon': 4})
+
+        ds_coarse = xr.Dataset({
+            'first': (['time', 'lat', 'lon'], np.array([np.eye(3, 6), np.eye(3, 6)], dtype='int32')),
+            'second': (['time', 'lat', 'lon'], np.array([np.eye(3, 6), np.eye(3, 6)])),
+            'lat': np.linspace(-60, 60, 3),
+            'lon': np.linspace(-150, 150, 6),
+            'time': np.array([1, 2])}).chunk(chunks={'lat': 3, 'lon': 3})
+
+        # Test that the coarse dataset has been resampled onto the grid
+        # of the finer dataset.
+        ds_coarse_resampled = coregister(ds_fine, ds_coarse, method_us='nearest')
+
+        expected = xr.Dataset({
+            'first': (['time', 'lat', 'lon'], np.array([[[1, 1, 0, 0, 0, 0, 0, 0],
+                                                          [1, 1, 0, 0, 0, 0, 0, 0],
+                                                          [0, 0, 1, 0, 0, 0, 0, 0],
+                                                          [0, 0, 0, 1, 0, 0, 0, 0]],
+                                                         [[1, 1, 0, 0, 0, 0, 0, 0],
+                                                          [1, 1, 0, 0, 0, 0, 0, 0],
+                                                          [0, 0, 1, 0, 0, 0, 0, 0],
+                                                          [0, 0, 0, 1, 0, 0, 0, 0]]])),
+            'second': (['time', 'lat', 'lon'], np.array([[[1, 1, 0, 0, 0, 0, 0, 0],
+                                                           [1, 1, 0, 0, 0, 0, 0, 0],
+                                                           [0, 0, 1, 0, 0, 0, 0, 0],
+                                                           [0, 0, 0, 1, 0, 0, 0, 0]],
+                                                          [[1, 1, 0, 0, 0, 0, 0, 0],
+                                                           [1, 1, 0, 0, 0, 0, 0, 0],
+                                                           [0, 0, 1, 0, 0, 0, 0, 0],
+                                                           [0, 0, 0, 1, 0, 0, 0, 0]]])),
+            'lat': np.linspace(-67.5, 67.5, 4),
+            'lon': np.linspace(-157.5, 157.5, 8),
+            'time': np.array([1, 2])})
+        assert_almost_equal(ds_coarse_resampled['first'].values, expected['first'].values)
+
+        # Test that the fine dataset has been resampled (aggregated)
+        # onto the grid of the coarse dataset.
+        ds_fine_resampled = coregister(ds_coarse, ds_fine, method_ds='mode')
+        expected = xr.Dataset({
+            'first': (['time', 'lat', 'lon'], np.array([[[1, 0, 0, 0, 0, 0],
+                                                          [0, 1, 0, 0, 0, 0],
+                                                          [0, 0, 1, 0, 0, 0]],
+
+                                                         [[1, 0, 0, 0, 0, 0],
+                                                          [0, 1, 0, 0, 0, 0],
+                                                          [0, 0, 1, 0, 0, 0]]])),
+            'second': (['time', 'lat', 'lon'], np.array([[[1, 0, 0, 0, 0, 0],
+                                                           [0, 1, 0, 0, 0, 0],
+                                                           [0, 0, 1, 0, 0, 0]],
+
+                                                          [[1, 0, 0, 0, 0, 0],
+                                                           [0, 1, 0, 0, 0, 0],
+                                                           [0, 0, 1, 0, 0, 0]]])),
+            'lat': np.linspace(-60, 60, 3),
+            'lon': np.linspace(-150, 150, 6),
+            'time': np.array([1, 2])})
+
+        assert_almost_equal(ds_fine_resampled['first'].values, expected['first'].values)
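
For context, the dtype behaviour behind the one-line change in _downsample_2d can be
sketched at the plain numpy level. This sketch is not part of the patch; 'out' here
merely stands in for the integer result grid of the jitted kernel. np.sqrt() promotes
an integer array to float64, so rebinding 'out' to its result is what tripped numba's
type inference when a non-float datatype was used; casting back to out.dtype keeps the
variable's type stable:

    import numpy as np

    out = np.eye(3, 6, dtype='int32')            # stand-in for an integer result grid
    print(np.sqrt(out).dtype)                    # float64: dtype (and numba type) changes
    print(np.sqrt(out).astype(out.dtype).dtype)  # int32: dtype preserved, as in the fix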