Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Fix LI L2 accumulated products 'with_area_definition': False 1-d coordinates computation #2804

Merged
merged 7 commits into from
Jun 4, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 3 additions & 0 deletions satpy/readers/li_base_nc.py
Original file line number Diff line number Diff line change
Expand Up @@ -371,6 +371,9 @@ def inverse_projection(self, azimuth, elevation, proj_dict):
azimuth = azimuth.values * point_height
elevation = elevation.values * point_height

# In the MTG world, azimuth is defined as positive towards west, while proj expects it positive towards east
azimuth *= -1
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I was going to comment that in-place multiplication with Dask arrays is dangerous, but it seems the array is already computed (azimuth.values is accessed). Is that necessary for some further computations?

Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

That self.inverse_projection method is called inside a da.map_blocks:

# Daskify inverse projection computation:
lon, lat = da.map_blocks(self.inverse_projection, azimuth, elevation, proj_dict,
chunks=(2, azimuth.shape[0]),
meta=np.array((), dtype=azimuth.dtype),
dtype=azimuth.dtype,
)

so I believe it should remain daskified. IIRC, we needed to get the .values inside there as the following Proj call accepts only computed values.

Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I would have thought da.map_blocks() would pass Numpy arrays to the method being used 🤔 Haven't used it in a while so might remember incorrectly. And if it works, then it must be correct because Numpy arrays wouldn't have .values attribute causing a crash 😅


lon, lat = projection(azimuth, elevation, inverse=True)

return np.stack([lon.astype(azimuth.dtype), lat.astype(elevation.dtype)])
Expand Down
2 changes: 1 addition & 1 deletion satpy/readers/li_l2_nc.py
Original file line number Diff line number Diff line change
Expand Up @@ -73,7 +73,7 @@ def get_area_def(self, dsid):
if var_with_swath_coord and self.with_area_def:
return get_area_def("mtg_fci_fdss_2km")

raise NotImplementedError("Area definition is not supported for accumulated products.")
raise NotImplementedError("Area definition is not supported for non-accumulated products.")

def is_var_with_swath_coord(self, dsid):
"""Check if the variable corresponding to this dataset is listed as variable with swath coordinates."""
Expand Down
16 changes: 10 additions & 6 deletions satpy/tests/reader_tests/_li_test_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -522,20 +522,24 @@ def accumulation_dimensions(nacc, nobs):

def fci_grid_definition(axis, nobs):
"""FCI grid definition on X or Y axis."""
scale_factor = 5.58871526031607e-5
add_offset = -0.15561777642350116
if axis == "X":
long_name = "azimuth angle encoded as column"
standard_name = "projection_x_coordinate"
scale_factor *= -1
add_offset *= -1
else:
long_name = "zenith angle encoded as row"
standard_name = "projection_y_coordinate"

return {
"format": "i2",
"shape": ("pixels",),
"add_offset": -0.155619516,
"add_offset": add_offset,
"axis": axis,
"long_name": long_name,
"scale_factor": 5.58878e-5,
"scale_factor": scale_factor,
"standard_name": standard_name,
"units": "radian",
"valid_range": np.asarray([1, 5568]),
Expand All @@ -549,12 +553,12 @@ def mtg_geos_projection():
"format": "i4",
"shape": ("accumulations",),
"grid_mapping_name": "geostationary",
"inverse_flattening": 298.2572221,
"inverse_flattening": 298.257223563,
"latitude_of_projection_origin": 0,
"longitude_of_projection_origin": 0,
"perspective_point_height": 42164000,
"semi_major_axis": 6378169,
"semi_minor_axis": 6356583.8,
"perspective_point_height": 3.57864e7,
"semi_major_axis": 6378137.0,
"semi_minor_axis": 6356752.31424518,
"sweep_angle_axis": "y",
"long_name": "MTG geostationary projection",
"default_data": lambda: -2147483647
Expand Down
26 changes: 25 additions & 1 deletion satpy/tests/reader_tests/test_li_l2_nc.py
Original file line number Diff line number Diff line change
Expand Up @@ -592,7 +592,6 @@ def test_generate_coords_called_once(Self, filetype_infos):

def test_coords_generation(self, filetype_infos):
"""Compare daskified coords generation results with non-daskified."""
# Prepare dummy (but somewhat realistic) arrays of azimuth/elevation values.
products = ["li_l2_af_nc",
"li_l2_afr_nc",
"li_l2_afa_nc"]
Expand Down Expand Up @@ -627,6 +626,7 @@ def test_coords_generation(self, filetype_infos):
projection = Proj(proj_dict)
azimuth_vals = azimuth.values * point_height
elevation_vals = elevation.values * point_height
azimuth_vals *= -1
lon_ref, lat_ref = projection(azimuth_vals, elevation_vals, inverse=True)
# Convert to float32:
lon_ref = lon_ref.astype(np.float32)
Expand All @@ -640,6 +640,30 @@ def test_coords_generation(self, filetype_infos):
np.testing.assert_equal(lon, lon_ref)
np.testing.assert_equal(lat, lat_ref)

def test_coords_and_grid_consistency(self, filetype_infos):
"""Compare computed latlon coords for 1-d version with latlon from areadef as for the gridded version.

The same AF file is read twice conceptually: once through the 1-d
scan-angle path (generate_coords_from_scan_angles) and once through the
2-d area-definition path (get_area_def). Both must yield the same
lon/lat for the same pixels, to within a small relative tolerance.
"""
handler = LIL2NCFileHandler("filename", {}, extract_filetype_info(filetype_infos, "li_l2_af_nc"),
with_area_definition=True)

# Get cols/rows arrays from handler
x = handler.get_measured_variable(handler.swath_coordinates["azimuth"])
y = handler.get_measured_variable(handler.swath_coordinates["elevation"])
# NOTE(review): assumes the encoded column index is 1-based (hence the -1)
# and that the row axis is flipped relative to y within LI_GRID_SHAPE —
# confirm against the LI grid specification.
cols = x.astype(int) - 1
rows = (LI_GRID_SHAPE[0] - y.astype(int))

# compute lonlat from 1-d coords generation (called when with_area_definition==False)
handler.generate_coords_from_scan_angles()
lon = handler.internal_variables["longitude"].values
lat = handler.internal_variables["latitude"].values

# compute lonlat from 2-d areadef
dsid = make_dataid(name="flash_accumulation")
area_def = handler.get_area_def(dsid)
lon_areadef, lat_areadef = area_def.get_lonlat_from_array_coordinates(cols, rows)

# Loose relative tolerance: the two paths use independent projection
# computations, so only approximate agreement is expected.
np.testing.assert_allclose(lon, lon_areadef, rtol=1e-3)
np.testing.assert_allclose(lat, lat_areadef, rtol=1e-3)

def test_get_area_def_acc_products(self, filetype_infos):
"""Test retrieval of area def for accumulated products."""
handler = LIL2NCFileHandler("filename", {}, extract_filetype_info(filetype_infos, "li_l2_af_nc"),
Expand Down
Loading