-
Notifications
You must be signed in to change notification settings - Fork 77
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Implementation of data-processing-level attributes (#1001)
* Draft, rough, partial implementation of data-processing-level attribute insertion * Generalized add_processing_level to serve as decorator for class methods. Decorated echodata.update_platform * Finalize processing level decorator function, handle new wildcard-based level or sublevel propagation forms like L*A or L2*; other cleanups to the function * Add processing level decorator to remove_noise, apply_mask and compute_MVBS_index_binning * Added error checks and warnings (echopype logger.info) to add_processing_level * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci * Added integration test for processing levels functionality, testing multiple processing steps * Update echopype/tests/utils/test_processinglevels_integration.py * Rename sv_ds to Sv_ds in test, for clarity and accuracy --------- Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> Co-authored-by: Wu-Jung Lee <[email protected]>
- Loading branch information
1 parent
cb44dbc
commit e9a5d86
Showing
8 changed files
with
310 additions
and
4 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
122 changes: 122 additions & 0 deletions
122
echopype/tests/utils/test_processinglevels_integration.py
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,122 @@ | ||
import pytest | ||
|
||
import numpy as np | ||
import xarray as xr | ||
import echopype as ep | ||
|
||
|
||
@pytest.mark.parametrize(
    ["sonar_model", "path_model", "raw_and_xml_paths", "extras"],
    [
        (
            "EK60",
            "EK60",
            ("Winter2017-D20170115-T150122.raw", None),
            {},
        ),
        (
            "AZFP",
            "AZFP",
            ("17082117.01A", "17041823.XML"),
            {"longitude": -60.0, "latitude": 45.0, "salinity": 27.9, "pressure": 59},
        ),
    ],
)
def test_raw_to_mvbs(
    sonar_model,
    path_model,
    raw_and_xml_paths,
    extras,
    test_path
):
    """Integration test for data-processing-level attribute propagation.

    Runs a full raw -> Sv -> denoised -> frequency-differenced/masked -> MVBS
    pipeline and checks at every stage that the ``processing_level`` and
    ``processing_level_url`` dataset attributes are present with the expected
    level string, or absent where no level should be assigned.

    Parameters
    ----------
    sonar_model : str
        Sonar model passed to ``ep.open_raw`` ("EK60" or "AZFP").
    path_model : str
        Key into the ``test_path`` fixture selecting the test-data directory.
    raw_and_xml_paths : tuple
        (raw file name, XML file name or None). AZFP requires an XML file.
    extras : dict
        Extra parameters: lat/lon to inject via ``update_platform`` when the
        raw file lacks them, and salinity/pressure for AZFP calibration.
    test_path : fixture
        Maps path-model keys to test-data directories.
    """
    # ---- Resolve input file paths
    raw_path = test_path[path_model] / raw_and_xml_paths[0]
    if raw_and_xml_paths[1]:
        xml_path = test_path[path_model] / raw_and_xml_paths[1]
    else:
        xml_path = None

    def _presence_test(test_ds, processing_level):
        # Both the level label and its reference URL must be present,
        # and the label must match exactly.
        assert "processing_level" in test_ds.attrs
        assert "processing_level_url" in test_ds.attrs
        assert test_ds.attrs["processing_level"] == processing_level

    def _absence_test(test_ds):
        # Neither processing-level attribute may be present.
        assert "processing_level" not in test_ds.attrs
        assert "processing_level_url" not in test_ds.attrs

    # ---- Convert raw file and update_platform
    ed = ep.open_raw(raw_path, xml_path=xml_path, sonar_model=sonar_model)
    if "longitude" in ed['Platform'].data_vars and "latitude" in ed['Platform'].data_vars:
        # Raw file already carries position data: Level 1A assigned on open.
        _presence_test(ed["Top-level"], "Level 1A")
    elif "longitude" in extras and "latitude" in extras:
        # No position in the raw file; inject a single lat/lon point via
        # update_platform, which should then assign Level 1A.
        _absence_test(ed["Top-level"])
        point_ds = xr.Dataset(
            {
                "latitude": (["time"], np.array([float(extras["latitude"])])),
                "longitude": (["time"], np.array([float(extras["longitude"])])),
            },
            coords={
                "time": (["time"], np.array([ed["Sonar/Beam_group1"]["ping_time"].values.min()]))
            },
        )
        ed.update_platform(point_ds)
        _presence_test(ed["Top-level"], "Level 1A")
    else:
        _absence_test(ed["Top-level"])
        raise RuntimeError(
            "Platform latitude and longitude are not present and cannot be added "
            "using update_platform based on test raw file and included parameters."
        )

    # ---- Calibrate and add_latlon
    env_params = None
    if sonar_model == "AZFP":
        # AZFP data require external salinity and pressure
        env_params = {
            "temperature": ed["Environment"]["temperature"].values.mean(),
            "salinity": extras["salinity"],
            "pressure": extras["pressure"],
        }

    # Calibration alone does not assign a processing level; location is needed.
    ds = ep.calibrate.compute_Sv(echodata=ed, env_params=env_params)
    _absence_test(ds)

    Sv_ds = ep.consolidate.add_location(ds=ds, echodata=ed)
    assert "longitude" in Sv_ds.data_vars and "latitude" in Sv_ds.data_vars
    _presence_test(Sv_ds, "Level 2A")

    # ---- Noise removal
    denoised_ds = ep.clean.remove_noise(Sv_ds, ping_num=10, range_sample_num=20)
    _presence_test(denoised_ds, "Level 2B")

    # ---- apply_mask based on frequency differencing
    def _freqdiff_applymask(test_ds):
        # frequency_differencing expects a dataarray variable named "Sv". For denoised Sv,
        # rename Sv to Sv_raw and Sv_corrected to Sv before passing ds to frequency_differencing
        if "Sv_corrected" in test_ds.data_vars:
            out_ds = test_ds.rename_vars(name_dict={"Sv": "Sv_raw", "Sv_corrected": "Sv"})
        else:
            # Shallow-copy so adding Sv_ch0 below does not mutate the
            # caller's dataset (rename_vars already returns a new dataset
            # in the branch above).
            out_ds = test_ds.copy()
        freqAB = list(out_ds.frequency_nominal.values[:2])
        freqdiff_da = ep.mask.frequency_differencing(source_Sv=out_ds, freqAB=freqAB, operator=">", diff=5)

        # Create a new, single-channel Sv variable in ds to pass to apply_mask.
        # The resulting masked Sv will be single-channel.
        out_ds["Sv_ch0"] = out_ds["Sv"].isel(channel=0).squeeze()
        return ep.mask.apply_mask(source_ds=out_ds, var_name="Sv_ch0", mask=freqdiff_da)

    # On Sv w/o noise removal
    ds = _freqdiff_applymask(Sv_ds)
    _presence_test(ds, "Level 3A")

    # On denoised Sv
    ds = _freqdiff_applymask(denoised_ds)
    _presence_test(ds, "Level 3B")

    # ---- Compute MVBS
    # compute_MVBS expects a variable named "Sv"
    # No product level is assigned because at present compute_MVBS drops the
    # lat/lon data associated with the input Sv dataset
    ds = ds.rename_vars(name_dict={"Sv": "Sv_unmasked", "Sv_ch0": "Sv"})
    mvbs_ds = ep.commongrid.compute_MVBS(ds, range_meter_bin=30, ping_time_bin='1min')
    _absence_test(mvbs_ds)
Oops, something went wrong.