Merge pull request #284 from amoodie/maintain
Regular maintenance
amoodie authored Feb 20, 2025
2 parents 17c2e67 + 456ca58 commit 6a6b9f1
Showing 6 changed files with 308 additions and 287 deletions.
11 changes: 7 additions & 4 deletions .github/workflows/build.yml
@@ -19,7 +19,7 @@ jobs:
fail-fast: false
matrix:
os: [ubuntu-latest, macos-latest, windows-latest]
- python-version: ['3.9', '3.10', '3.11']
+ python-version: ['3.11', '3.12', '3.13']

steps:
- name: Checkout code
@@ -31,6 +31,7 @@ jobs:
- name: Install dependencies
run: |
python -m pip install --upgrade pip
+ pip install setuptools
pip install -r requirements-test.txt
pip install -r requirements.txt
- name: Install pyDeltaRCM
@@ -52,7 +53,7 @@

env:
OS: ${{ matrix.os }}
- PYTHON: '3.10'
+ PYTHON: '3.13'

steps:
- uses: actions/checkout@v4
@@ -63,6 +64,7 @@
- name: Install dependencies
run: |
python -m pip install --upgrade pip
+ pip install setuptools
pip install pytest pytest-cov pytest-mpl coveralls
pip install -r requirements.txt
- name: Install pyDeltaRCM
@@ -95,10 +97,11 @@ jobs:
- name: Set up Python 3.10
uses: actions/setup-python@v4
with:
- python-version: '3.10'
+ python-version: '3.13'
- name: Install dependencies
run: |
python -m pip install --upgrade pip
+ pip install setuptools
pip install -r requirements.txt
pip install -r requirements-docs.txt
sudo apt update -y && sudo apt install -y latexmk texlive-latex-recommended texlive-latex-extra texlive-fonts-recommended dvipng
@@ -109,7 +112,7 @@
run: |
(cd docs && make docs)
- name: Upload log file
- uses: actions/upload-artifact@v3
+ uses: actions/upload-artifact@v4
if: ${{ failure() }}
with:
name: log-file
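Note: the `pip install setuptools` step added to each job most likely tracks the move to Python 3.12/3.13, where the standard library no longer ships `distutils` and fresh virtual environments no longer pre-install `setuptools`. A minimal sketch of the failure mode the extra step guards against (illustrative only, not part of this diff):

import sys

# On Python 3.12+ neither distutils nor a pre-installed setuptools can be
# assumed, so builds that rely on either must install setuptools explicitly.
if sys.version_info >= (3, 12):
    try:
        import setuptools  # noqa: F401
    except ImportError:
        print("setuptools not installed; run `pip install setuptools` as the workflow now does")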
2 changes: 1 addition & 1 deletion .github/workflows/deploy.yml
@@ -18,7 +18,7 @@ jobs:
- name: Set up Python
uses: actions/setup-python@v2
with:
- python-version: '3.9'
+ python-version: '3.13'
- name: Install dependencies
run: |
python -m pip install --upgrade pip
2 changes: 1 addition & 1 deletion pyDeltaRCM/_version.py
@@ -3,4 +3,4 @@ def __version__() -> str:
Private version declaration, gets assigned to pyDeltaRCM.__version__
during import
"""
return "2.1.5"
return "2.1.6"
4 changes: 4 additions & 0 deletions pyDeltaRCM/shared_tools.py
@@ -272,6 +272,7 @@ def custom_unravel(i: int, shape: Tuple[int, int]) -> Tuple[int, int]:
from pyDeltaRCM.shared_tools import custom_unravel
.. doctest::
+ :skip:
>>> _shape = (100, 200) # e.g., delta.eta.shape
@@ -318,6 +319,7 @@ def custom_ravel(tup: Tuple[int, int], shape: Tuple[int, int]) -> int:
from pyDeltaRCM.shared_tools import custom_ravel
.. doctest::
+ :skip:
>>> _shape = (100, 200) # e.g., delta.eta.shape
@@ -368,6 +370,7 @@ def custom_pad(arr: np.ndarray) -> np.ndarray:
Consider a model domain of size `(4, 8)`
.. doctest::
+ :skip:
>>> arr = np.arange(32).reshape(4, 8)
@@ -382,6 +385,7 @@ def custom_pad(arr: np.ndarray) -> np.ndarray:
sliced:
.. doctest::
+ :skip:
>>> for i in range(4):
... for j in range(8):
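Note: the `shared_tools.py` hunks appear to add only `:skip:` options to the doctest blocks; the helpers themselves are unchanged. For context, a minimal usage sketch, assuming `custom_unravel` and `custom_ravel` mirror NumPy's `unravel_index` and `ravel_multi_index` for a 2D shape, as the (now-skipped) docstring examples suggest:

import numpy as np
from pyDeltaRCM.shared_tools import custom_ravel, custom_unravel

_shape = (100, 200)  # e.g., delta.eta.shape
flat = 205           # a flat (raveled) index into the domain

row_col = custom_unravel(flat, _shape)      # expected (1, 5)
round_trip = custom_ravel(row_col, _shape)  # expected 205

# NumPy equivalents for comparison
assert np.unravel_index(flat, _shape) == (1, 5)
assert np.ravel_multi_index((1, 5), _shape) == 205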
130 changes: 69 additions & 61 deletions tests/integration/test_timing_triggers.py
@@ -11,11 +11,9 @@


class TestTimingOutputData:

def test_update_make_record(self, tmp_path: Path) -> None:
# create a delta with default settings
- p = utilities.yaml_from_dict(tmp_path, 'input.yaml',
- {'save_checkpoint': True})
+ p = utilities.yaml_from_dict(tmp_path, "input.yaml", {"save_checkpoint": True})
_delta = DeltaModel(input_file=p)

# modify the save interval to be twice dt
@@ -89,7 +87,7 @@ def test_update_make_record(self, tmp_path: Path) -> None:
def test_update_saving_intervals_on_cycle(self, tmp_path: Path) -> None:
"""dt == 300; save_dt == 600"""
# create a delta with default settings
- p = utilities.yaml_from_dict(tmp_path, 'input.yaml')
+ p = utilities.yaml_from_dict(tmp_path, "input.yaml")
_delta = DeltaModel(input_file=p)

# modify the timestep and save interval to be twice dt
@@ -141,7 +139,7 @@ def test_update_saving_intervals_on_cycle(self, tmp_path: Path) -> None:
def test_update_saving_intervals_short(self, tmp_path: Path) -> None:
"""dt == 300; save_dt == 100"""
# create a delta with default settings
- p = utilities.yaml_from_dict(tmp_path, 'input.yaml')
+ p = utilities.yaml_from_dict(tmp_path, "input.yaml")
_delta = DeltaModel(input_file=p)

# modify the timestep and save interval to be twice dt
@@ -182,10 +180,12 @@ def test_update_saving_intervals_short(self, tmp_path: Path) -> None:
assert _delta.save_iter == 4 # once during init
assert _delta.time == 900

- def test_update_saving_intervals_offset_long_not_double(self, tmp_path: Path) -> None:
+ def test_update_saving_intervals_offset_long_not_double(
+ self, tmp_path: Path
+ ) -> None:
"""dt == 300; save_dt == 500"""
# create a delta with default settings
- p = utilities.yaml_from_dict(tmp_path, 'input.yaml')
+ p = utilities.yaml_from_dict(tmp_path, "input.yaml")
_delta = DeltaModel(input_file=p)

# modify the timestep and save interval to be twice dt
@@ -223,10 +223,12 @@ def test_update_saving_intervals_offset_long_not_double(self, tmp_path: Path) ->
assert _delta.time == 3600
assert _delta.save_grids_and_figs.call_count == 6

- def test_update_saving_intervals_offset_long_over_double(self, tmp_path: Path) -> None:
+ def test_update_saving_intervals_offset_long_over_double(
+ self, tmp_path: Path
+ ) -> None:
"""dt == 300; save_dt == 1000"""
# create a delta with default settings
- p = utilities.yaml_from_dict(tmp_path, 'input.yaml')
+ p = utilities.yaml_from_dict(tmp_path, "input.yaml")
_delta = DeltaModel(input_file=p)

# modify the timestep and save interval to be twice dt
@@ -280,7 +282,7 @@ def test_update_saving_intervals_offset_long_over_double(self, tmp_path: Path) -
assert _delta._is_finalized is False

def test_finalize_updated(self, tmp_path: Path) -> None:
- p = utilities.yaml_from_dict(tmp_path, 'input.yaml')
+ p = utilities.yaml_from_dict(tmp_path, "input.yaml")
_delta = DeltaModel(input_file=p)

# mock the top-level
@@ -311,9 +313,9 @@ def test_finalize_updated(self, tmp_path: Path) -> None:
assert _delta._is_finalized is True

def test_save_one_fig_no_grids(self, tmp_path: Path) -> None:
- p = utilities.yaml_from_dict(tmp_path, 'input.yaml',
- {'save_dt': 1,
- 'save_eta_figs': True})
+ p = utilities.yaml_from_dict(
+ tmp_path, "input.yaml", {"save_dt": 1, "save_eta_figs": True}
+ )
_delta = DeltaModel(input_file=p)

# mock the timestep computations
@@ -324,8 +326,8 @@ def test_save_one_fig_no_grids(self, tmp_path: Path) -> None:
_delta.output_checkpoint = mock.MagicMock()

# check one set images created during init
- img_glob = glob.glob(os.path.join(_delta.prefix, '*.png'))
- nc_glob = glob.glob(os.path.join(_delta.prefix, '*.nc'))
+ img_glob = glob.glob(os.path.join(_delta.prefix, "*.png"))
+ nc_glob = glob.glob(os.path.join(_delta.prefix, "*.nc"))
assert len(img_glob) == 1
assert len(nc_glob) == 0

@@ -337,23 +339,23 @@ def test_save_one_fig_no_grids(self, tmp_path: Path) -> None:
_delta.solve_water_and_sediment_timestep.call_count == 2

# check for output eta files
- exp_path_nc = os.path.join(
- tmp_path / 'out_dir', 'pyDeltaRCM_output.nc')
- exp_path_png0 = os.path.join(tmp_path / 'out_dir', 'eta_00000.png')
- exp_path_png1 = os.path.join(tmp_path / 'out_dir', 'eta_00001.png')
- exp_path_png2 = os.path.join(tmp_path / 'out_dir', 'eta_00002.png')
- exp_path_png3 = os.path.join(tmp_path / 'out_dir', 'eta_00003.png')
+ exp_path_nc = os.path.join(tmp_path / "out_dir", "pyDeltaRCM_output.nc")
+ exp_path_png0 = os.path.join(tmp_path / "out_dir", "eta_00000.png")
+ exp_path_png1 = os.path.join(tmp_path / "out_dir", "eta_00001.png")
+ exp_path_png2 = os.path.join(tmp_path / "out_dir", "eta_00002.png")
+ exp_path_png3 = os.path.join(tmp_path / "out_dir", "eta_00003.png")
assert not os.path.isfile(exp_path_nc)
assert os.path.isfile(exp_path_png0)
assert os.path.isfile(exp_path_png1)
assert os.path.isfile(exp_path_png2)
assert not os.path.isfile(exp_path_png3)

def test_save_one_fig_one_grid(self, tmp_path: Path) -> None:
- p = utilities.yaml_from_dict(tmp_path, 'input.yaml',
- {'save_dt': 1,
- 'save_eta_grids': True,
- 'save_discharge_figs': True})
+ p = utilities.yaml_from_dict(
+ tmp_path,
+ "input.yaml",
+ {"save_dt": 1, "save_eta_grids": True, "save_discharge_figs": True},
+ )
_delta = DeltaModel(input_file=p)

# mock the timestep computations
@@ -363,8 +365,7 @@ def test_save_one_fig_one_grid(self, tmp_path: Path) -> None:
_delta.log_model_time = mock.MagicMock()
_delta.output_checkpoint = mock.MagicMock()

- exp_path_nc = os.path.join(
- tmp_path / 'out_dir', 'pyDeltaRCM_output.nc')
+ exp_path_nc = os.path.join(tmp_path / "out_dir", "pyDeltaRCM_output.nc")
assert os.path.isfile(exp_path_nc)
nc_size_before = os.path.getsize(exp_path_nc)
assert nc_size_before > 0
@@ -385,9 +386,9 @@ def test_save_one_fig_one_grid(self, tmp_path: Path) -> None:
assert nc_size_after > nc_size_before

def test_save_metadata_no_grids(self, tmp_path: Path) -> None:
- p = utilities.yaml_from_dict(tmp_path, 'input.yaml',
- {'save_dt': 1,
- 'save_metadata': True})
+ p = utilities.yaml_from_dict(
+ tmp_path, "input.yaml", {"save_dt": 1, "save_metadata": True}
+ )
_delta = DeltaModel(input_file=p)

# mock the timestep computations
Expand All @@ -397,8 +398,7 @@ def test_save_metadata_no_grids(self, tmp_path: Path) -> None:
_delta.log_model_time = mock.MagicMock()
_delta.output_checkpoint = mock.MagicMock()

- exp_path_nc = os.path.join(
- tmp_path / 'out_dir', 'pyDeltaRCM_output.nc')
+ exp_path_nc = os.path.join(tmp_path / "out_dir", "pyDeltaRCM_output.nc")
assert os.path.isfile(exp_path_nc)

for _ in range(0, 2):
Expand All @@ -410,17 +410,22 @@ def test_save_metadata_no_grids(self, tmp_path: Path) -> None:
_delta.finalize()

ds = netCDF4.Dataset(exp_path_nc, "r", format="NETCDF4")
- assert not ('eta' in ds.variables)
- assert ds['meta']['H_SL'].shape[0] == 3
- assert ds['meta']['L0'][:] == 3
+ assert not ("eta" in ds.variables)
+ assert ds["meta"]["H_SL"].shape[0] == 3
+ assert ds["meta"]["L0"][:] == 3

def test_save_subsidence_metadata_no_grids(self, tmp_path: Path) -> None:
- p = utilities.yaml_from_dict(tmp_path, 'input.yaml',
- {'save_dt': 1,
- 'toggle_subsidence': True,
- 'start_subsidence': 0,
- 'subsidence_rate': 1,
- 'save_metadata': True})
+ p = utilities.yaml_from_dict(
+ tmp_path,
+ "input.yaml",
+ {
+ "save_dt": 1,
+ "toggle_subsidence": True,
+ "start_subsidence": 0,
+ "subsidence_rate": 1,
+ "save_metadata": True,
+ },
+ )
_delta = DeltaModel(input_file=p)

# mock the timestep computations
@@ -430,8 +435,7 @@ def test_save_subsidence_metadata_no_grids(self, tmp_path: Path) -> None:
_delta.log_model_time = mock.MagicMock()
_delta.output_checkpoint = mock.MagicMock()

- exp_path_nc = os.path.join(
- tmp_path / 'out_dir', 'pyDeltaRCM_output.nc')
+ exp_path_nc = os.path.join(tmp_path / "out_dir", "pyDeltaRCM_output.nc")
assert os.path.isfile(exp_path_nc)

for _ in range(0, 2):
@@ -443,19 +447,24 @@ def test_save_subsidence_metadata_no_grids(self, tmp_path: Path) -> None:
_delta.finalize()

ds = netCDF4.Dataset(exp_path_nc, "r", format="NETCDF4")
- assert not ('eta' in ds.variables)
- assert ds['meta']['H_SL'].shape[0] == 3
- assert ds['meta']['L0'][:] == 3
- assert ds['meta']['sigma'].shape == _delta.sigma.shape
- assert np.all(ds['meta']['sigma'] == _delta.sigma)
- assert ds['meta']['start_subsidence'][:] == 0
+ assert not ("eta" in ds.variables)
+ assert ds["meta"]["H_SL"].shape[0] == 3
+ assert ds["meta"]["L0"][:] == 3
+ assert ds["meta"]["sigma"].shape == _delta.sigma.shape
+ assert np.all(ds["meta"]["sigma"] == _delta.sigma)
+ assert ds["meta"]["start_subsidence"][:] == 0

def test_save_one_grid_metadata_by_default(self, tmp_path: Path) -> None:
- p = utilities.yaml_from_dict(tmp_path, 'input.yaml',
- {'save_dt': 1,
- 'save_metadata': False,
- 'save_eta_grids': True,
- 'C0_percent': 0.2})
+ p = utilities.yaml_from_dict(
+ tmp_path,
+ "input.yaml",
+ {
+ "save_dt": 1,
+ "save_metadata": False,
+ "save_eta_grids": True,
+ "C0_percent": 0.2,
+ },
+ )
_delta = DeltaModel(input_file=p)

# mock the timestep computations
@@ -465,8 +474,7 @@ def test_save_one_grid_metadata_by_default(self, tmp_path: Path) -> None:
_delta.log_model_time = mock.MagicMock()
_delta.output_checkpoint = mock.MagicMock()

- exp_path_nc = os.path.join(
- tmp_path / 'out_dir', 'pyDeltaRCM_output.nc')
+ exp_path_nc = os.path.join(tmp_path / "out_dir", "pyDeltaRCM_output.nc")
assert os.path.isfile(exp_path_nc)

for _ in range(0, 6):
@@ -477,10 +485,10 @@ def test_save_one_grid_metadata_by_default(self, tmp_path: Path) -> None:

_delta.finalize()
ds = netCDF4.Dataset(exp_path_nc, "r", format="NETCDF4")
- _arr = ds.variables['eta']
+ _arr = ds.variables["eta"]
assert _arr.shape[1] == _delta.eta.shape[0]
assert _arr.shape[2] == _delta.eta.shape[1]
- assert ('meta' in ds.groups) # if any grids, save meta too
- assert ds.groups['meta']['H_SL'].shape[0] == _arr.shape[0]
- assert np.all(ds.groups['meta']['C0_percent'][:] == 0.2)
- assert np.all(ds.groups['meta']['f_bedload'][:] == 0.5)
+ assert "meta" in ds.groups # if any grids, save meta too
+ assert ds.groups["meta"]["H_SL"].shape[0] == _arr.shape[0]
+ assert np.all(ds.groups["meta"]["C0_percent"][:].data == 0.2)
+ assert np.all(ds.groups["meta"]["f_bedload"][:].data == 0.5)