From 5c4daa7ee62826ad4214eb1e21a013b183902b71 Mon Sep 17 00:00:00 2001 From: Joeri van Engelen Date: Tue, 13 Aug 2024 14:42:05 +0200 Subject: [PATCH 01/34] update to new xugrid version to allow aggregating hfbs --- pixi.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pixi.toml b/pixi.toml index 98b6f3b76..a1f41bfc2 100644 --- a/pixi.toml +++ b/pixi.toml @@ -107,7 +107,7 @@ tqdm = "*" twine = "*" vtk = { version = ">=9.0", build = "*qt*", channel = "conda-forge" } xarray = ">=2023.08.0" -xugrid = ">=0.10.0" +xugrid = ">=0.11.0" zarr = "*" build = "*" From 3fa3ece3e6a1f3fcf1850b23e572e0645d3c9957 Mon Sep 17 00:00:00 2001 From: Joeri van Engelen Date: Tue, 13 Aug 2024 15:54:54 +0200 Subject: [PATCH 02/34] Update lockfile --- pixi.lock | 60 +++++++++++++++++++++++++++---------------------------- 1 file changed, 30 insertions(+), 30 deletions(-) diff --git a/pixi.lock b/pixi.lock index dc8d1e42e..37a608fd6 100644 --- a/pixi.lock +++ b/pixi.lock @@ -306,7 +306,7 @@ environments: - conda: https://conda.anaconda.org/conda-forge/linux-64/nspr-4.35-h27087fc_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/nss-3.102-h593d115_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/numba-0.60.0-py311h4bc866e_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/numba_celltree-0.1.6-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/numba_celltree-0.1.8-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/numcodecs-0.12.1-py311h4332511_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/numpy-1.26.4-py311h64a7726_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/occt-7.7.2-novtk_h130ccc2_101.conda @@ -468,7 +468,7 @@ environments: - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-xextproto-7.3.0-h0b41bf4_1003.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-xf86vidmodeproto-2.3.1-h7f98852_1002.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-xproto-7.0.31-h7f98852_1007.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/noarch/xugrid-0.10.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/xugrid-0.11.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/xyzservices-2024.6.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/xz-5.2.6-h166bdaf_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/linux-64/yaml-0.2.5-h7f98852_2.tar.bz2 @@ -758,7 +758,7 @@ environments: - conda: https://conda.anaconda.org/conda-forge/osx-64/nspr-4.35-hea0b92c_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/nss-3.102-he7eb89d_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/numba-0.60.0-py311h0e5bd6a_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/numba_celltree-0.1.6-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/numba_celltree-0.1.8-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/numcodecs-0.12.1-py311hbafa61a_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/numpy-1.26.4-py311hc43a94b_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/occt-7.7.2-all_hd7ce604_201.conda @@ -898,7 +898,7 @@ environments: - conda: https://conda.anaconda.org/conda-forge/osx-64/xorg-renderproto-0.11.1-h0d85af4_1002.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/osx-64/xorg-xextproto-7.3.0-hb7f2c08_1003.conda - conda: 
https://conda.anaconda.org/conda-forge/osx-64/xorg-xproto-7.0.31-h35c211d_1007.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/noarch/xugrid-0.10.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/xugrid-0.11.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/xyzservices-2024.6.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/xz-5.2.6-h775f41a_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/osx-64/yaml-0.2.5-h0d85af4_2.tar.bz2 @@ -1180,7 +1180,7 @@ environments: - conda: https://conda.anaconda.org/conda-forge/osx-arm64/nspr-4.35-hb7217d7_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/nss-3.102-hc42bcbf_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/numba-0.60.0-py311h9506ed5_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/numba_celltree-0.1.6-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/numba_celltree-0.1.8-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/numcodecs-0.12.1-py311hb9542d7_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/numpy-1.26.4-py311h7125741_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/occt-7.7.2-novtk_h5f4376a_101.conda @@ -1321,7 +1321,7 @@ environments: - conda: https://conda.anaconda.org/conda-forge/osx-arm64/xorg-renderproto-0.11.1-h27ca646_1002.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/osx-arm64/xorg-xextproto-7.3.0-h1a8c8d9_1003.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/xorg-xproto-7.0.31-h27ca646_1007.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/noarch/xugrid-0.10.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/xugrid-0.11.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/xyzservices-2024.6.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/xz-5.2.6-h57fd34a_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/osx-arm64/yaml-0.2.5-h3422bc3_2.tar.bz2 @@ -1565,7 +1565,7 @@ environments: - conda: https://conda.anaconda.org/conda-forge/win-64/nh3-0.2.18-py311h9363f20_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/nlohmann_json-3.11.3-h1537add_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/numba-0.60.0-py311h0673bce_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/numba_celltree-0.1.6-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/numba_celltree-0.1.8-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/numcodecs-0.12.1-py311hda3d55a_1.conda - conda: https://conda.anaconda.org/conda-forge/win-64/numpy-1.26.4-py311h0b4df5a_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/occt-7.7.2-novtk_hdfb195f_101.conda @@ -1717,7 +1717,7 @@ environments: - conda: https://conda.anaconda.org/conda-forge/win-64/xorg-renderproto-0.11.1-hcd874cb_1002.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/win-64/xorg-xextproto-7.3.0-hcd874cb_1003.conda - conda: https://conda.anaconda.org/conda-forge/win-64/xorg-xproto-7.0.31-hcd874cb_1007.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/noarch/xugrid-0.10.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/xugrid-0.11.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/xyzservices-2024.6.0-pyhd8ed1ab_0.conda - conda: 
https://conda.anaconda.org/conda-forge/win-64/xz-5.2.6-h8d14728_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/win-64/yaml-0.2.5-h8ffe710_2.tar.bz2 @@ -2087,7 +2087,7 @@ environments: - conda: https://conda.anaconda.org/conda-forge/linux-64/nspr-4.35-h27087fc_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/nss-3.102-h593d115_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/numba-0.60.0-py311h4bc866e_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/numba_celltree-0.1.6-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/numba_celltree-0.1.8-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/numcodecs-0.12.1-py311h4332511_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/numpy-1.26.4-py311h64a7726_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/occt-7.7.2-novtk_h130ccc2_101.conda @@ -2284,7 +2284,7 @@ environments: - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-xextproto-7.3.0-h0b41bf4_1003.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-xf86vidmodeproto-2.3.1-h7f98852_1002.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-xproto-7.0.31-h7f98852_1007.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/noarch/xugrid-0.10.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/xugrid-0.11.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/xyzservices-2024.6.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/xz-5.2.6-h166bdaf_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/linux-64/yaml-0.2.5-h7f98852_2.tar.bz2 @@ -2628,7 +2628,7 @@ environments: - conda: https://conda.anaconda.org/conda-forge/osx-64/nspr-4.35-hea0b92c_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/nss-3.102-he7eb89d_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/numba-0.60.0-py311h0e5bd6a_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/numba_celltree-0.1.6-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/numba_celltree-0.1.8-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/numcodecs-0.12.1-py311hbafa61a_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/numpy-1.26.4-py311hc43a94b_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/occt-7.7.2-all_hd7ce604_201.conda @@ -2805,7 +2805,7 @@ environments: - conda: https://conda.anaconda.org/conda-forge/osx-64/xorg-renderproto-0.11.1-h0d85af4_1002.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/osx-64/xorg-xextproto-7.3.0-hb7f2c08_1003.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/xorg-xproto-7.0.31-h35c211d_1007.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/noarch/xugrid-0.10.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/xugrid-0.11.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/xyzservices-2024.6.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/xz-5.2.6-h775f41a_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/osx-64/yaml-0.2.5-h0d85af4_2.tar.bz2 @@ -3141,7 +3141,7 @@ environments: - conda: https://conda.anaconda.org/conda-forge/osx-arm64/nspr-4.35-hb7217d7_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/nss-3.102-hc42bcbf_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/numba-0.60.0-py311h9506ed5_0.conda - - conda: 
https://conda.anaconda.org/conda-forge/noarch/numba_celltree-0.1.6-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/numba_celltree-0.1.8-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/numcodecs-0.12.1-py311hb9542d7_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/numpy-1.26.4-py311h7125741_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/occt-7.7.2-novtk_h5f4376a_101.conda @@ -3319,7 +3319,7 @@ environments: - conda: https://conda.anaconda.org/conda-forge/osx-arm64/xorg-renderproto-0.11.1-h27ca646_1002.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/osx-arm64/xorg-xextproto-7.3.0-h1a8c8d9_1003.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/xorg-xproto-7.0.31-h27ca646_1007.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/noarch/xugrid-0.10.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/xugrid-0.11.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/xyzservices-2024.6.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/xz-5.2.6-h57fd34a_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/osx-arm64/yaml-0.2.5-h3422bc3_2.tar.bz2 @@ -3616,7 +3616,7 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/notebook-7.2.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/notebook-shim-0.2.4-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/numba-0.60.0-py311h0673bce_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/numba_celltree-0.1.6-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/numba_celltree-0.1.8-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/numcodecs-0.12.1-py311hda3d55a_1.conda - conda: https://conda.anaconda.org/conda-forge/win-64/numpy-1.26.4-py311h0b4df5a_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/occt-7.7.2-novtk_hdfb195f_101.conda @@ -3804,7 +3804,7 @@ environments: - conda: https://conda.anaconda.org/conda-forge/win-64/xorg-renderproto-0.11.1-hcd874cb_1002.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/win-64/xorg-xextproto-7.3.0-hcd874cb_1003.conda - conda: https://conda.anaconda.org/conda-forge/win-64/xorg-xproto-7.0.31-hcd874cb_1007.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/noarch/xugrid-0.10.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/xugrid-0.11.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/xyzservices-2024.6.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/xz-5.2.6-h8d14728_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/win-64/yaml-0.2.5-h8ffe710_2.tar.bz2 @@ -22614,23 +22614,23 @@ packages: timestamp: 1718888718616 - kind: conda name: numba_celltree - version: 0.1.6 + version: 0.1.8 build: pyhd8ed1ab_0 subdir: noarch noarch: python - url: https://conda.anaconda.org/conda-forge/noarch/numba_celltree-0.1.6-pyhd8ed1ab_0.conda - sha256: 0ecb9c31a9d6cc23c9099c391ca14645f9b0118f5a22818c88b5091e3d0d27b2 - md5: fa93424676054b2d1fcc7864c4e68698 + url: https://conda.anaconda.org/conda-forge/noarch/numba_celltree-0.1.8-pyhd8ed1ab_0.conda + sha256: 4b30d964bf034b41ce14842bf8fa3040b21878c0da1bdec15c5523ab785bc311 + md5: 02b10d14b2e6693519804fe90d41c589 depends: - numba >=0.50 - numpy - - python >=3.7 + - python >=3.9 license: MIT license_family: MIT purls: - 
pkg:pypi/numba-celltree?source=conda-forge-mapping - size: 32808 - timestamp: 1672825982456 + size: 33524 + timestamp: 1722763371483 - kind: conda name: numcodecs version: 0.12.1 @@ -31926,18 +31926,18 @@ packages: timestamp: 1607292254607 - kind: conda name: xugrid - version: 0.10.0 + version: 0.11.0 build: pyhd8ed1ab_0 subdir: noarch noarch: python - url: https://conda.anaconda.org/conda-forge/noarch/xugrid-0.10.0-pyhd8ed1ab_0.conda - sha256: cf9b0c63b573405d5ac34a35819d98c10884abb0d49b19af0fc7414f8f44fa00 - md5: 7ef2f388e3b2adcecfed74591bff2451 + url: https://conda.anaconda.org/conda-forge/noarch/xugrid-0.11.0-pyhd8ed1ab_0.conda + sha256: 1f4bef6a7edb21c173c6b69788a42a46e350275664a79de14dc9efcdb9460627 + md5: 1d2fe2eccb1568bee8641cdf359ff742 depends: - dask - geopandas - numba >=0.50 - - numba_celltree + - numba_celltree >=0.1.8 - numpy - pandas - pooch @@ -31949,8 +31949,8 @@ packages: license_family: MIT purls: - pkg:pypi/xugrid?source=conda-forge-mapping - size: 94214 - timestamp: 1714580639671 + size: 94836 + timestamp: 1722866212176 - kind: conda name: xyzservices version: 2024.6.0 From a0b091f8930dbd7553000c577d266f1710c944f1 Mon Sep 17 00:00:00 2001 From: Joeri van Engelen Date: Tue, 13 Aug 2024 15:55:11 +0200 Subject: [PATCH 03/34] Add test to convert LHM --- imod/tests/test_mf6/test_mf6_LHM.py | 64 +++++++++++++++++++++++++++++ 1 file changed, 64 insertions(+) create mode 100644 imod/tests/test_mf6/test_mf6_LHM.py diff --git a/imod/tests/test_mf6/test_mf6_LHM.py b/imod/tests/test_mf6/test_mf6_LHM.py new file mode 100644 index 000000000..3172a5319 --- /dev/null +++ b/imod/tests/test_mf6/test_mf6_LHM.py @@ -0,0 +1,64 @@ +import sys +from pathlib import Path + +import pandas as pd + +import imod +from imod.formats.prj.prj import open_projectfile_data +from imod.logging.config import LoggerType +from imod.logging.loglevel import LogLevel +from imod.mf6.oc import OutputControl +from imod.mf6.regrid.regrid_schemes import ( + DiscretizationRegridMethod, + NodePropertyFlowRegridMethod, + StorageCoefficientRegridMethod, +) +from imod.mf6.simulation import Modflow6Simulation +from imod.mf6.utilities.mask import mask_arrays +from imod.prepare.topsystem.default_allocation_methods import ( + SimulationAllocationOptions, + SimulationDistributingOptions, +) + +LHM_DIR = Path(r"c:\Users\engelen\projects_wdir\imod-python\imod5_converter\MODFLOW6_MODEL") + + +def test_mf6_LHM(tmp_path): + logfile_path = tmp_path / "logfile.txt" + with open(logfile_path, "w") as sys.stdout: + imod.logging.configure( + LoggerType.PYTHON, + log_level=LogLevel.DEBUG, + add_default_file_handler=False, + add_default_stream_handler=True, + ) + data = open_projectfile_data(LHM_DIR / "LHM4.3_test.prj") + + imod5_data = data[0] + period_data = data[1] + default_simulation_allocation_options = SimulationAllocationOptions + default_simulation_distributing_options = SimulationDistributingOptions + + regridding_option = {} + regridding_option["npf"] = NodePropertyFlowRegridMethod() + regridding_option["dis"] = DiscretizationRegridMethod() + regridding_option["sto"] = StorageCoefficientRegridMethod() + times = pd.date_range(start="1/1/2018", end="12/1/2018", freq="ME") + + simulation = Modflow6Simulation.from_imod5_data( + imod5_data, + period_data, + default_simulation_allocation_options, + default_simulation_distributing_options, + times, + regridding_option, + ) + simulation["imported_model"]["oc"] = OutputControl( + save_head="last", save_budget="last" + ) + + for k, package in simulation["imported_model"].items(): + 
package.dataset.load() + simulation.write(tmp_path, binary=False, validate=True) + pass + From 66cc255fcdab11efb8f1d4a4dc0dcb1c49f9cb1d Mon Sep 17 00:00:00 2001 From: Joeri van Engelen Date: Tue, 13 Aug 2024 17:37:19 +0200 Subject: [PATCH 04/34] Add test with multiple linestrings --- imod/tests/test_mf6/test_mf6_hfb.py | 34 +++++++++++++++++++++++++++++ 1 file changed, 34 insertions(+) diff --git a/imod/tests/test_mf6/test_mf6_hfb.py b/imod/tests/test_mf6/test_mf6_hfb.py index fa2a5335b..49600da15 100644 --- a/imod/tests/test_mf6/test_mf6_hfb.py +++ b/imod/tests/test_mf6/test_mf6_hfb.py @@ -697,6 +697,40 @@ def test_hfb_from_imod5(imod5_dataset, tmp_path): assert list(np.unique(hfb_package["layer"].values)) == [7] + + +@pytest.mark.usefixtures("structured_flow_model") +def test_combine_linestrings(structured_flow_model): + dis = structured_flow_model["dis"] + top, bottom, idomain = dis["top"], dis["bottom"], dis["idomain"], + k = xr.ones_like(idomain) + + barrier_y = [11.0, 5.0, -1.0] + barrier_x = [5.0, 5.0, 5.0] + line = linestrings(barrier_x, barrier_y) + + geometry_single = gpd.GeoDataFrame( + geometry=[line], + data={ + "resistance": [1200.0], + "layer": [1], + }, + ) + geometry_triple = gpd.GeoDataFrame( + geometry=[line, line, line], + data={ + "resistance": [400.0, 400.0, 400.0], + "layer": [1, 1, 1], + }, + ) + hfb_single = SingleLayerHorizontalFlowBarrierResistance(geometry_single) + hfb_triple = SingleLayerHorizontalFlowBarrierResistance(geometry_triple) + mf6_hfb_single = hfb_single.to_mf6_pkg(idomain, top, bottom, k) + mf6_hfb_triple = hfb_triple.to_mf6_pkg(idomain, top, bottom, k) + + xr.testing.equals(mf6_hfb_single.dataset, mf6_hfb_triple.dataset) + + @pytest.mark.usefixtures("structured_flow_model") def test_run_multiple_hfbs(tmp_path, structured_flow_model): # Single layered model From 2bb61cd7a088b32acd1c557b7edf9d6dffc78476 Mon Sep 17 00:00:00 2001 From: Joeri van Engelen Date: Wed, 14 Aug 2024 18:14:11 +0200 Subject: [PATCH 05/34] Call proper assert function --- imod/tests/test_mf6/test_mf6_hfb.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/imod/tests/test_mf6/test_mf6_hfb.py b/imod/tests/test_mf6/test_mf6_hfb.py index 49600da15..ac05e254b 100644 --- a/imod/tests/test_mf6/test_mf6_hfb.py +++ b/imod/tests/test_mf6/test_mf6_hfb.py @@ -728,7 +728,7 @@ def test_combine_linestrings(structured_flow_model): mf6_hfb_single = hfb_single.to_mf6_pkg(idomain, top, bottom, k) mf6_hfb_triple = hfb_triple.to_mf6_pkg(idomain, top, bottom, k) - xr.testing.equals(mf6_hfb_single.dataset, mf6_hfb_triple.dataset) + xr.testing.assert_equal(mf6_hfb_single.dataset, mf6_hfb_triple.dataset) @pytest.mark.usefixtures("structured_flow_model") From 5bc8fb160abe76832724a8a9eb5684f261188c15 Mon Sep 17 00:00:00 2001 From: Joeri van Engelen Date: Wed, 14 Aug 2024 18:14:55 +0200 Subject: [PATCH 06/34] Aggregate lines to edges --- imod/mf6/hfb.py | 59 ++++++++++++++++++++++++++----------------------- 1 file changed, 31 insertions(+), 28 deletions(-) diff --git a/imod/mf6/hfb.py b/imod/mf6/hfb.py index 027cae283..7d80aff46 100644 --- a/imod/mf6/hfb.py +++ b/imod/mf6/hfb.py @@ -752,43 +752,46 @@ def __to_unstructured( def _snap_to_grid( self, idomain: GridDataArray ) -> Tuple[xu.UgridDataset, np.ndarray]: - if "layer" in self._get_vertical_variables(): - variable_names = [self._get_variable_name(), "geometry", "layer"] + variable_name = self._get_variable_name() + has_layer = "layer" in self._get_vertical_variables() + # Create geodataframe with barriers + if has_layer: + 
varnames_for_df = [variable_name, "geometry", "layer"] else: - variable_names = [self._get_variable_name(), "geometry"] + varnames_for_df = [variable_name, "geometry"] barrier_dataframe = gpd.GeoDataFrame( - self.dataset[variable_names].to_dataframe() + self.dataset[varnames_for_df].to_dataframe() ) - - if "layer" not in self._get_vertical_variables(): + # Convert vertical polygon to linestring + if not has_layer: lower, _ = _extract_hfb_bounds_from_zpolygons(barrier_dataframe) linestring = _create_zlinestring_from_bound_df(lower) barrier_dataframe["geometry"] = linestring["geometry"] - + # Clip barriers outside domain barrier_dataframe = clip_line_gdf_by_grid( barrier_dataframe, idomain.sel(layer=1) ) - - # Work around issue where xu.snap_to_grid cannot handle snapping a - # dataset with multiple lines appropriately. This can be later replaced - # to a single call to xu.snap_to_grid if this bug is fixed: - # - snapped_dataset_rows: List[xr.Dataset] = [] - for index in barrier_dataframe.index: - # Index with list to return pd.DataFrame instead of pd.Series for - # xu.snap_to_grid. - row_df = barrier_dataframe.loc[[index]] - snapped_dataset_row, _ = typing.cast( - xu.UgridDataset, - xu.snap_to_grid(row_df, grid=idomain, max_snap_distance=0.5), - ) - snapped_dataset_row["line_index"] += index[0] # Set to original line index. - snapped_dataset_rows.append(snapped_dataset_row) - snapped_dataset = xu.merge(snapped_dataset_rows) - - edge_index = np.argwhere( - snapped_dataset[self._get_variable_name()].notnull().values - ).ravel() + # Prepare variable names and methods for aggregation + varnames_agg = ["line_index", variable_name] + methods_agg = ["first", "sum"] + if has_layer: + varnames_agg.append("layer") + methods_agg.append("first") + # Snap line to edges, groupby edge index + grid2d = xu.UgridDataArray.from_structured(idomain.sel(layer=1)).grid + snapping_df = xu.create_snap_to_grid_dataframe(barrier_dataframe, grid2d, max_snap_distance=0.5) + for varname in varnames_agg[1:]: + line_index = snapping_df["line_index"] + snapping_df[varname] = barrier_dataframe[varname].iloc[line_index].to_numpy() + gb_edge = snapping_df.groupby("edge_index") + # Initialize dataset and dataarray with the right shape and dims + snapped_dataset = xu.UgridDataset(grids=[grid2d]) + new = xr.DataArray(np.full(grid2d.n_edge, np.nan), dims=[grid2d.edge_dimension]) + edge_index = np.array(list(gb_edge.indices.keys())) + # Aggregate with different methods per variable + for varname, method in zip(varnames_agg, methods_agg): + snapped_dataset[varname] = new.copy() + snapped_dataset[varname].data[edge_index] = gb_edge[varname].aggregate(method) return snapped_dataset, edge_index From 386af3ac68813b9ed73b3653a8804f39a4cddfe6 Mon Sep 17 00:00:00 2001 From: Joeri van Engelen Date: Wed, 14 Aug 2024 18:41:29 +0200 Subject: [PATCH 07/34] Move snapping and aggregation logic to separate function. 
--- imod/mf6/hfb.py | 65 ++++++++++++++++++++++++++++++++++--------------- 1 file changed, 45 insertions(+), 20 deletions(-) diff --git a/imod/mf6/hfb.py b/imod/mf6/hfb.py index 7d80aff46..cbe9e2650 100644 --- a/imod/mf6/hfb.py +++ b/imod/mf6/hfb.py @@ -8,6 +8,7 @@ import cftime import numpy as np +import numpy.typing as npt import pandas as pd import xarray as xr import xugrid as xu @@ -374,6 +375,45 @@ def _prepare_barrier_dataset_for_mf6_adapter(dataset: xr.Dataset) -> xr.Dataset: return dataset +def _snap_to_grid_and_aggregate( + barrier_dataframe: gpd.GeoDataFrame, grid2d: xu.Ugrid2d, vardict_agg: dict[str, str] +) -> tuple[xu.UgridDataset, npt.NDArray]: + """ + Snap barrier dataframe to grid and aggregate multiple lines with a list of + methods per variable. + + Parameters + ---------- + barrier_dataframe: geopandas.GeoDataFrame + GeoDataFrame with barriers, should have variable "line_index". + grid2d: xugrid.Ugrid2d + Grid to snap lines to + vardict_agg: dict + Mapping of variable name to aggregation method + """ + snapping_df = xu.create_snap_to_grid_dataframe( + barrier_dataframe, grid2d, max_snap_distance=0.5 + ) + # Map other variables to snapping_df with line indices + line_index = snapping_df["line_index"] + vars_to_snap = list(vardict_agg.keys()) + vars_to_snap.remove("line_index") # snapping_df already has line_index + for varname in vars_to_snap: + snapping_df[varname] = barrier_dataframe[varname].iloc[line_index].to_numpy() + # Aggregate to grid edges + gb_edge = snapping_df.groupby("edge_index") + # Initialize dataset and dataarray with the right shape and dims + snapped_dataset = xu.UgridDataset(grids=[grid2d]) + new = xr.DataArray(np.full(grid2d.n_edge, np.nan), dims=[grid2d.edge_dimension]) + edge_index = np.array(list(gb_edge.indices.keys())) + # Aggregate with different methods per variable + for varname, method in vardict_agg.items(): + snapped_dataset[varname] = new.copy() + snapped_dataset[varname].data[edge_index] = gb_edge[varname].aggregate(method) + + return snapped_dataset, edge_index + + class BarrierType(Enum): HydraulicCharacteristic = 0 Multiplier = 1 @@ -772,28 +812,13 @@ def _snap_to_grid( barrier_dataframe, idomain.sel(layer=1) ) # Prepare variable names and methods for aggregation - varnames_agg = ["line_index", variable_name] - methods_agg = ["first", "sum"] + vardict_agg = {"line_index": "first", variable_name: "sum"} if has_layer: - varnames_agg.append("layer") - methods_agg.append("first") - # Snap line to edges, groupby edge index + vardict_agg["layer"] = "first" + # Create grid from structured grid2d = xu.UgridDataArray.from_structured(idomain.sel(layer=1)).grid - snapping_df = xu.create_snap_to_grid_dataframe(barrier_dataframe, grid2d, max_snap_distance=0.5) - for varname in varnames_agg[1:]: - line_index = snapping_df["line_index"] - snapping_df[varname] = barrier_dataframe[varname].iloc[line_index].to_numpy() - gb_edge = snapping_df.groupby("edge_index") - # Initialize dataset and dataarray with the right shape and dims - snapped_dataset = xu.UgridDataset(grids=[grid2d]) - new = xr.DataArray(np.full(grid2d.n_edge, np.nan), dims=[grid2d.edge_dimension]) - edge_index = np.array(list(gb_edge.indices.keys())) - # Aggregate with different methods per variable - for varname, method in zip(varnames_agg, methods_agg): - snapped_dataset[varname] = new.copy() - snapped_dataset[varname].data[edge_index] = gb_edge[varname].aggregate(method) - - return snapped_dataset, edge_index + + return _snap_to_grid_and_aggregate(barrier_dataframe, grid2d, 
vardict_agg) @staticmethod def __remove_invalid_edges( From 8564f0c205ca674feb8177c367f165d3c43157a4 Mon Sep 17 00:00:00 2001 From: Joeri van Engelen Date: Wed, 14 Aug 2024 18:41:41 +0200 Subject: [PATCH 08/34] format --- imod/tests/test_mf6/test_mf6_LHM.py | 6 +++--- imod/tests/test_mf6/test_mf6_hfb.py | 8 +++++--- 2 files changed, 8 insertions(+), 6 deletions(-) diff --git a/imod/tests/test_mf6/test_mf6_LHM.py b/imod/tests/test_mf6/test_mf6_LHM.py index 3172a5319..edde6fe05 100644 --- a/imod/tests/test_mf6/test_mf6_LHM.py +++ b/imod/tests/test_mf6/test_mf6_LHM.py @@ -14,13 +14,14 @@ StorageCoefficientRegridMethod, ) from imod.mf6.simulation import Modflow6Simulation -from imod.mf6.utilities.mask import mask_arrays from imod.prepare.topsystem.default_allocation_methods import ( SimulationAllocationOptions, SimulationDistributingOptions, ) -LHM_DIR = Path(r"c:\Users\engelen\projects_wdir\imod-python\imod5_converter\MODFLOW6_MODEL") +LHM_DIR = Path( + r"c:\Users\engelen\projects_wdir\imod-python\imod5_converter\MODFLOW6_MODEL" +) def test_mf6_LHM(tmp_path): @@ -61,4 +62,3 @@ def test_mf6_LHM(tmp_path): package.dataset.load() simulation.write(tmp_path, binary=False, validate=True) pass - diff --git a/imod/tests/test_mf6/test_mf6_hfb.py b/imod/tests/test_mf6/test_mf6_hfb.py index ac05e254b..a7ab2d490 100644 --- a/imod/tests/test_mf6/test_mf6_hfb.py +++ b/imod/tests/test_mf6/test_mf6_hfb.py @@ -697,12 +697,14 @@ def test_hfb_from_imod5(imod5_dataset, tmp_path): assert list(np.unique(hfb_package["layer"].values)) == [7] - - @pytest.mark.usefixtures("structured_flow_model") def test_combine_linestrings(structured_flow_model): dis = structured_flow_model["dis"] - top, bottom, idomain = dis["top"], dis["bottom"], dis["idomain"], + top, bottom, idomain = ( + dis["top"], + dis["bottom"], + dis["idomain"], + ) k = xr.ones_like(idomain) barrier_y = [11.0, 5.0, -1.0] From 16ff403396022b86686333606fb276a1f5490e27 Mon Sep 17 00:00:00 2001 From: Joeri van Engelen Date: Thu, 15 Aug 2024 10:49:12 +0200 Subject: [PATCH 09/34] Add function to enforce ugrid dataarays --- imod/typing/grid.py | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/imod/typing/grid.py b/imod/typing/grid.py index 9daa867d7..7817a0fe9 100644 --- a/imod/typing/grid.py +++ b/imod/typing/grid.py @@ -431,3 +431,17 @@ def is_transient_data_grid( if len(grid["time"]) > 1: return True return False + + +@typedispatch +def enforce_ugrid(grid: xr.DataArray) -> xu.UgridDataArray: + return xu.UgridDataArray.from_structured(grid) + +@typedispatch # type: ignore[no-redef] +def enforce_ugrid(grid: xu.UgridDataArray | xu.UgridDataset) -> xu.UgridDataArray: + return grid + +@typedispatch # type: ignore[no-redef] +def enforce_ugrid(grid: object) -> xu.UgridDataArray: + raise TypeError(f"function doesn't support type {type(grid)}") + From 9cfe7f9ae75dc9f72f795a0b61a4bdcaafccf262 Mon Sep 17 00:00:00 2001 From: Joeri van Engelen Date: Thu, 15 Aug 2024 10:50:25 +0200 Subject: [PATCH 10/34] Better name --- imod/typing/grid.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/imod/typing/grid.py b/imod/typing/grid.py index 7817a0fe9..31c18bbe3 100644 --- a/imod/typing/grid.py +++ b/imod/typing/grid.py @@ -365,7 +365,7 @@ def enforce_dim_order(grid: xu.UgridDataArray) -> xu.UgridDataArray: # noqa: F8 ) -def _enforce_unstructured(obj: GridDataArray, ugrid2d=xu.Ugrid2d) -> xu.UgridDataArray: +def _enforce_unstructured(obj: GridDataArray, ugrid2d: xu.Ugrid2d) -> xu.UgridDataArray: """Force obj to unstructured""" return 
xu.UgridDataArray(xr.DataArray(obj), ugrid2d) @@ -434,14 +434,16 @@ def is_transient_data_grid( @typedispatch -def enforce_ugrid(grid: xr.DataArray) -> xu.UgridDataArray: +def enforce_ugrid_da(grid: xr.DataArray) -> xu.UgridDataArray: return xu.UgridDataArray.from_structured(grid) + @typedispatch # type: ignore[no-redef] -def enforce_ugrid(grid: xu.UgridDataArray | xu.UgridDataset) -> xu.UgridDataArray: +def enforce_ugrid_da(grid: xu.UgridDataArray | xu.UgridDataset) -> xu.UgridDataArray: return grid + @typedispatch # type: ignore[no-redef] -def enforce_ugrid(grid: object) -> xu.UgridDataArray: +def enforce_ugrid_da(grid: object) -> xu.UgridDataArray: raise TypeError(f"function doesn't support type {type(grid)}") From cde947b641c28a54728df076245b7dd15db64226 Mon Sep 17 00:00:00 2001 From: Joeri van Engelen Date: Thu, 15 Aug 2024 11:04:45 +0200 Subject: [PATCH 11/34] Further improve naming --- imod/typing/grid.py | 23 +++++++++++++---------- 1 file changed, 13 insertions(+), 10 deletions(-) diff --git a/imod/typing/grid.py b/imod/typing/grid.py index 31c18bbe3..0f19cefb3 100644 --- a/imod/typing/grid.py +++ b/imod/typing/grid.py @@ -365,9 +365,11 @@ def enforce_dim_order(grid: xu.UgridDataArray) -> xu.UgridDataArray: # noqa: F8 ) -def _enforce_unstructured(obj: GridDataArray, ugrid2d: xu.Ugrid2d) -> xu.UgridDataArray: - """Force obj to unstructured""" - return xu.UgridDataArray(xr.DataArray(obj), ugrid2d) +def _enforce_uda_with_topology( + obj: GridDataArray, topology: xu.Ugrid2d +) -> xu.UgridDataArray: + """Force obj and topology to ugrid dataarray""" + return xu.UgridDataArray(xr.DataArray(obj), topology) def preserve_gridtype(func: Callable[P, T]) -> Callable[P, T]: @@ -399,8 +401,8 @@ def decorator(*args: P.args, **kwargs: P.kwargs): if unstructured: # Multiple grids returned if isinstance(x, tuple): - return tuple(_enforce_unstructured(i, grid) for i in x) - return _enforce_unstructured(x, grid) + return tuple(_enforce_uda_with_topology(i, grid) for i in x) + return _enforce_uda_with_topology(x, grid) return x return decorator @@ -434,16 +436,17 @@ def is_transient_data_grid( @typedispatch -def enforce_ugrid_da(grid: xr.DataArray) -> xu.UgridDataArray: +def enforce_uda(grid: xr.DataArray) -> xu.UgridDataArray: + """Enforce GridDataArray to UgridDataArray""" return xu.UgridDataArray.from_structured(grid) @typedispatch # type: ignore[no-redef] -def enforce_ugrid_da(grid: xu.UgridDataArray | xu.UgridDataset) -> xu.UgridDataArray: +def enforce_uda(grid: xu.UgridDataArray) -> xu.UgridDataArray: # noqa: F811 + """Enforce GridDataArray to UgridDataArray""" return grid @typedispatch # type: ignore[no-redef] -def enforce_ugrid_da(grid: object) -> xu.UgridDataArray: - raise TypeError(f"function doesn't support type {type(grid)}") - +def enforce_uda(grid: object) -> xu.UgridDataArray: # noqa: F811 + raise TypeError(f"Function doesn't support type {type(grid)}") From 2573907a9d75c9c56939a50f4ea383d66e68a3ba Mon Sep 17 00:00:00 2001 From: Joeri van Engelen Date: Thu, 15 Aug 2024 11:12:45 +0200 Subject: [PATCH 12/34] Apply enforce_uda function --- imod/mf6/hfb.py | 23 ++++------------------- 1 file changed, 4 insertions(+), 19 deletions(-) diff --git a/imod/mf6/hfb.py b/imod/mf6/hfb.py index cbe9e2650..fa14d55e3 100644 --- a/imod/mf6/hfb.py +++ b/imod/mf6/hfb.py @@ -28,6 +28,7 @@ ) from imod.schemata import EmptyIndexesSchema, MaxNUniqueValuesSchema from imod.typing import GeoDataFrameType, GridDataArray, LineStringType +from imod.typing.grid import enforce_uda from imod.util.imports import 
MissingOptionalModule if TYPE_CHECKING: @@ -504,11 +505,8 @@ def _to_connected_cells_dataset( """ top, bottom = broadcast_to_full_domain(idomain, top, bottom) k = idomain * k - unstructured_grid, top, bottom, k = ( - self.__to_unstructured(idomain, top, bottom, k) - if isinstance(idomain, xr.DataArray) - else [idomain, top, bottom, k] - ) + # Enforce unstructured + unstructured_grid, top, bottom, k = (enforce_uda(grid) for grid in [idomain, top, bottom, k]) snapped_dataset, edge_index = self._snap_to_grid(idomain) edge_index = self.__remove_invalid_edges(unstructured_grid, edge_index) @@ -776,19 +774,6 @@ def mask(self, _) -> Package: """ return deepcopy(self) - @staticmethod - def __to_unstructured( - idomain: xr.DataArray, top: xr.DataArray, bottom: xr.DataArray, k: xr.DataArray - ) -> Tuple[ - xu.UgridDataArray, xu.UgridDataArray, xu.UgridDataArray, xu.UgridDataArray - ]: - unstruct = xu.UgridDataArray.from_structured(idomain) - top = xu.UgridDataArray.from_structured(top) - bottom = xu.UgridDataArray.from_structured(bottom) - k = xu.UgridDataArray.from_structured(k) - - return unstruct, top, bottom, k - def _snap_to_grid( self, idomain: GridDataArray ) -> Tuple[xu.UgridDataset, np.ndarray]: @@ -816,7 +801,7 @@ def _snap_to_grid( if has_layer: vardict_agg["layer"] = "first" # Create grid from structured - grid2d = xu.UgridDataArray.from_structured(idomain.sel(layer=1)).grid + grid2d = enforce_uda(idomain.sel(layer=1)).grid return _snap_to_grid_and_aggregate(barrier_dataframe, grid2d, vardict_agg) From a301e7ed9983a07cb91795c56df2c66139446939 Mon Sep 17 00:00:00 2001 From: Joeri van Engelen Date: Thu, 15 Aug 2024 14:59:39 +0200 Subject: [PATCH 13/34] Make test to write HFBs work --- imod/tests/test_mf6/test_mf6_LHM.py | 88 +++++++++++++++++++++-------- 1 file changed, 64 insertions(+), 24 deletions(-) diff --git a/imod/tests/test_mf6/test_mf6_LHM.py b/imod/tests/test_mf6/test_mf6_LHM.py index edde6fe05..14437e36c 100644 --- a/imod/tests/test_mf6/test_mf6_LHM.py +++ b/imod/tests/test_mf6/test_mf6_LHM.py @@ -14,6 +14,8 @@ StorageCoefficientRegridMethod, ) from imod.mf6.simulation import Modflow6Simulation +from imod.mf6.utilities.mf6hfb import merge_hfb_packages +from imod.mf6.write_context import WriteContext from imod.prepare.topsystem.default_allocation_methods import ( SimulationAllocationOptions, SimulationDistributingOptions, @@ -23,6 +25,34 @@ r"c:\Users\engelen\projects_wdir\imod-python\imod5_converter\MODFLOW6_MODEL" ) +# @pytest.fixture(scope="module") +def LHM_imod5_data(): + data = open_projectfile_data(LHM_DIR / "LHM4.3_test.prj") + + imod5_data = data[0] + period_data = data[1] + default_simulation_allocation_options = SimulationAllocationOptions + default_simulation_distributing_options = SimulationDistributingOptions + + regridding_option = {} + regridding_option["npf"] = NodePropertyFlowRegridMethod() + regridding_option["dis"] = DiscretizationRegridMethod() + regridding_option["sto"] = StorageCoefficientRegridMethod() + times = pd.date_range(start="1/1/2018", end="12/1/2018", freq="ME") + + simulation = Modflow6Simulation.from_imod5_data( + imod5_data, + period_data, + default_simulation_allocation_options, + default_simulation_distributing_options, + times, + regridding_option, + ) + simulation["imported_model"]["oc"] = OutputControl( + save_head="last", save_budget="last" + ) + return simulation + def test_mf6_LHM(tmp_path): logfile_path = tmp_path / "logfile.txt" @@ -33,32 +63,42 @@ def test_mf6_LHM(tmp_path): add_default_file_handler=False, 
add_default_stream_handler=True, ) - data = open_projectfile_data(LHM_DIR / "LHM4.3_test.prj") + simulation = LHM_imod5_data() - imod5_data = data[0] - period_data = data[1] - default_simulation_allocation_options = SimulationAllocationOptions - default_simulation_distributing_options = SimulationDistributingOptions + for k, package in simulation["imported_model"].items(): + package.dataset.load() + simulation.write(tmp_path, binary=False, validate=True) - regridding_option = {} - regridding_option["npf"] = NodePropertyFlowRegridMethod() - regridding_option["dis"] = DiscretizationRegridMethod() - regridding_option["sto"] = StorageCoefficientRegridMethod() - times = pd.date_range(start="1/1/2018", end="12/1/2018", freq="ME") - simulation = Modflow6Simulation.from_imod5_data( - imod5_data, - period_data, - default_simulation_allocation_options, - default_simulation_distributing_options, - times, - regridding_option, - ) - simulation["imported_model"]["oc"] = OutputControl( - save_head="last", save_budget="last" +def test_mf6_LHM_write_HFB(tmp_path): + logfile_path = tmp_path / "logfile.txt" + with open(logfile_path, "w") as sys.stdout: + imod.logging.configure( + LoggerType.PYTHON, + log_level=LogLevel.DEBUG, + add_default_file_handler=False, + add_default_stream_handler=True, ) + simulation = LHM_imod5_data() + model = simulation["imported_model"] + + mf6_hfb_ls = [] + for key, pkg in model.items(): + if issubclass(type(pkg), imod.mf6.HorizontalFlowBarrierBase): + mf6_hfb_ls.append(pkg) + pkg.dataset.load() + + top, bottom, idomain = model._Modflow6Model__get_domain_geometry() + k = model._Modflow6Model__get_k() + + mf6_hfb = merge_hfb_packages(mf6_hfb_ls, idomain, top, bottom, k) + + times = pd.date_range(start="1/1/2018", end="12/1/2018", freq="ME") + + out_dir = tmp_path / "LHM" + out_dir.mkdir(parents=True, exist_ok=True) + write_context = WriteContext(out_dir, use_binary=True, use_absolute_paths=False) + + mf6_hfb.write("hfb", times, write_context) + - for k, package in simulation["imported_model"].items(): - package.dataset.load() - simulation.write(tmp_path, binary=False, validate=True) - pass From c13a8d2d7d43c98655401bd974aae879751f9ea1 Mon Sep 17 00:00:00 2001 From: Joeri van Engelen Date: Fri, 16 Aug 2024 14:31:16 +0200 Subject: [PATCH 14/34] Add LHM and a pixi task for user acceptance tests. --- imod/tests/test_mf6/test_mf6_LHM.py | 38 ++++++++------------- pixi.lock | 52 +++++++++++++++++++++++++++++ pixi.toml | 10 +++++- pyproject.toml | 1 + 4 files changed, 76 insertions(+), 25 deletions(-) diff --git a/imod/tests/test_mf6/test_mf6_LHM.py b/imod/tests/test_mf6/test_mf6_LHM.py index 14437e36c..ac210cd5b 100644 --- a/imod/tests/test_mf6/test_mf6_LHM.py +++ b/imod/tests/test_mf6/test_mf6_LHM.py @@ -1,7 +1,16 @@ +""" +LHM tests, these are pytest marked with 'lhm'. + +These require the LHM model to be available on the local drive. The tests looks +for the path to the projectfile needs to be included in a .env file, with the +environmental variable "LHM_PRJ" with the path to the projectfile. +""" + +import os import sys -from pathlib import Path import pandas as pd +import pytest import imod from imod.formats.prj.prj import open_projectfile_data @@ -21,13 +30,11 @@ SimulationDistributingOptions, ) -LHM_DIR = Path( - r"c:\Users\engelen\projects_wdir\imod-python\imod5_converter\MODFLOW6_MODEL" -) -# @pytest.fixture(scope="module") +# In function, not a fixture, to allow logging of the import. 
def LHM_imod5_data(): - data = open_projectfile_data(LHM_DIR / "LHM4.3_test.prj") + lhm_prjfile = os.environ["LHM_PRJ"] + data = open_projectfile_data(lhm_prjfile) imod5_data = data[0] period_data = data[1] @@ -54,22 +61,7 @@ def LHM_imod5_data(): return simulation -def test_mf6_LHM(tmp_path): - logfile_path = tmp_path / "logfile.txt" - with open(logfile_path, "w") as sys.stdout: - imod.logging.configure( - LoggerType.PYTHON, - log_level=LogLevel.DEBUG, - add_default_file_handler=False, - add_default_stream_handler=True, - ) - simulation = LHM_imod5_data() - - for k, package in simulation["imported_model"].items(): - package.dataset.load() - simulation.write(tmp_path, binary=False, validate=True) - - +@pytest.mark.lhm def test_mf6_LHM_write_HFB(tmp_path): logfile_path = tmp_path / "logfile.txt" with open(logfile_path, "w") as sys.stdout: @@ -100,5 +92,3 @@ def test_mf6_LHM_write_HFB(tmp_path): write_context = WriteContext(out_dir, use_binary=True, use_absolute_paths=False) mf6_hfb.write("hfb", times, write_context) - - diff --git a/pixi.lock b/pixi.lock index 37a608fd6..6f2e4a544 100644 --- a/pixi.lock +++ b/pixi.lock @@ -362,9 +362,11 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/pytest-benchmark-4.0.0-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/pytest-cases-3.8.5-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pytest-cov-5.0.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pytest-dotenv-0.5.2-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-3.6.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/python-3.11.0-he550d4f_1_cpython.conda - conda: https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.9.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/python-dotenv-1.0.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/python-gmsh-4.12.2-h57928b3_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/python-graphviz-0.20.3-pyh717bed2_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/python-tzdata-2024.1-pyhd8ed1ab_0.conda @@ -808,9 +810,11 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/pytest-benchmark-4.0.0-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/pytest-cases-3.8.5-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pytest-cov-5.0.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pytest-dotenv-0.5.2-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-3.6.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/python-3.11.0-he7542f4_1_cpython.conda - conda: https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.9.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/python-dotenv-1.0.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/python-gmsh-4.12.2-h57928b3_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/python-graphviz-0.20.3-pyh717bed2_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/python-tzdata-2024.1-pyhd8ed1ab_0.conda @@ -1231,9 +1235,11 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/pytest-benchmark-4.0.0-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/pytest-cases-3.8.5-pyhd8ed1ab_0.conda - conda: 
https://conda.anaconda.org/conda-forge/noarch/pytest-cov-5.0.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pytest-dotenv-0.5.2-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-3.6.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/python-3.11.0-h3ba56d0_1_cpython.conda - conda: https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.9.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/python-dotenv-1.0.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/python-gmsh-4.12.2-h57928b3_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/python-graphviz-0.20.3-pyh717bed2_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/python-tzdata-2024.1-pyhd8ed1ab_0.conda @@ -1619,9 +1625,11 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/pytest-benchmark-4.0.0-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/pytest-cases-3.8.5-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pytest-cov-5.0.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pytest-dotenv-0.5.2-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-3.6.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/python-3.11.0-hcf16a7b_0_cpython.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.9.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/python-dotenv-1.0.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/python-gmsh-4.12.2-h57928b3_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/python-graphviz-0.20.3-pyh717bed2_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/python-tzdata-2024.1-pyhd8ed1ab_0.conda @@ -2155,9 +2163,11 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/pytest-benchmark-4.0.0-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/pytest-cases-3.8.5-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pytest-cov-5.0.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pytest-dotenv-0.5.2-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-3.6.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/python-3.11.0-he550d4f_1_cpython.conda - conda: https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.9.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/python-dotenv-1.0.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/python-fastjsonschema-2.20.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/python-gmsh-4.12.2-h57928b3_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/python-graphviz-0.20.3-pyh717bed2_0.conda @@ -2692,9 +2702,11 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/pytest-benchmark-4.0.0-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/pytest-cases-3.8.5-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pytest-cov-5.0.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pytest-dotenv-0.5.2-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-3.6.1-pyhd8ed1ab_0.conda - conda: 
https://conda.anaconda.org/conda-forge/osx-64/python-3.11.0-he7542f4_1_cpython.conda - conda: https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.9.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/python-dotenv-1.0.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/python-fastjsonschema-2.20.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/python-gmsh-4.12.2-h57928b3_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/python-graphviz-0.20.3-pyh717bed2_0.conda @@ -3206,9 +3218,11 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/pytest-benchmark-4.0.0-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/pytest-cases-3.8.5-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pytest-cov-5.0.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pytest-dotenv-0.5.2-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-3.6.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/python-3.11.0-h3ba56d0_1_cpython.conda - conda: https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.9.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/python-dotenv-1.0.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/python-fastjsonschema-2.20.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/python-gmsh-4.12.2-h57928b3_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/python-graphviz-0.20.3-pyh717bed2_0.conda @@ -3680,9 +3694,11 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/pytest-benchmark-4.0.0-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/pytest-cases-3.8.5-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pytest-cov-5.0.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pytest-dotenv-0.5.2-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-3.6.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/python-3.11.0-hcf16a7b_0_cpython.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.9.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/python-dotenv-1.0.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/python-fastjsonschema-2.20.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/python-gmsh-4.12.2-h57928b3_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/python-graphviz-0.20.3-pyh717bed2_0.conda @@ -25901,6 +25917,25 @@ packages: - pkg:pypi/pytest-cov?source=conda-forge-mapping size: 25507 timestamp: 1711411153367 +- kind: conda + name: pytest-dotenv + version: 0.5.2 + build: pyhd8ed1ab_0 + subdir: noarch + noarch: python + url: https://conda.anaconda.org/conda-forge/noarch/pytest-dotenv-0.5.2-pyhd8ed1ab_0.tar.bz2 + sha256: 43ab7de6af7b298a9199aea2bf6fa481a3059ba1068dd0967fe3a040ff6e9303 + md5: 11b16b526f60cc18748c3fe45d10315a + depends: + - pytest >=5.0.0 + - python >=3.6 + - python-dotenv >=0.9.1 + license: MIT + license_family: MIT + purls: + - pkg:pypi/pytest-dotenv?source=conda-forge-mapping + size: 7383 + timestamp: 1606859705188 - kind: conda name: pytest-xdist version: 3.6.1 @@ -26355,6 +26390,23 @@ packages: - pkg:pypi/python-dateutil?source=conda-forge-mapping size: 222742 
timestamp: 1709299922152 +- kind: conda + name: python-dotenv + version: 1.0.1 + build: pyhd8ed1ab_0 + subdir: noarch + noarch: python + url: https://conda.anaconda.org/conda-forge/noarch/python-dotenv-1.0.1-pyhd8ed1ab_0.conda + sha256: 2d4c80364f03315d606a50eddd493dbacc078e21412c2462c0f781eec49b572c + md5: c2997ea9360ac4e015658804a7a84f94 + depends: + - python >=3.8 + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/python-dotenv?source=conda-forge-mapping + size: 24278 + timestamp: 1706018281544 - kind: conda name: python-fastjsonschema version: 2.20.0 diff --git a/pixi.toml b/pixi.toml index a1f41bfc2..0d8770862 100644 --- a/pixi.toml +++ b/pixi.toml @@ -23,7 +23,7 @@ unittests = { cmd = [ "NUMBA_DISABLE_JIT=1", "pytest", "-n", "auto", - "-m", "not example", + "-m", "not example and not lhm", "--cache-clear", "--verbose", "--junitxml=unittest_report.xml", @@ -41,6 +41,13 @@ examples = { cmd = [ "--verbose", "--junitxml=examples_report.xml", ], depends_on = ["install"], cwd = "imod/tests" } +user_acceptance = { cmd = [ + "pytest", + "-m", "lhm", + "--cache-clear", + "--verbose", + "--junitxml=user_acceptance_report.xml", +], depends_on = ["install"], cwd = "imod/tests" } test_import = { cmd = [ "python", "-c", @@ -87,6 +94,7 @@ pytest = "<8" # Newer version incompatible with pytest-cases pytest-benchmark = "*" pytest-cases = "*" pytest-cov = "*" +pytest-dotenv = "*" pytest-xdist = "*" python = "3.11" python-graphviz = "*" diff --git a/pyproject.toml b/pyproject.toml index a84fc4762..4d56e68f6 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -157,6 +157,7 @@ ignore_missing_imports = true [tool.pytest.ini_options] markers = [ "example: marks test as example (deselect with '-m \"not example\"')", + "lhm: marks lhm tests (deselect with '-m \"not example\"')", ] [tool.hatch.version] From 5a24e8c7e548da2f4b350f1faf2f0eb2d8b06847 Mon Sep 17 00:00:00 2001 From: Joeri van Engelen Date: Fri, 16 Aug 2024 14:40:56 +0200 Subject: [PATCH 15/34] format --- imod/mf6/hfb.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/imod/mf6/hfb.py b/imod/mf6/hfb.py index fa14d55e3..19aad4183 100644 --- a/imod/mf6/hfb.py +++ b/imod/mf6/hfb.py @@ -506,7 +506,9 @@ def _to_connected_cells_dataset( top, bottom = broadcast_to_full_domain(idomain, top, bottom) k = idomain * k # Enforce unstructured - unstructured_grid, top, bottom, k = (enforce_uda(grid) for grid in [idomain, top, bottom, k]) + unstructured_grid, top, bottom, k = ( + enforce_uda(grid) for grid in [idomain, top, bottom, k] + ) snapped_dataset, edge_index = self._snap_to_grid(idomain) edge_index = self.__remove_invalid_edges(unstructured_grid, edge_index) From 7ce2d33f7fb0ce20b3c7a7211647ad3171407cf2 Mon Sep 17 00:00:00 2001 From: Joeri van Engelen Date: Fri, 16 Aug 2024 14:52:56 +0200 Subject: [PATCH 16/34] mark with "user_acceptance" instead "lhm" --- imod/tests/test_mf6/test_mf6_LHM.py | 4 ++-- pixi.toml | 6 ++++-- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/imod/tests/test_mf6/test_mf6_LHM.py b/imod/tests/test_mf6/test_mf6_LHM.py index ac210cd5b..a2b5cacea 100644 --- a/imod/tests/test_mf6/test_mf6_LHM.py +++ b/imod/tests/test_mf6/test_mf6_LHM.py @@ -1,5 +1,5 @@ """ -LHM tests, these are pytest marked with 'lhm'. +LHM tests, these are pytest-marked with 'user_acceptance'. These require the LHM model to be available on the local drive. 
The tests looks for the path to the projectfile needs to be included in a .env file, with the @@ -61,7 +61,7 @@ def LHM_imod5_data(): return simulation -@pytest.mark.lhm +@pytest.mark.user_acceptance def test_mf6_LHM_write_HFB(tmp_path): logfile_path = tmp_path / "logfile.txt" with open(logfile_path, "w") as sys.stdout: diff --git a/pixi.toml b/pixi.toml index 0d8770862..eac3c0f4d 100644 --- a/pixi.toml +++ b/pixi.toml @@ -23,7 +23,7 @@ unittests = { cmd = [ "NUMBA_DISABLE_JIT=1", "pytest", "-n", "auto", - "-m", "not example and not lhm", + "-m", "not example and not user_acceptance", "--cache-clear", "--verbose", "--junitxml=unittest_report.xml", @@ -41,9 +41,11 @@ examples = { cmd = [ "--verbose", "--junitxml=examples_report.xml", ], depends_on = ["install"], cwd = "imod/tests" } +# User acceptance tests, only works when paths to models are located on local +# drive and are specified in a .env file. user_acceptance = { cmd = [ "pytest", - "-m", "lhm", + "-m", "user_acceptance", "--cache-clear", "--verbose", "--junitxml=user_acceptance_report.xml", From be20a24a943c5bdb71244613fa7032e035bf599c Mon Sep 17 00:00:00 2001 From: Joeri van Engelen Date: Fri, 16 Aug 2024 14:57:38 +0200 Subject: [PATCH 17/34] update changelog --- docs/api/changelog.rst | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/docs/api/changelog.rst b/docs/api/changelog.rst index 8bdd35b79..53c474160 100644 --- a/docs/api/changelog.rst +++ b/docs/api/changelog.rst @@ -15,7 +15,8 @@ Fixed - Multiple ``HorizontalFlowBarrier`` objects attached to :class:`imod.mf6.GroundwaterFlowModel` are merged into a single horizontal flow barrier for MODFLOW 6 - +- Bug where error would be thrown when barriers in a ``HorizontalFlowBarrier`` + would be snapped to the same cell edge. These are now summed. Changed ~~~~~~~ From b505b523b00eb5a32fe90d87aadd90f4db439011 Mon Sep 17 00:00:00 2001 From: Joeri van Engelen Date: Fri, 16 Aug 2024 15:00:01 +0200 Subject: [PATCH 18/34] Also update mark in pytest ini options --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 4d56e68f6..dcaaaa437 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -157,7 +157,7 @@ ignore_missing_imports = true [tool.pytest.ini_options] markers = [ "example: marks test as example (deselect with '-m \"not example\"')", - "lhm: marks lhm tests (deselect with '-m \"not example\"')", + "user_acceptance: marks user acceptance tests (deselect with '-m \"not user_acceptance\"')", ] [tool.hatch.version] From e28b4eef117c530a2b741c825db0de97e3cda0ac Mon Sep 17 00:00:00 2001 From: Joeri van Engelen Date: Fri, 16 Aug 2024 15:03:54 +0200 Subject: [PATCH 19/34] Update geodataframe type, to avoid restricted pip installation failing. 
--- imod/mf6/hfb.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/imod/mf6/hfb.py b/imod/mf6/hfb.py index 19aad4183..8b5ddc287 100644 --- a/imod/mf6/hfb.py +++ b/imod/mf6/hfb.py @@ -377,7 +377,7 @@ def _prepare_barrier_dataset_for_mf6_adapter(dataset: xr.Dataset) -> xr.Dataset: def _snap_to_grid_and_aggregate( - barrier_dataframe: gpd.GeoDataFrame, grid2d: xu.Ugrid2d, vardict_agg: dict[str, str] + barrier_dataframe: GeoDataFrameType, grid2d: xu.Ugrid2d, vardict_agg: dict[str, str] ) -> tuple[xu.UgridDataset, npt.NDArray]: """ Snap barrier dataframe to grid and aggregate multiple lines with a list of From 756120a1410a7180c71bfdd5bc32312c08d7b998 Mon Sep 17 00:00:00 2001 From: Joeri van Engelen Date: Fri, 16 Aug 2024 16:01:52 +0200 Subject: [PATCH 20/34] Ensure shortcut taken in and comparison and improve readability --- imod/schemata.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/imod/schemata.py b/imod/schemata.py index 95b5d6a3a..fd9d8d737 100644 --- a/imod/schemata.py +++ b/imod/schemata.py @@ -74,7 +74,7 @@ def scalar_None(obj): if not isinstance(obj, (xr.DataArray, xu.UgridDataArray)): return False else: - return (len(obj.shape) == 0) & (~obj.notnull()).all() + return (len(obj.shape) == 0) and (obj.isnull()).all() def align_other_obj_with_coords( From a3b922f1c8212a1ef8e4fd263bf55b4241f25239 Mon Sep 17 00:00:00 2001 From: Joeri van Engelen Date: Fri, 16 Aug 2024 16:02:08 +0200 Subject: [PATCH 21/34] Remove automatic masking --- imod/mf6/simulation.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/imod/mf6/simulation.py b/imod/mf6/simulation.py index 810a42a1a..54613eb3c 100644 --- a/imod/mf6/simulation.py +++ b/imod/mf6/simulation.py @@ -1378,8 +1378,5 @@ def from_imod5_data( ) simulation["ims"] = solution - # cleanup packages for validation - idomain = groundwaterFlowModel.domain - simulation.mask_all_models(idomain) simulation.create_time_discretization(additional_times=times) return simulation From 6f8cd6a3975a13b948aab78038d740dfae3782a1 Mon Sep 17 00:00:00 2001 From: Joeri van Engelen Date: Fri, 16 Aug 2024 17:48:35 +0200 Subject: [PATCH 22/34] Cache from_structured grid --- imod/typing/grid.py | 34 ++++++++++++++++++++++++++++++++-- 1 file changed, 32 insertions(+), 2 deletions(-) diff --git a/imod/typing/grid.py b/imod/typing/grid.py index 0f19cefb3..d2a450cd9 100644 --- a/imod/typing/grid.py +++ b/imod/typing/grid.py @@ -435,10 +435,40 @@ def is_transient_data_grid( return False +class GridCache: + """ + Cache grids in this object for a specific function, lookup grids based on + unique geometry hash. + """ + def __init__(self, func: Callable, max_cache_size=5): + self.max_cache_size = max_cache_size + self.grid_cache: dict[int, GridDataArray] = {} + self.func = func + + def get_grid(self, grid: GridDataArray): + hash = get_grid_geometry_hash(grid) + if hash not in self.grid_cache.keys(): + if len(self.grid_cache.keys()) > self.max_cache_size: + self.remove_first() + self.grid_cache[hash] = self.func(grid) + return self.grid_cache[hash] + + def remove_first(self): + keys = list(self.grid_cache.keys()) + self.grid_cache.pop(keys[0]) + + +UGRID_FROM_STRUCTURED_CACHE = GridCache(xu.UgridDataArray.from_structured) + + @typedispatch def enforce_uda(grid: xr.DataArray) -> xu.UgridDataArray: - """Enforce GridDataArray to UgridDataArray""" - return xu.UgridDataArray.from_structured(grid) + """ + Enforce GridDataArray to UgridDataArray, calls + xu.UgridDataArray.from_structured, which is a costly operation. Therefore + cache results. 
+ """ + return UGRID_FROM_STRUCTURED_CACHE.get_grid(grid) @typedispatch # type: ignore[no-redef] From f9ddb1b4aa6608a32ad5eb39765683a6576951f8 Mon Sep 17 00:00:00 2001 From: Joeri van Engelen Date: Fri, 16 Aug 2024 17:49:53 +0200 Subject: [PATCH 23/34] format --- imod/typing/grid.py | 1 + 1 file changed, 1 insertion(+) diff --git a/imod/typing/grid.py b/imod/typing/grid.py index d2a450cd9..215efa661 100644 --- a/imod/typing/grid.py +++ b/imod/typing/grid.py @@ -440,6 +440,7 @@ class GridCache: Cache grids in this object for a specific function, lookup grids based on unique geometry hash. """ + def __init__(self, func: Callable, max_cache_size=5): self.max_cache_size = max_cache_size self.grid_cache: dict[int, GridDataArray] = {} From 6334b72b9d7bc9c01bae5098a99ba1ae6142cef5 Mon Sep 17 00:00:00 2001 From: Joeri van Engelen Date: Fri, 16 Aug 2024 17:58:19 +0200 Subject: [PATCH 24/34] Update changelog --- docs/api/changelog.rst | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docs/api/changelog.rst b/docs/api/changelog.rst index 53c474160..b6ed9efb5 100644 --- a/docs/api/changelog.rst +++ b/docs/api/changelog.rst @@ -17,6 +17,8 @@ Fixed flow barrier for MODFLOW 6 - Bug where error would be thrown when barriers in a ``HorizontalFlowBarrier`` would be snapped to the same cell edge. These are now summed. +- Improve performance validation upon Package initialization +- Improve performance writing ``HorizontalFlowBarrier`` objects Changed ~~~~~~~ From a80110bc449f93a0df5c9733effb2419845b14eb Mon Sep 17 00:00:00 2001 From: Joeri van Engelen Date: Mon, 19 Aug 2024 11:23:34 +0200 Subject: [PATCH 25/34] Cache topology instead of dataarray --- imod/typing/grid.py | 18 ++++++++++++++++-- 1 file changed, 16 insertions(+), 2 deletions(-) diff --git a/imod/typing/grid.py b/imod/typing/grid.py index 86f698d05..3f229907d 100644 --- a/imod/typing/grid.py +++ b/imod/typing/grid.py @@ -459,7 +459,7 @@ def remove_first(self): self.grid_cache.pop(keys[0]) -UGRID_FROM_STRUCTURED_CACHE = GridCache(xu.UgridDataArray.from_structured) +UGRID2D_FROM_STRUCTURED_CACHE = GridCache(xu.Ugrid2d.from_structured) @typedispatch @@ -469,7 +469,21 @@ def as_ugrid_dataarray(grid: xr.DataArray) -> xu.UgridDataArray: xu.UgridDataArray.from_structured, which is a costly operation. Therefore cache results. 
""" - return UGRID_FROM_STRUCTURED_CACHE.get_grid(grid) + + topology = UGRID2D_FROM_STRUCTURED_CACHE.get_grid(grid) + + # Copied from: + # https://github.com/Deltares/xugrid/blob/3dee693763da1c4c0859a4f53ac38d4b99613a33/xugrid/core/wrap.py#L236 + # Note that "da" is renamed to "grid" and "grid" to "topology" + dims = grid.dims[:-2] + coords = {k: grid.coords[k] for k in dims} + face_da = xr.DataArray( + grid.data.reshape(*grid.shape[:-2], -1), + coords=coords, + dims=[*dims, topology.face_dimension], + name=grid.name, + ) + return xu.UgridDataArray(face_da, topology) @typedispatch # type: ignore[no-redef] From b8fcf839a9c98c329ff2dd843dc6c55475d65096 Mon Sep 17 00:00:00 2001 From: Joeri van Engelen Date: Mon, 19 Aug 2024 11:24:13 +0200 Subject: [PATCH 26/34] Add snap_to_grid test that got accidentily removed in merge again --- imod/tests/test_mf6/test_mf6_hfb.py | 35 +++++++++++++++++++++++++++++ 1 file changed, 35 insertions(+) diff --git a/imod/tests/test_mf6/test_mf6_hfb.py b/imod/tests/test_mf6/test_mf6_hfb.py index 817922d66..b4892b302 100644 --- a/imod/tests/test_mf6/test_mf6_hfb.py +++ b/imod/tests/test_mf6/test_mf6_hfb.py @@ -698,6 +698,41 @@ def test_hfb_from_imod5(imod5_dataset, tmp_path): assert list(np.unique(hfb_package["layer"].values)) == [7] +@pytest.mark.usefixtures("structured_flow_model") +def test_snap_to_grid_and_aggregate(structured_flow_model): + idomain = structured_flow_model["dis"]["idomain"] + grid2d = xu.Ugrid2d.from_structured(idomain) + + barrier_y = [11.0, 5.0, -1.0] + barrier_x = [5.0, 5.0, 5.0] + line = linestrings(barrier_x, barrier_y) + layer = [1, 1, 1] + + geometry_triple = gpd.GeoDataFrame( + geometry=[line, line, line], + data={ + "resistance": [400.0, 400.0, 400.0], + "layer": layer, + "line_index": [0, 1, 2], + }, + ) + geometry_triple = geometry_triple.set_index("line_index") + + vardict_agg = {"resistance": "sum", "layer": "first"} + + snapped_dataset, edge_index = _snap_to_grid_and_aggregate( + geometry_triple, grid2d, vardict_agg + ) + + argwhere_summed_expected = np.array([7, 20, 33, 46, 59, 72], dtype=np.int64) + argwhere_summed_actual = np.nonzero((snapped_dataset["resistance"] == 1200).values)[ + 0 + ] + + np.testing.assert_array_equal(argwhere_summed_actual, argwhere_summed_expected) + np.testing.assert_array_equal(edge_index, argwhere_summed_expected) + + @pytest.mark.usefixtures("structured_flow_model") def test_combine_linestrings(structured_flow_model): dis = structured_flow_model["dis"] From ceb49fa58a330d8f1e61660c9fd53d21d83c5cec Mon Sep 17 00:00:00 2001 From: Joeri van Engelen Date: Mon, 19 Aug 2024 11:57:22 +0200 Subject: [PATCH 27/34] In model cleanup in tests, add mask_all_models --- imod/tests/test_mf6/test_mf6_simulation.py | 24 +++++++++++----------- 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/imod/tests/test_mf6/test_mf6_simulation.py b/imod/tests/test_mf6/test_mf6_simulation.py index b60643793..d1441071a 100644 --- a/imod/tests/test_mf6/test_mf6_simulation.py +++ b/imod/tests/test_mf6/test_mf6_simulation.py @@ -495,14 +495,15 @@ def test_import_from_imod5(imod5_dataset, tmp_path): simulation["imported_model"]["oc"] = OutputControl( save_head="last", save_budget="last" ) - simulation.create_time_discretization(["01-01-2003", "02-01-2003"]) - + # Cleanup # Remove HFB packages outside domain # TODO: Build in support for hfb packages outside domain for hfb_outside in ["hfb-24", "hfb-26"]: simulation["imported_model"].pop(hfb_outside) - + # Align NoData to domain + idomain = 
simulation["imported_model"].domain + simulation.mask_all_models(idomain) # write and validate the simulation. simulation.write(tmp_path, binary=False, validate=True) @@ -558,18 +559,18 @@ def test_import_from_imod5__nonstandard_regridding(imod5_dataset, tmp_path): times, regridding_option, ) - simulation["imported_model"]["oc"] = OutputControl( save_head="last", save_budget="last" ) - simulation.create_time_discretization(["01-01-2003", "02-01-2003"]) - + # Cleanup # Remove HFB packages outside domain # TODO: Build in support for hfb packages outside domain for hfb_outside in ["hfb-24", "hfb-26"]: simulation["imported_model"].pop(hfb_outside) - + # Align NoData to domain + idomain = simulation["imported_model"].domain + simulation.mask_all_models(idomain) # write and validate the simulation. simulation.write(tmp_path, binary=False, validate=True) @@ -594,23 +595,22 @@ def test_import_from_imod5_no_storage_no_recharge(imod5_dataset, tmp_path): default_simulation_distributing_options, times, ) - simulation["imported_model"]["oc"] = OutputControl( save_head="last", save_budget="last" ) - simulation.create_time_discretization(["01-01-2003", "02-01-2003"]) - + # Cleanup # Remove HFB packages outside domain # TODO: Build in support for hfb packages outside domain for hfb_outside in ["hfb-24", "hfb-26"]: simulation["imported_model"].pop(hfb_outside) - # check storage is present and rch is absent assert not simulation["imported_model"]["sto"].dataset["transient"].values[()] package_keys = simulation["imported_model"].keys() for key in package_keys: assert key[0:3] != "rch" - + # Align NoData to domain + idomain = simulation["imported_model"].domain + simulation.mask_all_models(idomain) # write and validate the simulation. simulation.write(tmp_path, binary=False, validate=True) From d7fd9fa9bd03d11309a87d7328e34b787ea44fc5 Mon Sep 17 00:00:00 2001 From: Joeri van Engelen Date: Mon, 19 Aug 2024 13:18:00 +0200 Subject: [PATCH 28/34] Separate slow unittests to separate task with jitting enabled --- .gitignore | 3 +-- imod/tests/test_mf6/test_mf6_chd.py | 1 + imod/tests/test_mf6/test_mf6_simulation.py | 4 ++++ pixi.toml | 13 +++++++++++-- pyproject.toml | 1 + 5 files changed, 18 insertions(+), 4 deletions(-) diff --git a/.gitignore b/.gitignore index fd95f7d8d..28f9b76a9 100644 --- a/.gitignore +++ b/.gitignore @@ -141,5 +141,4 @@ examples/data .pixi /imod/tests/mydask.png -/imod/tests/unittest_report.xml -/imod/tests/examples_report.xml +/imod/tests/*_report.xml diff --git a/imod/tests/test_mf6/test_mf6_chd.py b/imod/tests/test_mf6/test_mf6_chd.py index ef0a04108..c3b96b982 100644 --- a/imod/tests/test_mf6/test_mf6_chd.py +++ b/imod/tests/test_mf6/test_mf6_chd.py @@ -238,6 +238,7 @@ def test_from_imod5_shd(imod5_dataset, tmp_path): chd_shd.write("chd_shd", [1], write_context) +@pytest.mark.unittest_jit @pytest.mark.parametrize("remove_merged_packages", [True, False]) @pytest.mark.usefixtures("imod5_dataset") def test_concatenate_chd(imod5_dataset, tmp_path, remove_merged_packages): diff --git a/imod/tests/test_mf6/test_mf6_simulation.py b/imod/tests/test_mf6/test_mf6_simulation.py index d1441071a..b9b78fdcb 100644 --- a/imod/tests/test_mf6/test_mf6_simulation.py +++ b/imod/tests/test_mf6/test_mf6_simulation.py @@ -476,6 +476,7 @@ def compare_submodel_partition_info(first: PartitionInfo, second: PartitionInfo) ) +@pytest.mark.unittest_jit @pytest.mark.usefixtures("imod5_dataset") def test_import_from_imod5(imod5_dataset, tmp_path): imod5_data = imod5_dataset[0] @@ -508,6 +509,7 @@ def 
test_import_from_imod5(imod5_dataset, tmp_path): simulation.write(tmp_path, binary=False, validate=True) +@pytest.mark.unittest_jit @pytest.mark.usefixtures("imod5_dataset") def test_import_from_imod5__correct_well_type(imod5_dataset): # Unpack @@ -538,6 +540,7 @@ def test_import_from_imod5__correct_well_type(imod5_dataset): assert isinstance(simulation["imported_model"]["wel-WELLS_L5"], LayeredWell) +@pytest.mark.unittest_jit @pytest.mark.usefixtures("imod5_dataset") def test_import_from_imod5__nonstandard_regridding(imod5_dataset, tmp_path): imod5_data = imod5_dataset[0] @@ -575,6 +578,7 @@ def test_import_from_imod5__nonstandard_regridding(imod5_dataset, tmp_path): simulation.write(tmp_path, binary=False, validate=True) +@pytest.mark.unittest_jit @pytest.mark.usefixtures("imod5_dataset") def test_import_from_imod5_no_storage_no_recharge(imod5_dataset, tmp_path): # this test imports an imod5 simulation, but it has no recharge and no storage package. diff --git a/pixi.toml b/pixi.toml index eac3c0f4d..e7c867b60 100644 --- a/pixi.toml +++ b/pixi.toml @@ -19,11 +19,12 @@ install_with_deps = "python -m pip install --editable ." format = "ruff check --fix .; ruff format ." lint = "ruff check . ; ruff format --check ." tests = { depends_on = ["unittests", "examples"] } -unittests = { cmd = [ +unittests = { depends_on = ["unittests_njit", "unittests_jit"] } +unittests_njit = { cmd = [ "NUMBA_DISABLE_JIT=1", "pytest", "-n", "auto", - "-m", "not example and not user_acceptance", + "-m", "not example and not user_acceptance and not unittest_jit", "--cache-clear", "--verbose", "--junitxml=unittest_report.xml", @@ -32,6 +33,14 @@ unittests = { cmd = [ "--cov-report=html:coverage", "--cov-config=.coveragerc" ], depends_on = ["install"], cwd = "imod/tests" } +unittests_jit = { cmd = [ + "pytest", + "-n", "auto", + "-m", "unittest_jit", + "--cache-clear", + "--verbose", + "--junitxml=unittest_jit_report.xml", +], depends_on = ["install"], cwd = "imod/tests" } examples = { cmd = [ "NUMBA_DISABLE_JIT=1", "pytest", diff --git a/pyproject.toml b/pyproject.toml index dcaaaa437..c265cabe8 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -158,6 +158,7 @@ ignore_missing_imports = true markers = [ "example: marks test as example (deselect with '-m \"not example\"')", "user_acceptance: marks user acceptance tests (deselect with '-m \"not user_acceptance\"')", + "unittest_jit: marks unit tests that should be jitted (deselect with '-m \"not unittest_jit\"')" ] [tool.hatch.version] From 2d0d5c73a1e74e713529cb5ce35045e3a05cbadf Mon Sep 17 00:00:00 2001 From: Joeri van Engelen Date: Mon, 19 Aug 2024 13:22:58 +0200 Subject: [PATCH 29/34] Extend docstrings again that were accidentily removed during merging --- imod/mf6/hfb.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/imod/mf6/hfb.py b/imod/mf6/hfb.py index 5b5e8e220..5713be0a7 100644 --- a/imod/mf6/hfb.py +++ b/imod/mf6/hfb.py @@ -391,6 +391,13 @@ def _snap_to_grid_and_aggregate( Grid to snap lines to vardict_agg: dict Mapping of variable name to aggregation method + + Returns + ------- + snapping_dataset: xugrid.UgridDataset + Dataset with all variables snapped and aggregated to cell edges + edge_index: numpy.array + 1D array with indices of cell edges that lines were snapped to """ snapping_df = xu.create_snap_to_grid_dataframe( barrier_dataframe, grid2d, max_snap_distance=0.5 From 86a05f452f6aa1dcdefd41213ee40dc6610e7bf7 Mon Sep 17 00:00:00 2001 From: Joeri van Engelen Date: Mon, 19 Aug 2024 14:39:48 +0200 Subject: [PATCH 30/34] Make sure cache 
size is not exceeded and add method to clear cache --- imod/typing/grid.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/imod/typing/grid.py b/imod/typing/grid.py index 3f229907d..f8d8ebf6e 100644 --- a/imod/typing/grid.py +++ b/imod/typing/grid.py @@ -449,7 +449,7 @@ def __init__(self, func: Callable, max_cache_size=5): def get_grid(self, grid: GridDataArray): hash = get_grid_geometry_hash(grid) if hash not in self.grid_cache.keys(): - if len(self.grid_cache.keys()) > self.max_cache_size: + if len(self.grid_cache.keys()) >= self.max_cache_size: self.remove_first() self.grid_cache[hash] = self.func(grid) return self.grid_cache[hash] @@ -457,6 +457,9 @@ def get_grid(self, grid: GridDataArray): def remove_first(self): keys = list(self.grid_cache.keys()) self.grid_cache.pop(keys[0]) + + def clear(self): + self.grid_cache: dict[int, GridDataArray] = {} UGRID2D_FROM_STRUCTURED_CACHE = GridCache(xu.Ugrid2d.from_structured) From 4bef88d0b7533210cf500d11114ccc722e6c1c74 Mon Sep 17 00:00:00 2001 From: Joeri van Engelen Date: Mon, 19 Aug 2024 14:40:25 +0200 Subject: [PATCH 31/34] format --- imod/typing/grid.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/imod/typing/grid.py b/imod/typing/grid.py index f8d8ebf6e..748970663 100644 --- a/imod/typing/grid.py +++ b/imod/typing/grid.py @@ -457,7 +457,7 @@ def get_grid(self, grid: GridDataArray): def remove_first(self): keys = list(self.grid_cache.keys()) self.grid_cache.pop(keys[0]) - + def clear(self): self.grid_cache: dict[int, GridDataArray] = {} From 61e4752920596f54d8be956cf4d6fea3f9d8243d Mon Sep 17 00:00:00 2001 From: Joeri van Engelen Date: Mon, 19 Aug 2024 14:42:32 +0200 Subject: [PATCH 32/34] Add unittests for as_ugrid_dataarray and grid cache --- imod/tests/test_typing/test_typing_grid.py | 74 ++++++++++++++++++++++ 1 file changed, 74 insertions(+) diff --git a/imod/tests/test_typing/test_typing_grid.py b/imod/tests/test_typing/test_typing_grid.py index a1129940b..597aea135 100644 --- a/imod/tests/test_typing/test_typing_grid.py +++ b/imod/tests/test_typing/test_typing_grid.py @@ -1,7 +1,11 @@ +import numpy as np import xarray as xr import xugrid as xu from imod.typing.grid import ( + UGRID2D_FROM_STRUCTURED_CACHE, + GridCache, + as_ugrid_dataarray, enforce_dim_order, is_planar_grid, is_spatial_grid, @@ -145,3 +149,73 @@ def test_merge_dictionary__unstructured(basic_unstructured_dis): assert isinstance(uds["bottom"], xr.DataArray) assert uds["ibound"].dims == ("layer", "mesh2d_nFaces") assert uds["bottom"].dims == ("layer",) + + +def test_as_ugrid_dataarray__structured(basic_dis): + # Arrange + ibound, top, bottom = basic_dis + top_3D = top * ibound + bottom_3D = bottom * ibound + # Clear cache + UGRID2D_FROM_STRUCTURED_CACHE.clear() + # Act + ibound_disv = as_ugrid_dataarray(ibound) + top_disv = as_ugrid_dataarray(top_3D) + bottom_disv = as_ugrid_dataarray(bottom_3D) + # Assert + # Test types + assert isinstance(ibound_disv, xu.UgridDataArray) + assert isinstance(top_disv, xu.UgridDataArray) + assert isinstance(bottom_disv, xu.UgridDataArray) + # Test cache proper size + assert len(UGRID2D_FROM_STRUCTURED_CACHE.grid_cache) == 1 + # Test that data is different + assert np.all(ibound_disv != top_disv) + assert np.all(top_disv != bottom_disv) + # Test that grid is equal + assert np.all(ibound_disv.grid == top_disv.grid) + assert np.all(top_disv.grid == bottom_disv.grid) + + +def test_as_ugrid_dataarray__unstructured(basic_unstructured_dis): + # Arrange + ibound, top, bottom = basic_unstructured_dis + 
top_3D = enforce_dim_order(ibound * top) + bottom_3D = enforce_dim_order(ibound * bottom) + # Clear cache + UGRID2D_FROM_STRUCTURED_CACHE.clear() + # Act + ibound_disv = as_ugrid_dataarray(ibound) + top_disv = as_ugrid_dataarray(top_3D) + bottom_disv = as_ugrid_dataarray(bottom_3D) + # Assert + # Test types + assert isinstance(ibound_disv, xu.UgridDataArray) + assert isinstance(top_disv, xu.UgridDataArray) + assert isinstance(bottom_disv, xu.UgridDataArray) + assert len(UGRID2D_FROM_STRUCTURED_CACHE.grid_cache) == 0 + + +def test_ugrid2d_cache(basic_dis): + # Arrange + ibound, _, _ = basic_dis + # Act + cache = GridCache(xu.Ugrid2d.from_structured, max_cache_size=3) + for i in range(5): + ugrid2d = cache.get_grid(ibound[:, i:, :]) + # Assert + # Test types + assert isinstance(ugrid2d, xu.Ugrid2d) + # Test cache proper size + assert cache.max_cache_size == 3 + assert len(cache.grid_cache) == 3 + # Check if smallest grid in last cache list by checking if amount of faces + # correct + expected_size = ibound[0, i:, :].size + keys = list(cache.grid_cache.keys()) + last_ugrid = cache.grid_cache[keys[-1]] + actual_size = last_ugrid.n_face + assert expected_size == actual_size + # Test clear cache + cache.clear() + assert len(cache.grid_cache) == 0 From 999c90c17f792eee1996d58f7ef58aa5896547e9 Mon Sep 17 00:00:00 2001 From: Joeri van Engelen Date: Mon, 19 Aug 2024 14:52:43 +0200 Subject: [PATCH 33/34] Fix code smells --- imod/tests/test_typing/test_typing_grid.py | 16 ++++++++-------- imod/typing/grid.py | 8 ++++---- 2 files changed, 12 insertions(+), 12 deletions(-) diff --git a/imod/tests/test_typing/test_typing_grid.py b/imod/tests/test_typing/test_typing_grid.py index 597aea135..ff7f398b0 100644 --- a/imod/tests/test_typing/test_typing_grid.py +++ b/imod/tests/test_typing/test_typing_grid.py @@ -154,14 +154,14 @@ def test_merge_dictionary__unstructured(basic_unstructured_dis): def test_as_ugrid_dataarray__structured(basic_dis): # Arrange ibound, top, bottom = basic_dis - top_3D = top * ibound - bottom_3D = bottom * ibound + top_3d = top * ibound + bottom_3d = bottom * ibound # Clear cache UGRID2D_FROM_STRUCTURED_CACHE.clear() # Act ibound_disv = as_ugrid_dataarray(ibound) - top_disv = as_ugrid_dataarray(top_3D) - bottom_disv = as_ugrid_dataarray(bottom_3D) + top_disv = as_ugrid_dataarray(top_3d) + bottom_disv = as_ugrid_dataarray(bottom_3d) # Assert # Test types assert isinstance(ibound_disv, xu.UgridDataArray) @@ -180,14 +180,14 @@ def test_as_ugrid_dataarray__structured(basic_dis): def test_as_ugrid_dataarray__unstructured(basic_unstructured_dis): # Arrange ibound, top, bottom = basic_unstructured_dis - top_3D = enforce_dim_order(ibound * top) - bottom_3D = enforce_dim_order(ibound * bottom) + top_3d = enforce_dim_order(ibound * top) + bottom_3d = enforce_dim_order(ibound * bottom) # Clear cache UGRID2D_FROM_STRUCTURED_CACHE.clear() # Act ibound_disv = as_ugrid_dataarray(ibound) - top_disv = as_ugrid_dataarray(top_3D) - bottom_disv = as_ugrid_dataarray(bottom_3D) + top_disv = as_ugrid_dataarray(top_3d) + bottom_disv = as_ugrid_dataarray(bottom_3d) # Assert # Test types assert isinstance(ibound_disv, xu.UgridDataArray) diff --git a/imod/typing/grid.py b/imod/typing/grid.py index 748970663..5423f67f7 100644 --- a/imod/typing/grid.py +++ b/imod/typing/grid.py @@ -447,12 +447,12 @@ def __init__(self, func: Callable, max_cache_size=5): self.func = func def get_grid(self, grid: GridDataArray): - hash = get_grid_geometry_hash(grid) - if hash not in self.grid_cache.keys(): + geom_hash = 
get_grid_geometry_hash(grid) + if geom_hash not in self.grid_cache.keys(): if len(self.grid_cache.keys()) >= self.max_cache_size: self.remove_first() - self.grid_cache[hash] = self.func(grid) - return self.grid_cache[hash] + self.grid_cache[geom_hash] = self.func(grid) + return self.grid_cache[geom_hash] def remove_first(self): keys = list(self.grid_cache.keys()) From be685f109286dda516c8a1f549d8a295c2b702ce Mon Sep 17 00:00:00 2001 From: Joeri van Engelen Date: Tue, 20 Aug 2024 11:11:20 +0200 Subject: [PATCH 34/34] Fix mypy error --- imod/typing/grid.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/imod/typing/grid.py b/imod/typing/grid.py index 5423f67f7..509da6aef 100644 --- a/imod/typing/grid.py +++ b/imod/typing/grid.py @@ -459,7 +459,7 @@ def remove_first(self): self.grid_cache.pop(keys[0]) def clear(self): - self.grid_cache: dict[int, GridDataArray] = {} + self.grid_cache = {} UGRID2D_FROM_STRUCTURED_CACHE = GridCache(xu.Ugrid2d.from_structured)
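
To illustrate the effect of the caching introduced in imod/typing/grid.py above: repeated conversions of structured grids that share the same x/y coordinates now reuse a single cached Ugrid2d topology instead of recomputing it. A small sketch of the intended behaviour, using the names from the patched module; the grid values and coordinates below are made up for illustration:

    import numpy as np
    import xarray as xr

    from imod.typing.grid import UGRID2D_FROM_STRUCTURED_CACHE, as_ugrid_dataarray

    ibound = xr.DataArray(
        np.ones((3, 4, 5)),
        coords={"layer": [1, 2, 3], "y": [3.5, 2.5, 1.5, 0.5], "x": [0.5, 1.5, 2.5, 3.5, 4.5]},
        dims=("layer", "y", "x"),
    )

    UGRID2D_FROM_STRUCTURED_CACHE.clear()
    first = as_ugrid_dataarray(ibound)         # computes Ugrid2d.from_structured once
    second = as_ugrid_dataarray(ibound * 2.0)  # same geometry: topology taken from the cache
    assert len(UGRID2D_FROM_STRUCTURED_CACHE.grid_cache) == 1
    assert np.all(first.grid == second.grid)   # same check as in the new unit tests

Because the cache key is the grid geometry hash, only arrays with identical x/y coordinates hit the cache; at most max_cache_size distinct topologies (five by default) are kept, with the oldest entry evicted first.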
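
The marker changes in this series make the LHM-based tests opt-in user acceptance tests: the unittests tasks now deselect them ("not example and not user_acceptance", with the slow imod5-import tests split off into the jitted unittests_jit task), while the new user_acceptance pixi task runs only tests marked user_acceptance and relies on pytest-dotenv to load local model paths from a .env file in imod/tests. A hedged sketch of such a test; the LHM_DIR variable name and the project-file name are placeholders, since the series does not show the actual .env contents:

    import os
    from pathlib import Path

    import pytest


    @pytest.mark.user_acceptance
    def test_lhm_projectfile_present():
        # pytest-dotenv is assumed to have loaded imod/tests/.env into the
        # environment; "LHM_DIR" is a hypothetical variable pointing at the
        # locally available LHM model.
        projectfile = Path(os.environ["LHM_DIR"]) / "LHM.prj"  # placeholder file name
        assert projectfile.exists()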