diff --git a/autotest/test_download.py b/autotest/test_download.py
index b1ecc47..9c5ea5a 100644
--- a/autotest/test_download.py
+++ b/autotest/test_download.py
@@ -53,9 +53,9 @@ def test_get_release(repo):
     if repo == "MODFLOW-USGS/modflow6":
         # can remove if modflow6 releases follow asset name
         # conventions followed in executables and nightly build repos
-        assert set([a.rpartition("_")[2] for a in actual_names]) >= set(
-            [a for a in expected_names if not a.startswith("win")]
-        )
+        assert {a.rpartition("_")[2] for a in actual_names} >= {
+            a for a in expected_names if not a.startswith("win")
+        }
     else:
         assert set(actual_names) >= set(expected_names)

@@ -64,7 +64,7 @@
 @requires_github
 @pytest.mark.parametrize("name", [None, "rtd-files", "run-time-comparison"])
 @pytest.mark.parametrize("per_page", [None, 100])
-def test_list_artifacts(tmp_path, name, per_page):
+def test_list_artifacts(name, per_page):
     artifacts = list_artifacts(
         "MODFLOW-USGS/modflow6",
         name=name,
diff --git a/autotest/test_fixtures.py b/autotest/test_fixtures.py
index 7470745..e2e8880 100644
--- a/autotest/test_fixtures.py
+++ b/autotest/test_fixtures.py
@@ -63,14 +63,14 @@ class TestClassScopedTmpdir:

     @pytest.fixture(autouse=True)
     def setup(self, class_tmpdir):
-        with open(class_tmpdir / self.fname, "w") as file:
-            file.write("hello, class-scoped tmpdir")
+        file = class_tmpdir / self.fname
+        file.write_text("hello, class-scoped tmpdir")

     def test_class_scoped_tmpdir(self, class_tmpdir):
         assert isinstance(class_tmpdir, Path)
         assert class_tmpdir.is_dir()
         assert self.__class__.__name__ in class_tmpdir.stem
-        assert Path(class_tmpdir / self.fname).is_file()
+        assert (class_tmpdir / self.fname).is_file()


 def test_module_scoped_tmpdir(module_tmpdir):
@@ -91,33 +91,33 @@ def test_session_scoped_tmpdir(session_tmpdir):

 @pytest.mark.meta("test_keep")
 def test_keep_function_scoped_tmpdir_inner(function_tmpdir):
-    with open(function_tmpdir / test_keep_fname, "w") as f:
-        f.write("hello, function-scoped tmpdir")
+    file = function_tmpdir / test_keep_fname
+    file.write_text("hello, function-scoped tmpdir")


 @pytest.mark.meta("test_keep")
 class TestKeepClassScopedTmpdirInner:
     def test_keep_class_scoped_tmpdir_inner(self, class_tmpdir):
-        with open(class_tmpdir / test_keep_fname, "w") as f:
-            f.write("hello, class-scoped tmpdir")
+        file = class_tmpdir / test_keep_fname
+        file.write_text("hello, class-scoped tmpdir")


 @pytest.mark.meta("test_keep")
 def test_keep_module_scoped_tmpdir_inner(module_tmpdir):
-    with open(module_tmpdir / test_keep_fname, "w") as f:
-        f.write("hello, module-scoped tmpdir")
+    file = module_tmpdir / test_keep_fname
+    file.write_text("hello, module-scoped tmpdir")


 @pytest.mark.meta("test_keep")
 def test_keep_session_scoped_tmpdir_inner(session_tmpdir):
-    with open(session_tmpdir / test_keep_fname, "w") as f:
-        f.write("hello, session-scoped tmpdir")
+    file = session_tmpdir / test_keep_fname
+    file.write_text("hello, session-scoped tmpdir")


 @pytest.mark.parametrize("arg", ["--keep", "-K"])
 def test_keep_function_scoped_tmpdir(function_tmpdir, arg):
     inner_fn = test_keep_function_scoped_tmpdir_inner.__name__
-    file_path = Path(function_tmpdir / f"{inner_fn}0" / test_keep_fname)
+    file_path = function_tmpdir / f"{inner_fn}0" / test_keep_fname
     args = [
         __file__,
         "-v",
@@ -126,7 +126,7 @@ def test_keep_function_scoped_tmpdir(function_tmpdir, arg):
         inner_fn,
         "-M",
         "test_keep",
-        "-K",
+        arg,
         function_tmpdir,
     ]
     assert pytest.main(args) == ExitCode.OK
@@ -155,7 +155,7 @@ def test_keep_class_scoped_tmpdir(tmp_path, arg):
         tmp_path,
     ]
     assert pytest.main(args) == ExitCode.OK
-    assert Path(
+    assert (
         tmp_path / f"{TestKeepClassScopedTmpdirInner.__name__}0" / test_keep_fname
     ).is_file()
@@ -175,7 +175,7 @@ def test_keep_module_scoped_tmpdir(tmp_path, arg):
     ]
     assert pytest.main(args) == ExitCode.OK
     this_path = Path(__file__)
-    keep_path = tmp_path / f"{str(this_path.parent.name)}.{str(this_path.stem)}0"
+    keep_path = tmp_path / f"{this_path.parent.name}.{this_path.stem}0"
     assert test_keep_fname in [f.name for f in keep_path.glob("*")]
@@ -193,17 +193,15 @@ def test_keep_session_scoped_tmpdir(tmp_path, arg, request):
         tmp_path,
     ]
     assert pytest.main(args) == ExitCode.OK
-    assert Path(
-        tmp_path / f"{request.config.rootpath.name}0" / test_keep_fname
-    ).is_file()
+    assert (tmp_path / f"{request.config.rootpath.name}0" / test_keep_fname).is_file()


 @pytest.mark.meta("test_keep_failed")
 def test_keep_failed_function_scoped_tmpdir_inner(function_tmpdir):
-    with open(function_tmpdir / test_keep_fname, "w") as f:
-        f.write("hello, function-scoped tmpdir")
+    file = function_tmpdir / test_keep_fname
+    file.write_text("hello, function-scoped tmpdir")

-    assert False, "oh no"
+    raise AssertionError("oh no")


 @pytest.mark.parametrize("keep", [True, False])
@@ -214,7 +212,7 @@ def test_keep_failed_function_scoped_tmpdir(function_tmpdir, keep):
         args += ["--keep-failed", function_tmpdir]
     assert pytest.main(args) == ExitCode.TESTS_FAILED

-    kept_file = Path(function_tmpdir / f"{inner_fn}0" / test_keep_fname).is_file()
+    kept_file = (function_tmpdir / f"{inner_fn}0" / test_keep_fname).is_file()
     assert kept_file if keep else not kept_file
@@ -290,8 +288,8 @@ def test_large_test_model(large_test_model):

 @pytest.mark.meta("test_tabular")
 def test_tabular_inner(function_tmpdir, tabular):
-    with open(function_tmpdir / test_tabular_fname, "w") as f:
-        f.write(str(tabular))
+    file = function_tmpdir / test_tabular_fname
+    file.write_text(str(tabular))


 @pytest.mark.parametrize("tabular", ["raw", "recarray", "dataframe"])
@@ -312,5 +310,5 @@ def test_tabular(tabular, arg, function_tmpdir):
         "test_tabular",
     ]
     assert pytest.main(args) == ExitCode.OK
-    res = open(next(function_tmpdir.rglob(test_tabular_fname))).readlines()[0]
-    assert tabular == res
+    file = next(function_tmpdir.rglob(test_tabular_fname))
+    assert tabular == file.read_text()
diff --git a/autotest/test_misc.py b/autotest/test_misc.py
index 257e5b3..c56f916 100644
--- a/autotest/test_misc.py
+++ b/autotest/test_misc.py
@@ -22,10 +22,10 @@


 def test_set_dir(tmp_path):
-    assert Path(os.getcwd()) != tmp_path
+    assert Path.cwd() != tmp_path
     with set_dir(tmp_path):
-        assert Path(os.getcwd()) == tmp_path
-    assert Path(os.getcwd()) != tmp_path
+        assert Path.cwd() == tmp_path
+    assert Path.cwd() != tmp_path


 def test_set_env():
@@ -49,17 +49,13 @@ def test_set_env():
 _repos_path = Path(__file__).parent.parent.parent.parent
 _repos_path = Path(_repos_path).expanduser().absolute()
 _testmodels_repo_path = _repos_path / "modflow6-testmodels"
-_testmodels_repo_paths_mf6 = sorted(list((_testmodels_repo_path / "mf6").glob("test*")))
-_testmodels_repo_paths_mf5to6 = sorted(
-    list((_testmodels_repo_path / "mf5to6").glob("test*"))
-)
+_testmodels_repo_paths_mf6 = sorted((_testmodels_repo_path / "mf6").glob("test*"))
+_testmodels_repo_paths_mf5to6 = sorted((_testmodels_repo_path / "mf5to6").glob("test*"))
 _largetestmodels_repo_path = _repos_path / "modflow6-largetestmodels"
-_largetestmodel_paths = sorted(list(_largetestmodels_repo_path.glob("test*")))
+_largetestmodel_paths = sorted(_largetestmodels_repo_path.glob("test*"))
 _examples_repo_path = _repos_path / "modflow6-examples"
 _examples_path = _examples_repo_path / "examples"
-_example_paths = (
-    sorted(list(_examples_path.glob("ex-*"))) if _examples_path.is_dir() else []
-)
+_example_paths = sorted(_examples_path.glob("ex-*")) if _examples_path.is_dir() else []


 @pytest.mark.skipif(
@@ -95,20 +91,19 @@ def test_get_packages_fails_on_invalid_namefile(module_tmpdir):

     # invalid gwf namefile reference:
     # result should only contain packages from mfsim.nam
-    lines = open(namefile_path, "r").read().splitlines()
-    with open(namefile_path, "w") as f:
-        for line in lines:
-            if "GWF6" in line:
-                line = line.replace("GWF6", "GWF6 garbage")
-            f.write(line + os.linesep)
-    assert set(get_packages(namefile_path)) == {"gwf", "tdis", "ims"}
+    lines = []
+    for line in namefile_path.read_text().splitlines():
+        if "GWF6" in line:
+            line = line.replace("GWF6", "GWF6 garbage")
+        lines.append(line)
+    namefile_path.write_text("\n".join(lines))
+
+    with pytest.warns(UserWarning, match="Invalid namefile format"):
+        assert set(get_packages(namefile_path)) == {"gwf", "tdis", "ims"}

     # entirely unparsable namefile - result should be empty
-    lines = open(namefile_path, "r").read().splitlines()
-    with open(namefile_path, "w") as f:
-        for _ in lines:
-            f.write("garbage" + os.linesep)
-    assert not any(get_packages(namefile_path))
+    namefile_path.write_text("garbage\n" * 20)
+    assert get_packages(namefile_path) == []


 @pytest.mark.skipif(not any(_example_paths), reason="examples not found")
@@ -129,34 +124,34 @@ def test_has_package():


 def get_expected_model_dirs(path, pattern="mfsim.nam") -> List[Path]:
     folders = []
-    for root, dirs, files in os.walk(path):
+    for root, dirs, _ in os.walk(path):
         for d in dirs:
             p = Path(root) / d
             if any(p.glob(pattern)):
                 folders.append(p)
-    return sorted(list(set(folders)))
+    return sorted(set(folders))


 def get_expected_namefiles(path, pattern="mfsim.nam") -> List[Path]:
     folders = []
-    for root, dirs, files in os.walk(path):
+    for root, dirs, _ in os.walk(path):
         for d in dirs:
             p = Path(root) / d
             found = list(p.glob(pattern))
             folders = folders + found
-    return sorted(list(set(folders)))
+    return sorted(set(folders))


 @pytest.mark.skipif(not any(_example_paths), reason="modflow6-examples repo not found")
 def test_get_model_paths_examples():
     expected_paths = get_expected_model_dirs(_examples_path)
     paths = get_model_paths(_examples_path)
-    assert sorted(paths) == sorted(list(set(paths)))  # no duplicates
+    assert sorted(paths) == sorted(set(paths))  # no duplicates
     assert set(expected_paths) == set(paths)

     expected_paths = get_expected_model_dirs(_examples_path, "*.nam")
     paths = get_model_paths(_examples_path, namefile="*.nam")
-    assert sorted(paths) == sorted(list(set(paths)))
+    assert sorted(paths) == sorted(set(paths))
     assert set(expected_paths) == set(paths)
@@ -166,12 +161,12 @@ def test_get_model_paths_examples():
 def test_get_model_paths_largetestmodels():
     expected_paths = get_expected_model_dirs(_examples_path)
     paths = get_model_paths(_examples_path)
-    assert sorted(paths) == sorted(list(set(paths)))
+    assert sorted(paths) == sorted(set(paths))
     assert set(expected_paths) == set(paths)

     expected_paths = get_expected_model_dirs(_examples_path)
     paths = get_model_paths(_examples_path)
-    assert sorted(paths) == sorted(list(set(paths)))
+    assert sorted(paths) == sorted(set(paths))
     assert set(expected_paths) == set(paths)
@@ -192,12 +187,12 @@ def test_get_model_paths_exclude_patterns(models):
 def test_get_namefile_paths_examples():
     expected_paths = get_expected_namefiles(_examples_path)
     paths = get_namefile_paths(_examples_path)
-    assert paths == sorted(list(set(paths)))
+    assert paths == sorted(set(paths))
     assert set(expected_paths) == set(paths)

     expected_paths = get_expected_namefiles(_examples_path, "*.nam")
     paths = get_namefile_paths(_examples_path, namefile="*.nam")
-    assert paths == sorted(list(set(paths)))
+    assert paths == sorted(set(paths))
     assert set(expected_paths) == set(paths)
@@ -207,12 +202,12 @@ def test_get_namefile_paths_examples():
 def test_get_namefile_paths_largetestmodels():
     expected_paths = get_expected_namefiles(_largetestmodels_repo_path)
     paths = get_namefile_paths(_largetestmodels_repo_path)
-    assert paths == sorted(list(set(paths)))
+    assert paths == sorted(set(paths))
     assert set(expected_paths) == set(paths)

     expected_paths = get_expected_namefiles(_largetestmodels_repo_path)
     paths = get_namefile_paths(_largetestmodels_repo_path)
-    assert paths == sorted(list(set(paths)))
+    assert paths == sorted(set(paths))
     assert set(expected_paths) == set(paths)
diff --git a/autotest/test_snapshots.py b/autotest/test_snapshots.py
index 01d5f3e..8fc93a2 100644
--- a/autotest/test_snapshots.py
+++ b/autotest/test_snapshots.py
@@ -56,10 +56,7 @@ def test_readable_text_array_snapshot(readable_array_snapshot):
     )
     assert snapshot_path.is_file()
     assert np.allclose(
-        np.fromstring(
-            open(snapshot_path).readlines()[0].replace("[", "").replace("]", ""),
-            sep=" ",
-        ),
+        np.fromstring(snapshot_path.read_text().strip("[]\r\n"), sep=" "),
         snapshot_array,
     )
diff --git a/autotest/test_zip.py b/autotest/test_zip.py
index 96f1c79..072e87b 100644
--- a/autotest/test_zip.py
+++ b/autotest/test_zip.py
@@ -23,8 +23,8 @@ def test_compressall(function_tmpdir):
     zip_file = function_tmpdir / "output.zip"
     input_dir = function_tmpdir / "input"
     input_dir.mkdir()
-    with open(input_dir / "data.txt", "w") as f:
-        f.write("hello world")
+    file = input_dir / "data.txt"
+    file.write_text("hello world")

     MFZipFile.compressall(str(zip_file), dir_pths=str(input_dir))
     pprint(list(function_tmpdir.iterdir()))
@@ -42,8 +42,7 @@ def empty_archive(module_tmpdir) -> Path:
     # https://stackoverflow.com/a/25195628/6514033
     data = b"PK\x05\x06\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"  # noqa: E501
     path = module_tmpdir / "empty.zip"
-    with open(path, "wb") as zip:
-        zip.write(data)
+    path.write_bytes(data)
     yield path
diff --git a/modflow_devtools/build.py b/modflow_devtools/build.py
index cdb1991..51249bf 100644
--- a/modflow_devtools/build.py
+++ b/modflow_devtools/build.py
@@ -1,9 +1,6 @@
-import platform
-import subprocess
 from os import PathLike
 from pathlib import Path
-
-from modflow_devtools.misc import set_dir
+from subprocess import run


 def meson_build(
@@ -11,21 +8,31 @@ def meson_build(
     build_path: PathLike,
     bin_path: PathLike,
 ):
-    project_path = Path(project_path).expanduser().absolute()
-    build_path = Path(build_path).expanduser().absolute()
-    bin_path = Path(bin_path).expanduser().absolute()
+    project_path = Path(project_path).expanduser().resolve()
+    build_path = Path(build_path).expanduser().resolve()
+    bin_path = Path(bin_path).expanduser().resolve()
+
+    # meson setup
+    args = [
+        "meson",
+        "setup",
+        str(build_path),
+        f"--bindir={bin_path}",
+        f"--libdir={bin_path}",
+        f"--prefix={Path.cwd()}",
+    ]
+    if build_path.is_dir():
+        args.append("--wipe")
+
+    print("Running command: " + " ".join(args))
+    run(args, check=True, cwd=project_path)

-    with set_dir(Path(project_path)):
-        cmd = (
-            f"meson setup {build_path} "
-            + f"--bindir={bin_path} "
-            + f"--libdir={bin_path} "
-            + f"--prefix={('%CD%' if platform.system() == 'Windows' else '$(pwd)')}"
-            + (" --wipe" if build_path.is_dir() else "")
-        )
-        print(f"Running meson setup command: {cmd}")
-        subprocess.run(cmd, shell=True, check=True)
+    # meson compile
+    args = ["meson", "compile", "-C", str(build_path)]
+    print("Running command: " + " ".join(args))
+    run(args, check=True, cwd=project_path)

-        cmd = f"meson install -C {build_path}"
-        print(f"Running meson install command: {cmd}")
-        subprocess.run(cmd, shell=True, check=True)
+    # meson install
+    args = ["meson", "install", "-C", str(build_path)]
+    print("Running command: " + " ".join(args))
+    run(args, check=True, cwd=project_path)
diff --git a/modflow_devtools/download.py b/modflow_devtools/download.py
index 475bc06..aa21495 100644
--- a/modflow_devtools/download.py
+++ b/modflow_devtools/download.py
@@ -396,13 +396,13 @@ def download_artifact(
     if github_token:
         request.add_header("Authorization", f"Bearer {github_token}")

-    zip_path = Path(path).expanduser().absolute() / f"{str(uuid4())}.zip"
+    zip_path = Path(path).expanduser().absolute() / f"{uuid4()!s}.zip"
     tries = 0
     while True:
         tries += 1
         try:
-            with urllib.request.urlopen(request) as url_file, open(
-                zip_path, "wb"
+            with urllib.request.urlopen(request) as url_file, zip_path.open(
+                "wb"
             ) as out_file:
                 content = url_file.read()
                 out_file.write(content)
@@ -456,8 +456,10 @@ def download_and_unzip(
     Path
         The path to the directory where the zip file was unzipped
     """
-
-    path = Path(path if path else os.getcwd())
+    if path:
+        path = Path(path)
+    else:
+        path = Path.cwd()
     path.mkdir(exist_ok=True)

     if verbose:
@@ -477,8 +479,8 @@
     while True:
         tries += 1
         try:
-            with urllib.request.urlopen(request) as url_file, open(
-                file_path, "wb"
+            with urllib.request.urlopen(request) as url_file, file_path.open(
+                "wb"
             ) as out_file:
                 content = url_file.read()
                 out_file.write(content)
diff --git a/modflow_devtools/fixtures.py b/modflow_devtools/fixtures.py
index 3e6dc12..bd567df 100644
--- a/modflow_devtools/fixtures.py
+++ b/modflow_devtools/fixtures.py
@@ -319,7 +319,7 @@ def group_examples(namefile_paths) -> Dict[str, List[Path]]:
         namefile_paths, key=example_name_from_namfile_path
     ):
         # sort alphabetically (gwf < gwt)
-        nfpaths = sorted(list(paths))
+        nfpaths = sorted(paths)

         # skip if no models found
         if len(nfpaths) == 0:
@@ -333,9 +333,7 @@ def get_examples():
         # find MODFLOW 6 namfiles
         examples_path = repo_path / "examples"
         namfiles = (
-            [p for p in examples_path.rglob("mfsim.nam")]
-            if examples_path.is_dir()
-            else []
+            list(examples_path.rglob("mfsim.nam")) if examples_path.is_dir() else []
         )

         # group by scenario
@@ -377,9 +375,9 @@ def get_examples():

         return examples

-    example_scenarios = get_examples() if repo_path else dict()
+    example_scenarios = get_examples() if repo_path else {}
     metafunc.parametrize(
         key,
-        [(name, nfps) for name, nfps in example_scenarios.items()],
+        list(example_scenarios.items()),
         ids=list(example_scenarios.keys()),
     )
diff --git a/modflow_devtools/latex.py b/modflow_devtools/latex.py
index 4670c2d..c848fee 100644
--- a/modflow_devtools/latex.py
+++ b/modflow_devtools/latex.py
@@ -1,14 +1,14 @@
 from os import PathLike
 from pathlib import Path
-from typing import Iterable, Union
+from typing import Iterable, Optional, Union


 def build_table(
     caption: str,
     fpth: Union[str, PathLike],
     arr,
-    headings: Iterable[str] = None,
-    col_widths: Iterable[float] = None,
+    headings: Optional[Iterable[str]] = None,
+    col_widths: Optional[Iterable[float]] = None,
 ):
     """
     Build a LaTeX table from the given NumPy array.
@@ -32,7 +32,7 @@ def build_table(
     if headings is None:
         headings = arr.dtype.names
     ncols = len(arr.dtype.names)
-    label = "tab:{}".format(fpth.stem)
+    label = f"tab:{fpth.stem}"

     line = get_header(caption, label, headings, col_widths=col_widths)
@@ -49,15 +49,14 @@ def build_table(
     # footer
     line += get_footer()

-    with open(fpth, "w") as f:
-        f.write(line)
+    fpth.write_text(line)


 def get_header(
     caption: str,
     label: str,
     headings: Iterable[str],
-    col_widths: Iterable[float] = None,
+    col_widths: Optional[Iterable[float]] = None,
     center: bool = True,
     firsthead: bool = False,
 ):
diff --git a/modflow_devtools/misc.py b/modflow_devtools/misc.py
index cdab271..c351738 100644
--- a/modflow_devtools/misc.py
+++ b/modflow_devtools/misc.py
@@ -7,11 +7,10 @@
 from contextlib import contextmanager
 from functools import wraps
 from importlib import metadata
-from os import PathLike, chdir, environ, getcwd
-from os.path import basename, normpath
-from pathlib import Path
+from os import PathLike, chdir, environ
+from pathlib import Path, PurePosixPath
 from shutil import which
-from subprocess import PIPE, Popen
+from subprocess import run
 from timeit import timeit
 from typing import Dict, List, Optional, Tuple
 from urllib import request
@@ -20,8 +19,8 @@
 @contextmanager
 def set_dir(path: PathLike):
-    origin = Path(getcwd()).absolute()
-    wrkdir = Path(path).expanduser().absolute()
+    origin = Path.cwd()
+    wrkdir = Path(path).expanduser().resolve()

     try:
         chdir(path)
@@ -92,17 +91,14 @@ def get_suffixes(ostag) -> Tuple[str, str]:
 def run_cmd(*args, verbose=False, **kwargs):
     """
     Run any command, return tuple (stdout, stderr, returncode).
-
-    Originally written by Mike Toews (mwtoews@gmail.com) for FloPy.
     """
     args = [str(g) for g in args]
     if verbose:
         print("running: " + " ".join(args))
-    p = Popen(args, stdout=PIPE, stderr=PIPE, **kwargs)
-    stdout, stderr = p.communicate()
-    stdout = stdout.decode()
-    stderr = stderr.decode()
-    returncode = p.returncode
+    proc = run(args, capture_output=True, text=True, **kwargs)
+    stdout = proc.stdout
+    stderr = proc.stderr
+    returncode = proc.returncode
     if verbose:
         print(f"stdout:\n{stdout}")
         print(f"stderr:\n{stderr}")
@@ -136,16 +132,15 @@ def get_current_branch() -> str:
     """

     # check if on GitHub Actions CI
-    ref = environ.get("GITHUB_REF")
-    if ref is not None:
-        return basename(normpath(ref)).lower()
+    if ref := environ.get("GITHUB_REF"):
+        return PurePosixPath(ref).name

     # otherwise ask git about it
     if not which("git"):
         raise RuntimeError("'git' required to determine current branch")

-    stdout, stderr, code = run_cmd("git", "rev-parse", "--abbrev-ref", "HEAD")
+    stdout, stderr, code = run_cmd("git", "branch", "--show-current")
     if code == 0 and stdout:
-        return stdout.strip().lower()
+        return stdout.strip()
     raise ValueError(f"Could not determine current branch: {stderr}")
@@ -164,12 +159,13 @@ def get_packages(namefile_path: PathLike) -> List[str]:

     Returns
     -------
+    list
         a list of packages used by the simulation or model
     """

     packages = []
-    path = Path(namefile_path).expanduser().absolute()
-    lines = open(path, "r").readlines()
+    path = Path(namefile_path).expanduser().resolve()
+    lines = path.read_text().splitlines()
     gwf_lines = [ln for ln in lines if ln.strip().lower().startswith("gwf6 ")]
     gwt_lines = [ln for ln in lines if ln.strip().lower().startswith("gwt6 ")]
@@ -223,7 +219,7 @@ def has_package(namefile_path: PathLike, package: str) -> bool:
 def get_namefile_paths(
     path: PathLike,
-    prefix: str = None,
+    prefix: Optional[str] = None,
     namefile: str = "mfsim.nam",
     excluded=None,
     selected=None,
@@ -235,15 +231,14 @@ def get_namefile_paths(
     by parent directory name prefix or pattern, or by packages used.
     """
+    path = Path(path)

     # if path doesn't exist, return empty list
-    if not Path(path).is_dir():
+    if not path.is_dir():
         return []

     # find simulation namefiles
-    paths = [
-        p for p in Path(path).rglob(f"{prefix}*/**/{namefile}" if prefix else namefile)
-    ]
+    paths = list(path.rglob(f"{prefix}*/**/{namefile}" if prefix else namefile))

     # remove excluded
     paths = [
@@ -255,7 +250,7 @@ def get_namefile_paths(
         filtered = []
         for nfp in paths:
             nf_pkgs = get_packages(nfp)
-            shared = set(nf_pkgs).intersection(set([p.lower() for p in packages]))
+            shared = set(nf_pkgs).intersection({p.lower() for p in packages})
             if any(shared):
                 filtered.append(nfp)
         paths = filtered
@@ -273,7 +268,7 @@ def get_model_paths(
 def get_model_paths(
     path: PathLike,
-    prefix: str = None,
+    prefix: Optional[str] = None,
     namefile: str = "mfsim.nam",
     excluded=None,
     selected=None,
@@ -304,16 +299,12 @@ def keyfunc(v):
     example_paths = [p for p in globbed if p.is_dir()]
     for p in example_paths:
         for mp in sorted(
-            list(
-                set(
-                    [
-                        p.parent
-                        for p in get_namefile_paths(
-                            p, prefix, namefile, excluded, selected, packages
-                        )
-                    ]
+            {
+                p.parent
+                for p in get_namefile_paths(
+                    p, prefix, namefile, excluded, selected, packages
                 )
-            ),
+            },
             key=keyfunc,
         ):
             if mp not in model_paths:
diff --git a/modflow_devtools/ostags.py b/modflow_devtools/ostags.py
index 14186fc..07e35ad 100644
--- a/modflow_devtools/ostags.py
+++ b/modflow_devtools/ostags.py
@@ -5,7 +5,7 @@

 import sys
 from platform import processor, system
-from typing import Tuple
+from typing import Optional, Tuple

 _system = system()
 _processor = processor()
@@ -42,7 +42,7 @@
     raise ValueError(f"Invalid kind: {kind}")


-def get_binary_suffixes(ostag: str = None) -> Tuple[str, str]:
+def get_binary_suffixes(ostag: Optional[str] = None) -> Tuple[str, str]:
     """
     Returns executable and library suffixes for the given OS tag,
     if provided, otherwise for the current operating system.
diff --git a/pyproject.toml b/pyproject.toml
index a176bd1..3829ab3 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -96,7 +96,20 @@ ignore-words-list = [
 line-length = 88

 [tool.ruff.lint]
-select = ["F", "E", "I001"]
+select = [
+    # "ARG",  # flake8-unused-arguments
+    "C4",  # flake8 comprehensions
+    "D409",  # pydocstyle - section-underline-matches-section-length
+    "E", "W",  # pycodestyle
+    "F",  # Pyflakes
+    "I",  # isort
+    "PTH",  # flake8-use-pathlib
+    "RUF",  # Ruff-specific rules
+    "UP",  # pyupgrade
+]
+
+[tool.ruff.lint.per-file-ignores]
+"modflow_devtools/zip.py" = ["PTH"]

 [tool.pytest.ini_options]
 addopts = ["--import-mode=importlib"]
diff --git a/scripts/update_version.py b/scripts/update_version.py
index 26d34c0..608657b 100644
--- a/scripts/update_version.py
+++ b/scripts/update_version.py
@@ -15,30 +15,29 @@


 def update_version_txt(version: Version):
-    with open(_version_txt_path, "w") as f:
-        f.write(str(version))
+    _version_txt_path.write_text(str(version))
     print(f"Updated {_version_txt_path} to version {version}")


 def update_init_py(timestamp: datetime, version: Version):
-    lines = _package_init_path.read_text().rstrip().split("\n")
-    with open(_package_init_path, "w") as f:
-        for line in lines:
-            if "__date__" in line:
-                line = f'__date__ = "{timestamp.strftime("%b %d, %Y")}"'
-            if "__version__" in line:
-                line = f'__version__ = "{version}"'
-            f.write(f"{line}\n")
+    lines = []
+    for line in _package_init_path.read_text().splitlines():
+        if "__date__" in line:
+            line = f'__date__ = "{timestamp:%b %d, %Y}"'
+        if "__version__" in line:
+            line = f'__version__ = "{version}"'
+        lines.append(line)
+    _package_init_path.write_text("\n".join(lines) + "\n")
     print(f"Updated {_package_init_path} to version {version}")


 def update_docs_config(version: Version):
-    lines = _docs_config_path.read_text().rstrip().split("\n")
-    with open(_docs_config_path, "w") as f:
-        for line in lines:
-            line = f"release = '{version}'" if "release = " in line else line
-            f.write(f"{line}\n")
-
+    lines = []
+    for line in _docs_config_path.read_text().splitlines():
+        if "release = " in line:
+            line = f'release = "{version}"'
+        lines.append(line)
+    _docs_config_path.write_text("\n".join(lines) + "\n")
     print(f"Updated {_docs_config_path} to version {version}")
@@ -60,6 +59,8 @@ def update_version(
         update_init_py(timestamp, version)
         update_docs_config(version)

+    lock_path.unlink()
+

 if __name__ == "__main__":
     parser = argparse.ArgumentParser(