Skip to content

Commit

Permalink
ruff fixes (materialsproject#894)
Browse files Browse the repository at this point in the history
* ruff fixes

* ruff unignore PLR, only ignore PLR0911 PLR0912 PLR0913 PLR0915 PLR2004, fix the rest
  • Loading branch information
janosh authored and hrushikesh-s committed Jun 28, 2024
1 parent f34d369 commit 13e731a
Show file tree
Hide file tree
Showing 25 changed files with 119 additions and 120 deletions.
4 changes: 2 additions & 2 deletions .pre-commit-config.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@ default_language_version:
exclude: ^(.github/|tests/test_data/abinit/)
repos:
- repo: https://github.com/charliermarsh/ruff-pre-commit
rev: v0.4.4
rev: v0.4.10
hooks:
- id: ruff
args: [--fix]
Expand Down Expand Up @@ -38,7 +38,7 @@ repos:
- tokenize-rt==4.1.0
- types-paramiko
- repo: https://github.com/codespell-project/codespell
rev: v2.2.6
rev: v2.3.0
hooks:
- id: codespell
stages: [commit, commit-msg]
Expand Down
4 changes: 2 additions & 2 deletions docs/dev/abinit_tests.md
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,6 @@ be added to this pseudopotential table.
Note that information from the real pseudopotential files is used in the creation
of the jobs and flows, hence fake pseudopotentials are not an option here.


### File sizes

The files produced by ABINIT are generally large and would overwhelm the size of the
Expand Down Expand Up @@ -121,7 +120,7 @@ atm dev abinit-test-data TEST_NAME

You should change `TEST_NAME` to be a name for the workflow test. Note, `TEST_NAME` should not
contain spaces or punctuation. For example, the band structure workflow test data was
genenerated using `atm dev vasp-test-data Si_band_structure`.
generated using `atm dev vasp-test-data Si_band_structure`.

This will automatically detect whether the Maker is a Job Maker or a Flow Maker and
copy files in the corresponding `tests/test_data/abinit/jobs/NameOfMaker/TEST_NAME`
Expand All @@ -145,6 +144,7 @@ a unique name. For example, there cannot be two calculations called "relax".
Instead you should ensure they are named something like "relax 1" and "relax 2".

Each `REF_RUN_FOLDER` contains:

- A folder called "inputs" with the run.abi and abinit_input.json, as well as with the
indata, outdata and tmpdata directories. The indata directory potentially contains
the reference fake input files needed for the job to be executed (e.g. a fake link to a
Expand Down
18 changes: 10 additions & 8 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -170,15 +170,14 @@ select = ["ALL"]
ignore = [
"ANN002",
"ANN003",
"ANN101", # missing self type annotation
"ANN102",
"ANN101", # missing self type annotation
"ANN102", # missing cls annotation
"ANN401",
"ARG002", # unused method argument
"BLE001",
"ARG002", # unused method argument
# "BLE001",
"C408", # Unnecessary (dict/list/tuple) call - remove call
"C901", # function too complex
"COM812", # trailing comma missing
"DTZ", # datetime-tz-now
"EM", # exception message must not use f-string literal
"ERA001", # found commented out code
"FBT001",
Expand All @@ -188,15 +187,18 @@ ignore = [
"ISC001",
"PD011", # pandas-use-of-dot-values
"PERF203", # try-except-in-loop
"PLR", # pylint-refactor
"PLR0911", # too many returns
"PLR0912", # too many branches
"PLR0913", # too many arguments
    "PLR0915", # too many statements
"PLR2004",
"PT004", # pytest-missing-fixture-name-underscore
"PT006", # pytest-parametrize-names-wrong-type
"PT013", # pytest-incorrect-pytest-import
"PTH", # prefer Pathlib to os.path
"RUF013", # implicit-optional
"S324", # use of insecure hash function
"S507", # paramiko auto trust
"SLF", # private member accessed outside class
"TD", # TODOs
"TRY003", # long message outside exception class
]
Expand All @@ -209,7 +211,7 @@ docstring-code-format = true

[tool.ruff.lint.per-file-ignores]
"__init__.py" = ["F401"]
"**/tests/*" = ["ANN", "ARG001", "D", "INP001", "S101"]
"**/tests/*" = ["ANN", "ARG001", "D", "INP001", "PLR2004", "S101"]
# flake8-type-checking (TCH): things inside TYPE_CHECKING aren't available
# at runtime and so can't be used by pydantic models
# flake8-future-annotations (FA): pipe operator for type unions only work in pydantic models in python 3.10+
Expand Down
1 change: 0 additions & 1 deletion src/atomate2/abinit/run.py
Original file line number Diff line number Diff line change
Expand Up @@ -62,4 +62,3 @@ def run_abinit(
process.terminate()

process.wait()
return
8 changes: 5 additions & 3 deletions src/atomate2/abinit/schemas/calculation.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@

import logging
import os
from datetime import datetime
from datetime import datetime, timezone
from pathlib import Path
from typing import Optional, Union

Expand Down Expand Up @@ -236,7 +236,9 @@ def from_abinit_files(

abinit_gsr = GsrFile.from_file(abinit_gsr_file)

completed_at = str(datetime.fromtimestamp(os.stat(abinit_log_file).st_mtime))
completed_at = str(
datetime.fromtimestamp(os.stat(abinit_log_file).st_mtime, tz=timezone.utc)
)

output_doc = CalculationOutput.from_abinit_gsr(abinit_gsr)

Expand All @@ -255,7 +257,7 @@ def from_abinit_files(
if report.run_completed:
has_abinit_completed = TaskState.SUCCESS

except Exception as exc:
except (ValueError, RuntimeError, Exception) as exc:
msg = f"{cls} exception while parsing event_report:\n{exc}"
logger.critical(msg)

Expand Down
20 changes: 9 additions & 11 deletions src/atomate2/abinit/sets/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -356,7 +356,7 @@ def get_input_set(
else:
if prev_outputs is not None and not self.prev_outputs_deps:
raise RuntimeError(
f"Previous outputs not allowed for {self.__class__.__name__}."
f"Previous outputs not allowed for {type(self).__name__}."
)
abinit_input = self.get_abinit_input(
structure=structure,
Expand Down Expand Up @@ -573,7 +573,7 @@ def get_abinit_input(
if self.factory_prev_inputs_kwargs:
if not prev_outputs:
raise RuntimeError(
f"No previous_outputs. Required for {self.__class__.__name__}."
f"No previous_outputs. Required for {type(self).__name__}."
)

# TODO consider cases where structure might be defined even if
Expand All @@ -588,18 +588,16 @@ def get_abinit_input(
)
total_factory_kwargs.update(abinit_inputs)

else:
# TODO check if this should be removed or the check be improved
if structure is None:
msg = (
f"Structure is mandatory for {self.__class__.__name__} "
f"generation since no previous output is used."
)
raise RuntimeError(msg)
elif structure is None:
msg = (
f"Structure is mandatory for {type(self).__name__} "
f"generation since no previous output is used."
)
raise RuntimeError(msg)

if not self.prev_outputs_deps and prev_outputs:
msg = (
f"Previous outputs not allowed for {self.__class__.__name__} "
f"Previous outputs not allowed for {type(self).__name__} "
"Consider if restart_from argument of get_input_set method "
"can fit your needs instead."
)
Expand Down
2 changes: 1 addition & 1 deletion src/atomate2/abinit/utils/common.py
Original file line number Diff line number Diff line change
Expand Up @@ -425,7 +425,7 @@ def get_event_report(ofile: File, mpiabort_file: File) -> EventReport | None:
report.append(last_abort_event)
else:
report.append(last_abort_event)
except Exception as exc:
except (ValueError, RuntimeError, Exception) as exc:
# Return a report with an error entry with info on the exception.
logger.critical(f"{ofile}: Exception while parsing ABINIT events:\n {exc!s}")
return parser.report_exception(ofile.path, exc)
Expand Down
2 changes: 1 addition & 1 deletion src/atomate2/aims/jobs/convergence.py
Original file line number Diff line number Diff line change
Expand Up @@ -97,7 +97,7 @@ def make(
"convergence_field_values": [],
"epsilon": self.epsilon,
}
convergence_data.update({"idx": idx, "converged": converged})
convergence_data.update(idx=idx, converged=converged)

if prev_dir is not None:
split_prev_dir = str(prev_dir).split(":")[-1]
Expand Down
6 changes: 4 additions & 2 deletions src/atomate2/aims/schemas/calculation.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@

import os
from collections.abc import Sequence
from datetime import datetime
from datetime import datetime, timezone
from pathlib import Path
from typing import TYPE_CHECKING, Any, Optional, Union

Expand Down Expand Up @@ -291,7 +291,9 @@ def from_aims_files(
volumetric_files = [] if volumetric_files is None else volumetric_files
aims_output = AimsOutput.from_outfile(aims_output_file)

completed_at = str(datetime.fromtimestamp(os.stat(aims_output_file).st_mtime))
completed_at = str(
datetime.fromtimestamp(os.stat(aims_output_file).st_mtime, tz=timezone.utc)
)

output_file_paths = _get_output_file_paths(volumetric_files)
aims_objects: dict[AimsObject, Any] = _get_volumetric_data(
Expand Down
4 changes: 2 additions & 2 deletions src/atomate2/aims/utils/__init__.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
"""A collection of helper utils found in atomate2 package."""

from datetime import datetime
from datetime import datetime, timezone


def datetime_str() -> str:
Expand All @@ -12,4 +12,4 @@ def datetime_str() -> str:
str
The current time.
"""
return str(datetime.utcnow())
return str(datetime.now(tz=timezone.utc))
4 changes: 2 additions & 2 deletions src/atomate2/cli/dev.py
Original file line number Diff line number Diff line change
Expand Up @@ -584,12 +584,12 @@ def test_run_{test_name}(self, mock_abinit, abinit_test_dir, clean_dir):

def save_abinit_maker(maker: Maker) -> None:
"""Save maker, the script used to create it and additional metadata."""
import datetime
import inspect
import json
import shutil
import subprocess
import warnings
from datetime import datetime, timezone

caller_frame = inspect.stack()[1]
caller_filename_full = caller_frame.filename
Expand Down Expand Up @@ -632,7 +632,7 @@ def save_abinit_maker(maker: Maker) -> None:
{
"author": author,
"author_mail": author_mail,
"created_on": str(datetime.datetime.now()),
"created_on": str(datetime.now(tz=timezone.utc)),
"maker": jsanitize(maker.as_dict()),
"script": script_str,
},
Expand Down
6 changes: 1 addition & 5 deletions src/atomate2/common/builders/magnetism.py
Original file line number Diff line number Diff line change
Expand Up @@ -88,11 +88,7 @@ def get_items(self) -> Iterator[list[dict]]:
self.ensure_indexes()

criteria = dict(self.query)
criteria.update(
{
"metadata.ordering": {"$exists": True},
}
)
criteria.update({"metadata.ordering": {"$exists": True}})
self.logger.info("Grouping by formula...")
num_formulas = len(
self.tasks.distinct("output.formula_pretty", criteria=criteria)
Expand Down
3 changes: 1 addition & 2 deletions src/atomate2/common/jobs/eos.py
Original file line number Diff line number Diff line change
Expand Up @@ -90,8 +90,7 @@ def fit(self, eos_flow_output: dict[str, Any]) -> None:
for job_type in self._use_job_types
):
raise ValueError(
f"{self.__class__} requires {self.min_data_points} "
"frames to fit an EOS."
f"{type(self)} requires {self.min_data_points} frames to fit an EOS."
)

self.sort_by_quantity()
Expand Down
6 changes: 4 additions & 2 deletions src/atomate2/common/schemas/phonons.py
Original file line number Diff line number Diff line change
Expand Up @@ -448,7 +448,7 @@ def from_forces_born(
phonon.thermal_displacement_matrices.write_cif(
phonon.primitive, idx, filename=f"tdispmat_{temp}K.cif"
)
_disp_mat = phonon._thermal_displacement_matrices
_disp_mat = phonon._thermal_displacement_matrices # noqa: SLF001
tdisp_mat = _disp_mat.thermal_displacement_matrices.tolist()

tdisp_mat_cif = _disp_mat.thermal_displacement_matrices_cif.tolist()
Expand Down Expand Up @@ -537,7 +537,9 @@ def get_kpath(
kpath = high_symm_kpath.kpath
elif kpath_scheme == "seekpath":
high_symm_kpath = KPathSeek(structure, symprec=symprec, **kpath_kwargs)
kpath = high_symm_kpath._kpath
kpath = high_symm_kpath._kpath # noqa: SLF001
else:
raise ValueError(f"Unexpected {kpath_scheme=}")

path = copy.deepcopy(kpath["path"])

Expand Down
6 changes: 4 additions & 2 deletions src/atomate2/cp2k/schemas/calculation.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@

import logging
import os
from datetime import datetime
from datetime import datetime, timezone
from pathlib import Path
from shutil import which
from typing import Any, Optional, Union
Expand Down Expand Up @@ -364,7 +364,9 @@ def from_cp2k_files(

volumetric_files = [] if volumetric_files is None else volumetric_files
cp2k_output = Cp2kOutput(cp2k_output_file, auto_load=True)
completed_at = str(datetime.fromtimestamp(os.stat(cp2k_output_file).st_mtime))
completed_at = str(
datetime.fromtimestamp(os.stat(cp2k_output_file).st_mtime, tz=timezone.utc)
)

output_file_paths = _get_output_file_paths(volumetric_files)
cp2k_objects: dict[Cp2kObject, Any] = _get_volumetric_data(
Expand Down
Loading

0 comments on commit 13e731a

Please sign in to comment.