From de08b3c03d85b33e649af5db083d0213f696ff5e Mon Sep 17 00:00:00 2001
From: VsevolodX <79542055+VsevolodX@users.noreply.github.com>
Date: Wed, 7 Feb 2024 11:01:31 -0800
Subject: [PATCH 1/2] chore: fix a typo

---
 pymatgen/analysis/interfaces/coherent_interfaces.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/pymatgen/analysis/interfaces/coherent_interfaces.py b/pymatgen/analysis/interfaces/coherent_interfaces.py
index c02f38db62b..b946b87174b 100644
--- a/pymatgen/analysis/interfaces/coherent_interfaces.py
+++ b/pymatgen/analysis/interfaces/coherent_interfaces.py
@@ -129,7 +129,7 @@ def _find_terminations(self):
         film_slabs = film_sg.get_slabs()
         sub_slabs = sub_sg.get_slabs()

-        film_shits = [s.shift for s in film_slabs]
+        film_shifts = [s.shift for s in film_slabs]
         film_terminations = [label_termination(s) for s in film_slabs]

         sub_shifts = [s.shift for s in sub_slabs]
@@ -138,7 +138,7 @@ def _find_terminations(self):
         self._terminations = {
             (film_label, sub_label): (film_shift, sub_shift)
             for (film_label, film_shift), (sub_label, sub_shift) in product(
-                zip(film_terminations, film_shits), zip(sub_terminations, sub_shifts)
+                zip(film_terminations, film_shifts), zip(sub_terminations, sub_shifts)
             )
         }
         self.terminations = list(self._terminations)

From f8490d0dd557ec5454ba7c60e36524cb1e7c574c Mon Sep 17 00:00:00 2001
From: Janosh Riebesell
Date: Wed, 7 Feb 2024 20:09:48 +0100
Subject: [PATCH 2/2] rename e->exc

---
 pymatgen/analysis/local_env.py            |  6 +++---
 pymatgen/analysis/magnetism/analyzer.py   |  4 ++--
 pymatgen/analysis/magnetism/jahnteller.py |  8 ++++----
 pymatgen/analysis/phase_diagram.py        |  4 ++--
 pymatgen/electronic_structure/dos.py      |  4 ++--
 pymatgen/ext/matproj_legacy.py            | 14 +++++++-------
 pymatgen/io/abinit/abitimer.py            |  4 ++--
 pymatgen/io/vasp/optics.py                | 12 ++++++------
 pymatgen/io/vasp/outputs.py               |  4 ++--
 pymatgen/util/due.py                      | 10 +++++-----
 10 files changed, 35 insertions(+), 35 deletions(-)

diff --git a/pymatgen/analysis/local_env.py b/pymatgen/analysis/local_env.py
index 9e0b51713d6..e657d32f3fc 100644
--- a/pymatgen/analysis/local_env.py
+++ b/pymatgen/analysis/local_env.py
@@ -766,11 +766,11 @@ def get_voronoi_polyhedra(self, structure: Structure, n: int):
                 cell_info = self._extract_cell_info(0, neighbors, targets, voro, self.compute_adj_neighbors)
                 break

-            except RuntimeError as e:
+            except RuntimeError as exc:
                 if cutoff >= max_cutoff:
-                    if e.args and "vertex" in e.args[0]:
+                    if exc.args and "vertex" in exc.args[0]:
                         # pass through the error raised by _extract_cell_info
-                        raise e
+                        raise exc
                     raise RuntimeError("Error in Voronoi neighbor finding; max cutoff exceeded")
                 cutoff = min(cutoff * 2, max_cutoff + 0.001)
         return cell_info
diff --git a/pymatgen/analysis/magnetism/analyzer.py b/pymatgen/analysis/magnetism/analyzer.py
index ebb37e2efbf..453a62bcaee 100644
--- a/pymatgen/analysis/magnetism/analyzer.py
+++ b/pymatgen/analysis/magnetism/analyzer.py
@@ -324,10 +324,10 @@ def _round_magmoms(magmoms: ArrayLike, round_magmoms_mode: float) -> np.ndarray:
             # round magmoms to these extrema
             magmoms = [extrema[(np.abs(extrema - m)).argmin()] for m in magmoms]

-        except Exception as e:
+        except Exception as exc:
             # TODO: typically a singular matrix warning, investigate this
             warnings.warn("Failed to round magmoms intelligently, falling back to simple rounding.")
-            warnings.warn(str(e))
+            warnings.warn(str(exc))

         # and finally round roughly to the number of significant figures in our kde width
         num_decimals = len(str(round_magmoms_mode).split(".")[1]) + 1
diff --git a/pymatgen/analysis/magnetism/jahnteller.py b/pymatgen/analysis/magnetism/jahnteller.py
index 63bf8cb92a6..bc1b114ac85 100644
--- a/pymatgen/analysis/magnetism/jahnteller.py
+++ b/pymatgen/analysis/magnetism/jahnteller.py
@@ -282,8 +282,8 @@ def is_jahn_teller_active(
                 op_threshold=op_threshold,
             )
             active = analysis["active"]
-        except Exception as e:
-            warnings.warn(f"Error analyzing {structure.composition.reduced_formula}: {e}")
+        except Exception as exc:
+            warnings.warn(f"Error analyzing {structure.composition.reduced_formula}: {exc}")

         return active

@@ -326,8 +326,8 @@ def tag_structure(
                 jt_sites[index] = True
                 structure.add_site_property("possible_jt_active", jt_sites)
             return structure
-        except Exception as e:
-            warnings.warn(f"Error analyzing {structure.composition.reduced_formula}: {e}")
+        except Exception as exc:
+            warnings.warn(f"Error analyzing {structure.composition.reduced_formula}: {exc}")
             return structure

     @staticmethod
diff --git a/pymatgen/analysis/phase_diagram.py b/pymatgen/analysis/phase_diagram.py
index a0b5e992c80..6f617665754 100644
--- a/pymatgen/analysis/phase_diagram.py
+++ b/pymatgen/analysis/phase_diagram.py
@@ -1776,9 +1776,9 @@ def get_decomposition(self, comp: Composition) -> dict[PDEntry, float]:
         try:
             pd = self.get_pd_for_entry(comp)
             return pd.get_decomposition(comp)
-        except ValueError as e:
+        except ValueError as exc:
             # NOTE warn when stitching across pds is being used
-            warnings.warn(f"{e} Using SLSQP to find decomposition")
+            warnings.warn(f"{exc} Using SLSQP to find decomposition")
             competing_entries = self._get_stable_entries_in_space(frozenset(comp.elements))
             return _get_slsqp_decomp(comp, competing_entries)

diff --git a/pymatgen/electronic_structure/dos.py b/pymatgen/electronic_structure/dos.py
index 8d31a373c93..0f6b755e024 100644
--- a/pymatgen/electronic_structure/dos.py
+++ b/pymatgen/electronic_structure/dos.py
@@ -495,9 +495,9 @@ def get_fermi_interextrapolated(
         """
         try:
             return self.get_fermi(concentration, temperature, **kwargs)
-        except ValueError as e:
+        except ValueError as exc:
             if warn:
-                warnings.warn(str(e))
+                warnings.warn(str(exc))

             if abs(concentration) < c_ref:
                 if abs(concentration) < 1e-10:
diff --git a/pymatgen/ext/matproj_legacy.py b/pymatgen/ext/matproj_legacy.py
index cde1776b9e1..37b2eb4fe80 100644
--- a/pymatgen/ext/matproj_legacy.py
+++ b/pymatgen/ext/matproj_legacy.py
@@ -992,14 +992,14 @@ def query(
             return self._make_request("/query", payload=payload, method="POST", mp_decode=mp_decode)

         data = []
-        mids = [d["material_id"] for d in self.query(criteria, ["material_id"], chunk_size=0)]
+        mids = [dct["material_id"] for dct in self.query(criteria, ["material_id"], chunk_size=0)]
         chunks = get_chunks(mids, size=chunk_size)
         progress_bar = tqdm(total=len(mids), disable=not show_progress_bar)
         for chunk in chunks:
             chunk_criteria = criteria.copy()
             chunk_criteria.update({"material_id": {"$in": chunk}})
-            num_tries = 0
-            while num_tries < max_tries_per_chunk:
+            n_tries = 0
+            while n_tries < max_tries_per_chunk:
                 try:
                     data += self.query(
                         chunk_criteria,
                         properties,
                         chunk_size=0,
                         mp_decode=mp_decode,
                     )
                     break
-                except MPRestError as e:
-                    match = re.search(r"error status code (\d+)", str(e))
+                except MPRestError as exc:
+                    match = re.search(r"error status code (\d+)", str(exc))
                     if match:
                         if not match.group(1).startswith("5"):
-                            raise e
-                        num_tries += 1
+                            raise exc
+                        n_tries += 1
                         print(
                             "Unknown server error. Trying again in five "
                             f"seconds (will try at most {max_tries_per_chunk} times)..."
diff --git a/pymatgen/io/abinit/abitimer.py b/pymatgen/io/abinit/abitimer.py
index d4088bf8c05..33e9a243fc3 100644
--- a/pymatgen/io/abinit/abitimer.py
+++ b/pymatgen/io/abinit/abitimer.py
@@ -123,8 +123,8 @@ def parse(self, filenames):
                     self._read(file, filename)
                     read_ok.append(filename)

-            except self.Error as e:
-                logger.warning(f"exception while parsing file {filename}:\n{e}")
+            except self.Error as exc:
+                logger.warning(f"exception while parsing file {filename}:\n{exc}")
                 continue

             finally:
diff --git a/pymatgen/io/vasp/optics.py b/pymatgen/io/vasp/optics.py
index bb83c236c49..dc7f71f0f91 100644
--- a/pymatgen/io/vasp/optics.py
+++ b/pymatgen/io/vasp/optics.py
@@ -118,10 +118,10 @@ def _try_reading(dtypes):
             for dtype in dtypes:
                 try:
                     return Waveder.from_binary(f"{directory}/WAVEDER", data_type=dtype)
-                except ValueError as e:
-                    if "reshape" in str(e):
+                except ValueError as exc:
+                    if "reshape" in str(exc):
                         continue
-                    raise e
+                    raise exc
             return None

         vrun = Vasprun(f"{directory}/vasprun.xml")
@@ -386,10 +386,10 @@ def epsilon_imag(
     try:
         min_band0, max_band0 = np.min(np.where(cderm)[0]), np.max(np.where(cderm)[0])
         min_band1, max_band1 = np.min(np.where(cderm)[1]), np.max(np.where(cderm)[1])
-    except ValueError as e:
-        if "zero-size array" in str(e):
+    except ValueError as exc:
+        if "zero-size array" in str(exc):
             return egrid, np.zeros_like(egrid, dtype=np.complex_)
-        raise e
+        raise exc
     _, _, nk, nspin = cderm.shape[:4]
     iter_idx = [
         range(min_band0, max_band0 + 1),
diff --git a/pymatgen/io/vasp/outputs.py b/pymatgen/io/vasp/outputs.py
index e83530d1ce9..717a901b28a 100644
--- a/pymatgen/io/vasp/outputs.py
+++ b/pymatgen/io/vasp/outputs.py
@@ -1211,12 +1211,12 @@ def parse_atomic_symbol(symbol):
             try:
                 return str(Element(symbol))
             # vasprun.xml uses X instead of Xe for xenon
-            except ValueError as e:
+            except ValueError as exc:
                 if symbol == "X":
                     return "Xe"
                 if symbol == "r":
                     return "Zr"
-                raise e
+                raise exc

         elem.clear()
         return [parse_atomic_symbol(sym) for sym in atomic_symbols], potcar_symbols
diff --git a/pymatgen/util/due.py b/pymatgen/util/due.py
index 8b5d8e33dd5..1fc2dab7063 100644
--- a/pymatgen/util/due.py
+++ b/pymatgen/util/due.py
@@ -28,10 +28,10 @@ def _donothing(self, *args, **kwargs):
     def dcite(self, *args, **kwargs):
         """If I could cite I would."""

-        def nondecorating_decorator(func):
+        def non_decorating_decorator(func):
             return func

-        return nondecorating_decorator
+        return non_decorating_decorator

     active = False
     activate = add = cite = dump = load = _donothing
@@ -49,9 +49,9 @@ def _donothing_func(*args, **kwargs):
     if "due" in locals() and not hasattr(due, "cite"):
         raise RuntimeError("Imported due lacks .cite. DueCredit is now disabled")

-except Exception as e:
-    if not isinstance(e, ImportError):
-        logging.getLogger("duecredit").error("Failed to import duecredit due to %s" % str(e))
+except Exception as exc:
+    if not isinstance(exc, ImportError):
+        logging.getLogger("duecredit").error("Failed to import duecredit due to %s" % str(exc))
     # Initiate due stub
     due = InactiveDueCreditCollector()
     BibTeX = Doi = Url = Text = _donothing_func