Pass file IO modes as kwarg #3560

Merged
merged 4 commits on Jan 17, 2024
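
This PR applies one mechanical pattern across dev scripts and pymatgen modules: the file IO mode is passed as an explicit keyword argument (open(path, mode="w") instead of open(path, "w"), and likewise for zopen), and most terse handle names such as f and fh are renamed to file. A minimal sketch of the pattern; the file name and payload below are invented for illustration and do not appear in the diff:

    import json

    # Before: positional mode and a terse handle name.
    # with open("settings.json", "w") as f:
    #     json.dump({"key": "value"}, f)

    # After: the mode is a keyword argument and the handle has a descriptive name.
    with open("settings.json", mode="w") as file:
        json.dump({"key": "value"}, file)

Passing the mode as a keyword keeps call sites self-describing, which helps most with wrappers such as zopen where the positional slot is easy to misread.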
8 changes: 4 additions & 4 deletions dev_scripts/chemenv/explicit_permutations.py
@@ -34,11 +34,11 @@ class Algo:

 if __name__ == "__main__":
     # Choose the geometry
-    allcg = AllCoordinationGeometries()
+    all_cg = AllCoordinationGeometries()
     while True:
         cg_symbol = input("Enter symbol of the geometry for which you want to get the explicit permutations : ")
         try:
-            cg = allcg[cg_symbol]
+            cg = all_cg[cg_symbol]
             break
         except LookupError:
             print("Wrong geometry, try again ...")
@@ -93,5 +93,5 @@ class Algo:
     cg._algorithms = [ExplicitPermutationsAlgorithm(permutations=explicit_permutations)]
     new_geom_dir = "new_geometry_files"
     os.makedirs(new_geom_dir, exist_ok=True)
-    with open(f"{new_geom_dir}/{cg_symbol}.json", "w") as f:
-        json.dump(cg.as_dict(), f)
+    with open(f"{new_geom_dir}/{cg_symbol}.json", mode="w") as file:
+        json.dump(cg.as_dict(), file)
@@ -144,5 +144,5 @@
     if test == "y":
         cg._algorithms = new_algos
         cg_dict = cg.as_dict()
-        with open(f"../coordination_geometries_files_new/{cg_symbol}.json", "w") as f:
-            json.dump(cg_dict, f)
+        with open(f"../coordination_geometries_files_new/{cg_symbol}.json", mode="w") as file:
+            json.dump(cg_dict, file)
2 changes: 1 addition & 1 deletion dev_scripts/chemenv/get_plane_permutations_optimized.py
@@ -432,5 +432,5 @@ def random_permutations_iterator(initial_permutation, n_permutations):
     if test == "y":
         new_geom_dir = "new_geometry_files"
         os.makedirs(new_geom_dir, exist_ok=True)
-        with open(f"{new_geom_dir}/{cg_symbol}.json", "w") as file:
+        with open(f"{new_geom_dir}/{cg_symbol}.json", mode="w") as file:
             json.dump(cg.as_dict(), file)
2 changes: 1 addition & 1 deletion dev_scripts/potcar_scrambler.py
@@ -125,7 +125,7 @@ def scramble_single_potcar(self, potcar: PotcarSingle):
         return scrambled_potcar_str

     def to_file(self, filename: str):
-        with zopen(filename, "wt") as file:
+        with zopen(filename, mode="wt") as file:
             file.write(self.scrambled_potcars_str)

     @classmethod
16 changes: 8 additions & 8 deletions dev_scripts/regen_libxcfunc.py
@@ -30,9 +30,9 @@ def parse_section(section):
         return int(dct["Number"]), dct

     dct = {}
-    with open(path) as fh:
+    with open(path) as file:
         section = []
-        for line in fh:
+        for line in file:
             if not line.startswith("-"):
                 section.append(line)
             else:
@@ -63,8 +63,8 @@ def write_libxc_docs_json(xcfuncs, jpath):
             if desc is not None:
                 xcfuncs[num][opt] = desc

-    with open(jpath, "w") as fh:
-        json.dump(xcfuncs, fh)
+    with open(jpath, mode="w") as file:
+        json.dump(xcfuncs, file)

     return xcfuncs
@@ -101,8 +101,8 @@ def main():
     # Re-generate enumerations.
     # [0] read py module.
     xc_funcpy_path = f"{pmg_core}/libxcfunc.py"
-    with open(xc_funcpy_path) as fh:
-        lines = fh.readlines()
+    with open(xc_funcpy_path) as file:
+        lines = file.readlines()

     # [1] insert new enum values in list
     start = lines.index("#begin_include_dont_touch\n")
@@ -111,8 +111,8 @@ def main():
     del lines[start + 1 : stop]

     # [2] write new py module
-    with open(xc_funcpy_path, "w") as fh:
-        fh.writelines(lines)
+    with open(xc_funcpy_path, mode="w") as file:
+        file.writelines(lines)

     print("Files have been regenerated")
     print("Remember to update libxc_version in libxcfuncs.py!")
16 changes: 8 additions & 8 deletions dev_scripts/update_pt_data.py
@@ -56,7 +56,7 @@ def parse_oxi_state():
             data[el]["Common oxidation states"] = common_oxi
         else:
             print(el)
-    with open("periodic_table2.yaml", "w") as file:
+    with open("periodic_table2.yaml", mode="w") as file:
         yaml.dump(data, file)


@@ -87,7 +87,7 @@ def parse_ionic_radii():
             data[el]["Ionic_radii"] = ionic_radii
         else:
             print(el)
-    with open("periodic_table2.yaml", "w") as file:
+    with open("periodic_table2.yaml", mode="w") as file:
         yaml.dump(data, file)


@@ -121,9 +121,9 @@ def parse_radii():
             data[el]["Van der waals radius"] = vdw_radii
         else:
             print(el)
-    with open("periodic_table2.yaml", "w") as file:
+    with open("periodic_table2.yaml", mode="w") as file:
         yaml.dump(data, file)
-    with open("../pymatgen/core/periodic_table.json", "w") as file:
+    with open("../pymatgen/core/periodic_table.json", mode="w") as file:
         json.dump(data, file)


@@ -140,9 +140,9 @@ def update_ionic_radii():
         if "Ionic_radii_ls" in d:
             d["Ionic radii ls"] = {k: v / 100 for k, v in d["Ionic_radii_ls"].items()}
             del d["Ionic_radii_ls"]
-    with open("periodic_table2.yaml", "w") as file:
+    with open("periodic_table2.yaml", mode="w") as file:
         yaml.dump(data, file)
-    with open("../pymatgen/core/periodic_table.json", "w") as file:
+    with open("../pymatgen/core/periodic_table.json", mode="w") as file:
         json.dump(data, file)


@@ -180,14 +180,14 @@ def parse_shannon_radii():
         data[el]["Shannon radii"] = dict(radii[el])

     dumpfn(data, ptable_yaml_path)
-    with open("../pymatgen/core/periodic_table.json", "w") as file:
+    with open("../pymatgen/core/periodic_table.json", mode="w") as file:
         json.dump(data, file)


 def gen_periodic_table():
     data = loadfn(ptable_yaml_path)

-    with open("../pymatgen/core/periodic_table.json", "w") as file:
+    with open("../pymatgen/core/periodic_table.json", mode="w") as file:
         json.dump(data, file)

2 changes: 1 addition & 1 deletion pymatgen/alchemy/materials.py
@@ -207,7 +207,7 @@ def write_vasp_input(
             **kwargs: All keyword args supported by the VASP input set.
         """
         vasp_input_set(self.final_structure, **kwargs).write_input(output_dir, make_dir_if_not_present=create_directory)
-        with open(f"{output_dir}/transformations.json", "w") as fp:
+        with open(f"{output_dir}/transformations.json", mode="w") as fp:
             json.dump(self.as_dict(), fp)

     def __str__(self) -> str:
4 changes: 2 additions & 2 deletions pymatgen/alchemy/transmuters.py
@@ -308,8 +308,8 @@ def from_filenames(poscar_filenames, transformations=None, extend_collection=Fal
         """
         trafo_structs = []
         for filename in poscar_filenames:
-            with open(filename) as f:
-                trafo_structs.append(TransformedStructure.from_poscar_string(f.read(), []))
+            with open(filename) as file:
+                trafo_structs.append(TransformedStructure.from_poscar_string(file.read(), []))
         return StandardTransmuter(trafo_structs, transformations, extend_collection=extend_collection)

4 changes: 2 additions & 2 deletions pymatgen/analysis/chemenv/utils/chemenv_config.py
@@ -149,8 +149,8 @@ def save(self, root_dir=None):
         if test != "Y":
             print("Configuration not saved")
             return config_file
-        with open(config_file, "w") as f:
-            json.dump(config_dict, f)
+        with open(config_file, mode="w") as file:
+            json.dump(config_dict, file)
         print("Configuration saved")
         return config_file

4 changes: 2 additions & 2 deletions pymatgen/analysis/cost.py
@@ -91,8 +91,8 @@ def __init__(self, filename):
         # read in data from file
         self._chemsys_entries = defaultdict(list)
         filename = os.path.join(os.path.dirname(__file__), filename)
-        with open(filename) as f:
-            reader = csv.reader(f, quotechar="|")
+        with open(filename) as file:
+            reader = csv.reader(file, quotechar="|")
             for row in reader:
                 comp = Composition(row[0])
                 cost_per_mol = float(row[1]) * comp.weight.to("kg") * const.N_A
4 changes: 2 additions & 2 deletions pymatgen/analysis/diffraction/neutron.py
@@ -26,9 +26,9 @@
 __email__ = "[email protected]"
 __date__ = "4/19/18"

-with open(os.path.join(os.path.dirname(__file__), "neutron_scattering_length.json")) as f:
+with open(os.path.join(os.path.dirname(__file__), "neutron_scattering_length.json")) as file:
     # This table was cited from "Neutron Data Booklet" 2nd ed (Old City 2003).
-    ATOMIC_SCATTERING_LEN = json.load(f)
+    ATOMIC_SCATTERING_LEN = json.load(file)


 class NDCalculator(AbstractDiffractionPatternCalculator):
4 changes: 2 additions & 2 deletions pymatgen/analysis/diffraction/xrd.py
@@ -47,8 +47,8 @@
     "AgKb1": 0.497082,
 }

-with open(os.path.join(os.path.dirname(__file__), "atomic_scattering_params.json")) as f:
-    ATOMIC_SCATTERING_PARAMS = json.load(f)
+with open(os.path.join(os.path.dirname(__file__), "atomic_scattering_params.json")) as file:
+    ATOMIC_SCATTERING_PARAMS = json.load(file)


 class XRDCalculator(AbstractDiffractionPatternCalculator):
20 changes: 10 additions & 10 deletions pymatgen/analysis/graphs.py
@@ -926,17 +926,17 @@ def draw_graph_to_file(
         basename, extension = os.path.splitext(filename)
         extension = extension[1:]

-        write_dot(g, basename + ".dot")
+        write_dot(g, f"{basename}.dot")

-        with open(filename, "w") as f:
-            args = [algo, "-T", extension, basename + ".dot"]
-            with subprocess.Popen(args, stdout=f, stdin=subprocess.PIPE, close_fds=True) as rs:
+        with open(filename, mode="w") as file:
+            args = [algo, "-T", extension, f"{basename}.dot"]
+            with subprocess.Popen(args, stdout=file, stdin=subprocess.PIPE, close_fds=True) as rs:
                 rs.communicate()
                 if rs.returncode != 0:
                     raise RuntimeError(f"{algo} exited with return code {rs.returncode}.")

         if not keep_dot:
-            os.remove(basename + ".dot")
+            os.remove(f"{basename}.dot")

     @property
     def types_and_weights_of_connections(self):
@@ -2574,17 +2574,17 @@ def draw_graph_to_file(
         basename, extension = os.path.splitext(filename)
         extension = extension[1:]

-        write_dot(g, basename + ".dot")
+        write_dot(g, f"{basename}.dot")

-        with open(filename, "w") as f:
-            args = [algo, "-T", extension, basename + ".dot"]
-            with subprocess.Popen(args, stdout=f, stdin=subprocess.PIPE, close_fds=True) as rs:
+        with open(filename, mode="w") as file:
+            args = [algo, "-T", extension, f"{basename}.dot"]
+            with subprocess.Popen(args, stdout=file, stdin=subprocess.PIPE, close_fds=True) as rs:
                 rs.communicate()
                 if rs.returncode != 0:
                     raise RuntimeError(f"{algo} exited with return code {rs.returncode}.")

         if not keep_dot:
-            os.remove(basename + ".dot")
+            os.remove(f"{basename}.dot")

     def as_dict(self):
         """
4 changes: 2 additions & 2 deletions pymatgen/analysis/interface_reactions.py
@@ -28,8 +28,8 @@
 __email__ = "[email protected]"
 __date__ = "Sep 1, 2021"

-with open(os.path.join(os.path.dirname(__file__), "..", "util", "plotly_interface_rxn_layouts.json")) as f:
-    plotly_layouts = json.load(f)
+with open(os.path.join(os.path.dirname(__file__), "..", "util", "plotly_interface_rxn_layouts.json")) as file:
+    plotly_layouts = json.load(file)


 @due.dcite(
12 changes: 6 additions & 6 deletions pymatgen/analysis/local_env.py
@@ -49,14 +49,14 @@
 module_dir = os.path.dirname(os.path.abspath(__file__))
 yaml = YAML()

-with open(f"{module_dir}/op_params.yaml") as f:
-    default_op_params = yaml.load(f)
+with open(f"{module_dir}/op_params.yaml") as file:
+    default_op_params = yaml.load(file)

-with open(f"{module_dir}/cn_opt_params.yaml") as f:
-    cn_opt_params = yaml.load(f)
+with open(f"{module_dir}/cn_opt_params.yaml") as file:
+    cn_opt_params = yaml.load(file)

-with open(f"{module_dir}/ionic_radii.json") as fp:
-    _ion_radii = json.load(fp)
+with open(f"{module_dir}/ionic_radii.json") as file:
+    _ion_radii = json.load(file)


 class ValenceIonicRadiusEvaluator:
4 changes: 2 additions & 2 deletions pymatgen/apps/borg/hive.py
@@ -417,8 +417,8 @@ def _get_transformation_history(path):
     trans_json = glob(f"{path}/transformations.json*")
     if trans_json:
         try:
-            with zopen(trans_json[0]) as f:
-                return json.load(f)["history"]
+            with zopen(trans_json[0]) as file:
+                return json.load(file)["history"]
         except Exception:
             return None
     return None
22 changes: 11 additions & 11 deletions pymatgen/apps/borg/queen.py
@@ -77,12 +77,12 @@ def serial_assimilate(self, rootpath):
         count = 0
         total = len(valid_paths)
         for path in valid_paths:
-            newdata = self._drone.assimilate(path)
-            self._data.append(newdata)
+            new_data = self._drone.assimilate(path)
+            self._data.append(new_data)
             count += 1
             logger.info(f"{count}/{total} ({count / total:.2%}) done")
-        for d in data:
-            self._data.append(json.loads(d, cls=MontyDecoder))
+        for json_str in data:
+            self._data.append(json.loads(json_str, cls=MontyDecoder))

     def get_data(self):
         """Returns an list of assimilated objects."""
@@ -96,21 +96,21 @@ def save_data(self, filename):
         that if the filename ends with gz or bz2, the relevant gzip
         or bz2 compression will be applied.
         """
-        with zopen(filename, "wt") as f:
-            json.dump(list(self._data), f, cls=MontyEncoder)
+        with zopen(filename, mode="wt") as file:
+            json.dump(list(self._data), file, cls=MontyEncoder)

     def load_data(self, filename):
         """Load assimilated data from a file."""
-        with zopen(filename, "rt") as f:
-            self._data = json.load(f, cls=MontyDecoder)
+        with zopen(filename, mode="rt") as file:
+            self._data = json.load(file, cls=MontyDecoder)


 def order_assimilation(args):
     """Internal helper method for BorgQueen to process assimilation."""
     (path, drone, data, status) = args
-    newdata = drone.assimilate(path)
-    if newdata:
-        data.append(json.dumps(newdata, cls=MontyEncoder))
+    new_data = drone.assimilate(path)
+    if new_data:
+        data.append(json.dumps(new_data, cls=MontyEncoder))
     status["count"] += 1
     count = status["count"]
     total = status["total"]
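
The save_data/load_data docstring above relies on monty's zopen selecting gzip or bz2 compression from the file extension. A minimal round-trip sketch of that behaviour using the same mode= keyword convention; the file name and sample data are invented for illustration and assume monty is installed:

    import json

    from monty.io import zopen
    from monty.json import MontyDecoder, MontyEncoder

    # Stand-in for a BorgQueen-style list of assimilated results.
    data = [{"task_id": 1, "energy": -1.23}]

    # A .json.gz suffix makes zopen wrap the stream in gzip on write ...
    with zopen("assimilated.json.gz", mode="wt") as file:
        json.dump(data, file, cls=MontyEncoder)

    # ... and again on read, so callers never touch the compression layer.
    with zopen("assimilated.json.gz", mode="rt") as file:
        restored = json.load(file, cls=MontyDecoder)
    assert restored == data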
2 changes: 1 addition & 1 deletion pymatgen/cli/pmg_config.py
@@ -87,7 +87,7 @@ def setup_cp2k_data(cp2k_data_dirs: list[str]) -> None:

     for el in settings:
         print(f"Writing {el} settings file")
-        with open(os.path.join(target_dir, el), "w") as file:
+        with open(os.path.join(target_dir, el), mode="w") as file:
             yaml.dump(settings.get(el), file, default_flow_style=False)

     print(
10 changes: 3 additions & 7 deletions pymatgen/command_line/bader_caller.py
@@ -157,8 +157,8 @@ def __init__(
             )

             data = []
-            with open("ACF.dat") as f:
-                lines = f.readlines()
+            with open("ACF.dat") as file:
+                lines = file.readlines()
             headers = ("x", "y", "z", "charge", "min_dist", "atomic_vol")
             lines.pop(0)
             lines.pop(0)
@@ -184,11 +184,7 @@

             atomic_densities = []
             # For each atom in the structure
-            for _, loc, chg in zip(
-                self.chgcar.structure,
-                self.chgcar.structure.frac_coords,
-                atom_chgcars,
-            ):
+            for _site, loc, chg in zip(self.chgcar.structure, self.chgcar.structure.frac_coords, atom_chgcars):
                 # Find the index of the atom in the charge density atom
                 index = np.round(np.multiply(loc, chg.dim))

4 changes: 2 additions & 2 deletions pymatgen/command_line/chargemol_caller.py
@@ -395,8 +395,8 @@ def _write_jobscript_for_chargemol(
         bo = ".true." if compute_bond_orders else ".false."
         lines += f"\n<compute BOs>\n{bo}\n</compute BOs>\n"

-        with open("job_control.txt", "w") as fh:
-            fh.write(lines)
+        with open("job_control.txt", mode="w") as file:
+            file.write(lines)

     @staticmethod
     def _get_dipole_info(filepath):