Skip to content

Commit

Permalink
Merge pull request #60 from aiidaplugins/feature/parser-convert-total-runtime-seconds
Browse files Browse the repository at this point in the history

`LAMMPSBaseParser`: Fix parsing for nodes with the `script` input
  • Loading branch information
JPchico authored Mar 23, 2023
2 parents 52054ae + a8590b5 commit ed59902
Show file tree
Hide file tree
Showing 4 changed files with 103 additions and 28 deletions.
71 changes: 46 additions & 25 deletions aiida_lammps/parsers/lammps/lammps_parser.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,8 @@
It takes care of parsing the log.lammps file, the trajectory file and the
yaml file with the final value of the variables printed in the ``thermo_style``.
"""
import time

from aiida import orm
from aiida.common import exceptions
from aiida.parsers.parser import Parser
Expand Down Expand Up @@ -45,39 +47,55 @@ def parse(self, **kwargs):
list_of_files = out_folder.base.repository.list_object_names()

# check log file
if self.node.get_option("logfile_filename") not in list_of_files:
logfile_filename = self.node.get_option("logfile_filename")
if logfile_filename not in list_of_files:
return self.exit_codes.ERROR_LOG_FILE_MISSING
filename = self.node.get_option("logfile_filename")
parsed_data = parse_logfile(
file_contents=self.node.outputs.retrieved.base.repository.get_object_content(
filename
logfile_filename
)
)
if parsed_data is None:
return self.exit_codes.ERROR_PARSING_LOGFILE

global_data = parsed_data["global"]
arrays = parsed_data["time_dependent"]
results = {"compute_variables": global_data}

if "total_wall_time" in global_data:
try:
parsed_time = time.strptime(global_data["total_wall_time"], "%H:%M:%S")
except ValueError:
pass
else:
total_wall_time_seconds = (
parsed_time.tm_hour * 3600
+ parsed_time.tm_min * 60
+ parsed_time.tm_sec
)
global_data["total_wall_time_seconds"] = total_wall_time_seconds

# check final variable file
if self.node.get_option("variables_filename") not in list_of_files:
return self.exit_codes.ERROR_FINAL_VARIABLE_FILE_MISSING

filename = self.node.get_option("variables_filename")
final_variables = parse_final_data(
file_contents=self.node.outputs.retrieved.base.repository.get_object_content(
filename
final_variables = None
variables_filename = self.node.get_option("variables_filename")
if variables_filename not in list_of_files:
if "script" not in self.node.inputs:
return self.exit_codes.ERROR_FINAL_VARIABLE_FILE_MISSING
else:
final_variables = parse_final_data(
file_contents=self.node.outputs.retrieved.base.repository.get_object_content(
variables_filename
)
)
)
if final_variables is None:
return self.exit_codes.ERROR_PARSING_FINAL_VARIABLES
if final_variables is None:
return self.exit_codes.ERROR_PARSING_FINAL_VARIABLES

results = orm.Dict(dict={**final_variables, "compute_variables": global_data})
results.update(**final_variables)

# Expose the results from the log.lammps outputs
self.out("results", results)
self.out("results", orm.Dict(results))

# Get the time-dependent outputs exposed as an ArrayData

time_dependent_computes = orm.ArrayData()

for key, value in arrays.items():
Expand All @@ -87,15 +105,18 @@ def parse(self, **kwargs):
self.out("time_dependent_computes", time_dependent_computes)

# check trajectory file
if self.node.get_option("trajectory_filename") not in list_of_files:
return self.exit_codes.ERROR_TRAJECTORY_FILE_MISSING
# Gather the lammps trajectory data
filename = self.node.get_option("trajectory_filename")
with self.node.outputs.retrieved.base.repository.open(filename) as handle:
lammps_trajectory = LammpsTrajectory(handle)
self.out("trajectories", lammps_trajectory)

self.out("structure", lammps_trajectory.get_step_structure(-1))
trajectory_filename = self.node.get_option("trajectory_filename")
if trajectory_filename not in list_of_files:
if "script" not in self.node.inputs:
return self.exit_codes.ERROR_TRAJECTORY_FILE_MISSING
else:
with self.node.outputs.retrieved.base.repository.open(
trajectory_filename
) as handle:
lammps_trajectory = LammpsTrajectory(handle)

self.out("trajectories", lammps_trajectory)
self.out("structure", lammps_trajectory.get_step_structure(-1))

# check stdout
if self.node.get_option("scheduler_stdout") not in list_of_files:
Expand Down
39 changes: 36 additions & 3 deletions tests/test_parsers.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@
from textwrap import dedent

from aiida.cmdline.utils.common import get_calcjob_report
from aiida.orm import FolderData
from aiida.orm import FolderData, SinglefileData
from aiida.plugins import ParserFactory
import pytest
import yaml
Expand Down Expand Up @@ -50,6 +50,41 @@ def get_traj_force():
)


def test_lammps_base(db_test_app, data_regression):
    """Check if the log file is produced during calculation."""
    # Path to the reference LAMMPS log used as the parser input fixture.
    logfile_path = os.path.join(TEST_DIR, "input_files", "parsers", "log.lammps")

    # Build the mock "retrieved" folder: the real log plus empty scheduler files,
    # which the parser expects to find in the retrieved output node.
    retrieved = FolderData()
    retrieved.base.repository.put_object_from_file(logfile_path, "log.lammps")
    for scheduler_file in ("_scheduler-stdout.txt", "_scheduler-stderr.txt"):
        retrieved.base.repository.put_object_from_filelike(
            io.StringIO(""), scheduler_file
        )

    # Provide a `script` input so the parser treats missing optional output
    # files (variables/trajectory) as acceptable rather than an error.
    inputs = {"script": SinglefileData(io.StringIO(""))}
    calc_node = db_test_app.generate_calcjob_node(
        "lammps.base", retrieved, inputs=inputs
    )

    parser = ParserFactory("lammps.base")
    with db_test_app.sandbox_folder() as tmp_folder:
        _results, calcfunction = parser.parse_from_node(
            calc_node,
            retrieved_temporary_folder=tmp_folder.abspath,
        )

    assert calcfunction.is_finished_ok
    assert "results" in calcfunction.outputs
    data_regression.check({"results": calcfunction.outputs.results.get_dict()})


@pytest.mark.parametrize(
"plugin_name", ["lammps.force", "lammps.optimize", "lammps.md", "lammps.md.multi"]
)
Expand Down Expand Up @@ -221,8 +256,6 @@ def test_run_error(db_test_app, plugin_name):
retrieved_temporary_folder=temp_path.abspath,
)

print(get_calcjob_report(calc_node))

assert calcfunction.is_finished, calcfunction.exception
assert calcfunction.is_failed, calcfunction.exit_status
assert (
Expand Down
15 changes: 15 additions & 0 deletions tests/test_parsers/test_lammps_base.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,15 @@
results:
compute_variables:
bin: standard
bins:
- 1
- 1
- 1
binsize: 4.06435
ghost_atom_cutoff: 8.1287
master_list_distance_cutoff: 8.1287
max_neighbors_atom: 2000
steps_per_second: 45452.422
total_wall_time: 0:00:00
total_wall_time_seconds: 0
units_style: metal
6 changes: 6 additions & 0 deletions tests/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -254,6 +254,7 @@ def generate_calcjob_node(
retrieved,
computer_name="localhost",
attributes=None,
inputs=None,
):
"""Fixture to generate a mock `CalcJobNode` for testing parsers.
Expand Down Expand Up @@ -283,6 +284,11 @@ def generate_calcjob_node(
if attributes:
node.base.attributes.set(attributes) # pylint: disable=no-member

if inputs:
for key, value in inputs.items():
value.store()
node.add_incoming(value, link_type=LinkType.INPUT_CALC, link_label=key)

node.store()

retrieved.base.links.add_incoming(
Expand Down

0 comments on commit ed59902

Please sign in to comment.