diff --git a/pyproject.toml b/pyproject.toml index d5863fc6..5d476b8a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -31,6 +31,7 @@ dependencies = [ "pydantic", "pydantic-yaml", "aiida-core>=2.5", + "aiida-workgraph==0.4.10", "termcolor", "pygraphviz", "lxml" diff --git a/src/sirocco/parsing/_yaml_data_models.py b/src/sirocco/parsing/_yaml_data_models.py index e48bf1bf..9a5b6fa1 100644 --- a/src/sirocco/parsing/_yaml_data_models.py +++ b/src/sirocco/parsing/_yaml_data_models.py @@ -244,6 +244,7 @@ def check_period_is_not_negative_or_zero(self) -> ConfigCycle: @dataclass class ConfigBaseTaskSpecs: + computer: str | None = None host: str | None = None account: str | None = None uenv: dict | None = None @@ -389,6 +390,7 @@ class ConfigBaseDataSpecs: type: str | None = None src: str | None = None format: str | None = None + computer: str | None = None class ConfigBaseData(_NamedBaseModel, ConfigBaseDataSpecs): @@ -416,7 +418,13 @@ class ConfigAvailableData(ConfigBaseData): class ConfigGeneratedData(ConfigBaseData): - pass + @field_validator("computer") + @classmethod + def invalid_field(cls, value: str | None) -> str | None: + if value is not None: + msg = "The field 'computer' can only be specified for available data." + raise ValueError(msg) + return value class ConfigData(BaseModel): diff --git a/src/sirocco/workgraph.py b/src/sirocco/workgraph.py new file mode 100644 index 00000000..fbdc02bf --- /dev/null +++ b/src/sirocco/workgraph.py @@ -0,0 +1,332 @@ +from __future__ import annotations + +from collections import defaultdict +from pathlib import Path +from typing import TYPE_CHECKING, Any + +import aiida.common +import aiida.orm +import aiida_workgraph.engine.utils # type: ignore[import-untyped] +from aiida.common.exceptions import NotExistent +from aiida_workgraph import WorkGraph + +from sirocco.core._tasks.icon_task import IconTask +from sirocco.core._tasks.shell_task import ShellTask + +if TYPE_CHECKING: + from aiida_workgraph.socket import TaskSocket # type: ignore[import-untyped] + + from sirocco import core + from sirocco.core import graph_items + + +# This is a workaround required when splitting the initialization of the task and its linked nodes Merging this into +# aiida-workgraph properly would require significant changes see issues +# https://github.com/aiidateam/aiida-workgraph/issues/168 The function is a copy of the original function in +# aiida-workgraph. The modifications are marked by comments. +def _prepare_for_shell_task(task: dict, inputs: dict) -> dict: + """Prepare the inputs for ShellJob""" + import inspect + + from aiida_shell.launch import prepare_shell_job_inputs + + # Retrieve the signature of `prepare_shell_job_inputs` to determine expected input parameters. 
+ signature = inspect.signature(prepare_shell_job_inputs) + aiida_shell_input_keys = signature.parameters.keys() + + # Iterate over all WorkGraph `inputs`, and extract the ones which are expected by `prepare_shell_job_inputs` + inputs_aiida_shell_subset = {key: inputs[key] for key in inputs if key in aiida_shell_input_keys} + + try: + aiida_shell_inputs = prepare_shell_job_inputs(**inputs_aiida_shell_subset) + except ValueError: # noqa: TRY302 + raise + + # We need to remove the original input-keys, as they might be offending for the call to `launch_shell_job` + # E.g., `inputs` originally can contain `command`, which gets, however, transformed to # + # `code` by `prepare_shell_job_inputs` + for key in inputs_aiida_shell_subset: + inputs.pop(key) + + # Finally, we update the original `inputs` with the modified ones from the call to `prepare_shell_job_inputs` + inputs = {**inputs, **aiida_shell_inputs} + + inputs.setdefault("metadata", {}) + inputs["metadata"].update({"call_link_label": task["name"]}) + + # Workaround starts here + # This part is part of the workaround. We need to manually add the outputs from the task. + # Because kwargs are not populated with outputs + default_outputs = {"remote_folder", "remote_stash", "retrieved", "_outputs", "_wait", "stdout", "stderr"} + task_outputs = set(task["outputs"].keys()) + task_outputs = task_outputs.union(set(inputs.pop("outputs", []))) + missing_outputs = task_outputs.difference(default_outputs) + inputs["outputs"] = list(missing_outputs) + # Workaround ends here + + return inputs + + +aiida_workgraph.engine.utils.prepare_for_shell_task = _prepare_for_shell_task + + +class AiidaWorkGraph: + def __init__(self, core_workflow: core.Workflow): + # the core workflow that unrolled the time constraints for the whole graph + self._core_workflow = core_workflow + + self._validate_workflow() + + self._workgraph = WorkGraph(core_workflow.name) + + # stores the input data available on initialization + self._aiida_data_nodes: dict[str, aiida_workgraph.orm.Data] = {} + # stores the outputs sockets of tasks + self._aiida_socket_nodes: dict[str, TaskSocket] = {} + self._aiida_task_nodes: dict[str, aiida_workgraph.Task] = {} + + self._add_available_data() + self._add_tasks() + + def _validate_workflow(self): + """Checks if the core workflow uses for its tasks and data valid names for AiiDA.""" + for task in self._core_workflow.tasks: + try: + aiida.common.validate_link_label(task.name) + except ValueError as exception: + msg = f"Raised error when validating task name '{task.name}': {exception.args[0]}" + raise ValueError(msg) from exception + for input_ in task.inputs: + try: + aiida.common.validate_link_label(input_.name) + except ValueError as exception: + msg = f"Raised error when validating input name '{input_.name}': {exception.args[0]}" + raise ValueError(msg) from exception + for output in task.outputs: + try: + aiida.common.validate_link_label(output.name) + except ValueError as exception: + msg = f"Raised error when validating output name '{output.name}': {exception.args[0]}" + raise ValueError(msg) from exception + + def _add_available_data(self): + """Adds the available data on initialization to the workgraph""" + for task in self._core_workflow.tasks: + for input_ in task.inputs: + if input_.available: + self._add_aiida_input_data_node(task, input_) + + @staticmethod + def replace_invalid_chars_in_label(label: str) -> str: + """Replaces chars in the label that are invalid for AiiDA. 
+ + The invalid chars ["-", " ", ":", "."] are replaced with underscores. + """ + invalid_chars = ["-", " ", ":", "."] + for invalid_char in invalid_chars: + label = label.replace(invalid_char, "_") + return label + + @staticmethod + def get_aiida_label_from_graph_item(obj: graph_items.GraphItem) -> str: + """Returns a unique AiiDA label for the given graph item. + + The graph item object is uniquely determined by its name and its coordinates. There is the possibility that + through the replacement of invalid chars in the coordinates duplication can happen but it is unlikely. + """ + return AiidaWorkGraph.replace_invalid_chars_in_label( + f"{obj.name}" + "__".join(f"_{key}_{value}" for key, value in obj.coordinates.items()) + ) + + def _add_aiida_input_data_node(self, task: graph_items.Task, input_: graph_items.Data): + """ + Create an `aiida.orm.Data` instance from the provided graph item. + """ + label = AiidaWorkGraph.get_aiida_label_from_graph_item(input_) + input_path = Path(input_.src) + input_full_path = input_.src if input_path.is_absolute() else task.config_rootdir / input_path + + if input_.computer is not None: + try: + computer = aiida.orm.load_computer(input_.computer) + except NotExistent as err: + msg = f"Could not find computer {input_.computer!r} for input {input_}." + raise ValueError(msg) from err + self._aiida_data_nodes[label] = aiida.orm.RemoteData(remote_path=input_.src, label=label, computer=computer) + elif input_.type == "file": + self._aiida_data_nodes[label] = aiida.orm.SinglefileData(label=label, file=input_full_path) + elif input_.type == "dir": + self._aiida_data_nodes[label] = aiida.orm.FolderData(label=label, tree=input_full_path) + else: + msg = f"Data type {input_.type!r} not supported. Please use 'file' or 'dir'." + raise ValueError(msg) + + def _add_tasks(self): + """Creates the AiiDA task nodes from the `GraphItem.Task`s in the core workflow. + + This includes the linking of all input and output nodes, the arguments and wait_on tasks + """ + for task in self._core_workflow.tasks: + self._create_task_node(task) + + # NOTE: The wait on tasks has to be added after the creation of the tasks because it might reference tasks from + # the future + for task in self._core_workflow.tasks: + self._link_wait_on_to_task(task) + + for task in self._core_workflow.tasks: + for output in task.outputs: + self._link_output_nodes_to_task(task, output) + for input_ in task.inputs: + self._link_input_nodes_to_task(task, input_) + self._link_arguments_to_task(task) + + def _create_task_node(self, task: graph_items.Task): + label = AiidaWorkGraph.get_aiida_label_from_graph_item(task) + if isinstance(task, ShellTask): + command_path = Path(task.command) + command_full_path = task.command if command_path.is_absolute() else task.config_rootdir / command_path + command = str(command_full_path) + + # metadata + metadata = {} + ## Source file + env_source_paths = [ + env_source_path + if (env_source_path := Path(env_source_file)).is_absolute() + else (task.config_rootdir / env_source_path) + for env_source_file in task.env_source_files + ] + prepend_text = "\n".join([f"source {env_source_path}" for env_source_path in env_source_paths]) + metadata["options"] = {"prepend_text": prepend_text} + + ## computer + if task.computer is not None: + try: + metadata["computer"] = aiida.orm.load_computer(task.computer) + except NotExistent as err: + msg = f"Could not find computer {task.computer} for task {task}." 
+ raise ValueError(msg) from err + + # NOTE: We don't pass the `nodes` dictionary here, as then we would need to have the sockets available when + # we create the task. Instead, they are being updated via the WG internals when linking inputs/outputs to + # tasks + workgraph_task = self._workgraph.add_task( + "ShellJob", + name=label, + command=command, + arguments=[], + outputs=[], + metadata=metadata, + ) + + self._aiida_task_nodes[label] = workgraph_task + + elif isinstance(task, IconTask): + exc = "IconTask not implemented yet." + raise NotImplementedError(exc) + else: + exc = f"Task: {task.name} not implemented yet." + raise NotImplementedError(exc) + + def _link_wait_on_to_task(self, task: graph_items.Task): + label = AiidaWorkGraph.get_aiida_label_from_graph_item(task) + workgraph_task = self._aiida_task_nodes[label] + wait_on_tasks = [] + for wait_on in task.wait_on: + wait_on_task_label = AiidaWorkGraph.get_aiida_label_from_graph_item(wait_on) + wait_on_tasks.append(self._aiida_task_nodes[wait_on_task_label]) + workgraph_task.wait = wait_on_tasks + + def _link_input_nodes_to_task(self, task: graph_items.Task, input_: graph_items.Data): + """Links the input to the workgraph task.""" + task_label = AiidaWorkGraph.get_aiida_label_from_graph_item(task) + input_label = AiidaWorkGraph.get_aiida_label_from_graph_item(input_) + workgraph_task = self._aiida_task_nodes[task_label] + workgraph_task.add_input("workgraph.any", f"nodes.{input_label}") + + # resolve data + if (data_node := self._aiida_data_nodes.get(input_label)) is not None: + if not hasattr(workgraph_task.inputs.nodes, f"{input_label}"): + msg = f"Socket {input_label!r} was not found in workgraph. Please contact a developer." + raise ValueError(msg) + socket = getattr(workgraph_task.inputs.nodes, f"{input_label}") + socket.value = data_node + elif (output_socket := self._aiida_socket_nodes.get(input_label)) is not None: + self._workgraph.add_link(output_socket, workgraph_task.inputs[f"nodes.{input_label}"]) + else: + msg = ( + f"Input data node {input_label!r} was neither found in socket nodes nor in data nodes. The task " + f"{task_label!r} must have dependencies on inputs before they are created." + ) + raise ValueError(msg) + + def _link_arguments_to_task(self, task: graph_items.Task): + """Links the arguments to the workgraph task. + + Parses `cli_arguments` of the graph item task and links all arguments to the task node. It only adds arguments + corresponding to inputs if they are contained in the task. + """ + task_label = AiidaWorkGraph.get_aiida_label_from_graph_item(task) + workgraph_task = self._aiida_task_nodes[task_label] + if (workgraph_task_arguments := workgraph_task.inputs.arguments) is None: + msg = ( + f"Workgraph task {workgraph_task.name!r} did not initialize arguments nodes in the workgraph " + f"before linking. This is a bug in the code, please contact developers." 
+ ) + raise ValueError(msg) + + name_to_input_map = defaultdict(list) + for input_ in task.inputs: + name_to_input_map[input_.name].append(input_) + + # we track the linked input arguments, to ensure that all linked input nodes got linked arguments + linked_input_args = [] + for arg in task.cli_arguments: + if arg.references_data_item: + # We only add an input argument to the args if it has been added to the nodes + # This ensures that inputs and their arguments are only added + # when the time conditions are fulfilled + if (inputs := name_to_input_map.get(arg.name)) is not None: + for input_ in inputs: + input_label = AiidaWorkGraph.get_aiida_label_from_graph_item(input_) + + if arg.cli_option_of_data_item is not None: + workgraph_task_arguments.value.append(f"{arg.cli_option_of_data_item}") + workgraph_task_arguments.value.append(f"{{{input_label}}}") + linked_input_args.append(input_.name) + else: + workgraph_task_arguments.value.append(f"{arg.name}") + + # Adding remaining input nodes as positional arguments + for input_name in name_to_input_map: + if input_name not in linked_input_args: + inputs = name_to_input_map[input_name] + for input_ in inputs: + input_label = AiidaWorkGraph.get_aiida_label_from_graph_item(input_) + workgraph_task_arguments.value.append(f"{{{input_label}}}") + + def _link_output_nodes_to_task(self, task: graph_items.Task, output: graph_items.Data): + """Links the output to the workgraph task.""" + + workgraph_task = self._aiida_task_nodes[AiidaWorkGraph.get_aiida_label_from_graph_item(task)] + output_label = AiidaWorkGraph.get_aiida_label_from_graph_item(output) + output_socket = workgraph_task.add_output("workgraph.any", output.src) + self._aiida_socket_nodes[output_label] = output_socket + + def run( + self, + inputs: None | dict[str, Any] = None, + metadata: None | dict[str, Any] = None, + ) -> dict[str, Any]: + return self._workgraph.run(inputs=inputs, metadata=metadata) + + def submit( + self, + *, + inputs: None | dict[str, Any] = None, + wait: bool = False, + timeout: int = 60, + metadata: None | dict[str, Any] = None, + ) -> dict[str, Any]: + return self._workgraph.submit(inputs=inputs, wait=wait, timeout=timeout, metadata=metadata) diff --git a/tests/cases/large/config/data/dummy_source_file.sh b/tests/cases/large/config/data/dummy_source_file.sh new file mode 100644 index 00000000..e69de29b diff --git a/tests/cases/large/config/data/era5 b/tests/cases/large/config/data/era5 new file mode 100644 index 00000000..e69de29b diff --git a/tests/cases/large/config/data/grid b/tests/cases/large/config/data/grid new file mode 100644 index 00000000..e69de29b diff --git a/tests/cases/large/config/data/obs_data b/tests/cases/large/config/data/obs_data new file mode 100644 index 00000000..e69de29b diff --git a/tests/cases/large/config/scripts/cleanup.sh b/tests/cases/large/config/scripts/cleanup.sh new file mode 100755 index 00000000..bc5435d9 --- /dev/null +++ b/tests/cases/large/config/scripts/cleanup.sh @@ -0,0 +1 @@ +echo "cleanup" > output diff --git a/tests/cases/large/config/scripts/extpar b/tests/cases/large/config/scripts/extpar new file mode 100755 index 00000000..66dc5ac0 --- /dev/null +++ b/tests/cases/large/config/scripts/extpar @@ -0,0 +1 @@ +echo "extpar" > output diff --git a/tests/cases/large/config/scripts/icon b/tests/cases/large/config/scripts/icon new file mode 100755 index 00000000..ec409b0e --- /dev/null +++ b/tests/cases/large/config/scripts/icon @@ -0,0 +1,4 @@ +echo "icon" > restart +echo "icon" > output +echo "icon" > output_1 +echo "icon" 
> output_2 diff --git a/tests/cases/large/config/scripts/main_script_atm.sh b/tests/cases/large/config/scripts/main_script_atm.sh new file mode 100755 index 00000000..2a361f83 --- /dev/null +++ b/tests/cases/large/config/scripts/main_script_atm.sh @@ -0,0 +1 @@ +echo "main_script_atm.sh" > postout diff --git a/tests/cases/large/config/scripts/main_script_ocn.sh b/tests/cases/large/config/scripts/main_script_ocn.sh new file mode 100755 index 00000000..1d01b24c --- /dev/null +++ b/tests/cases/large/config/scripts/main_script_ocn.sh @@ -0,0 +1 @@ +echo "python main_script_ocn.sh" > postout diff --git a/tests/cases/large/config/scripts/post_clean.sh b/tests/cases/large/config/scripts/post_clean.sh new file mode 100755 index 00000000..b91319f8 --- /dev/null +++ b/tests/cases/large/config/scripts/post_clean.sh @@ -0,0 +1 @@ +echo "store_and_clean" > stored_data diff --git a/tests/cases/large/config/test_config_large.yml b/tests/cases/large/config/test_config_large.yml index 8922879d..9b0961bc 100644 --- a/tests/cases/large/config/test_config_large.yml +++ b/tests/cases/large/config/test_config_large.yml @@ -61,7 +61,7 @@ tasks: account: g110 - extpar: plugin: shell # no extpar plugin available yet - command: $PWD/examples/files/scripts/extpar + command: scripts/extpar cli_arguments: "--verbose {--input obs_data}" uenv: squashfs: path/to/squashfs @@ -70,17 +70,17 @@ tasks: walltime: 00:02:00 - preproc: plugin: shell - command: $PWD/examples/files/scripts/cleanup.sh + command: scripts/cleanup.sh cli_arguments: "{-p extpar_file} {-e ERA5} {grid_file}" - env_source_files: $PWD/examples/files/data/dummy_source_file.sh + env_source_files: data/dummy_source_file.sh nodes: 4 walltime: 00:02:00 uenv: squashfs: path/to/squashfs mount_point: runtime/mount/point - icon: - plugin: icon - command: $PWD/examples/files/scripts/icon + plugin: shell + command: scripts/icon cli_arguments: "{-g grid_file} {--input icon_input}" nodes: 40 walltime: 23:59:59 @@ -92,7 +92,7 @@ tasks: mount_point: runtime/mount/point - postproc_1: plugin: shell - command: $PWD/examples/files/scripts/main_script_ocn.sh + command: scripts/main_script_ocn.sh cli_arguments: "{--input stream_1}" nodes: 2 walltime: 00:05:00 @@ -101,7 +101,7 @@ tasks: mount_point: runtime/mount/point - postproc_2: plugin: shell - command: $PWD/examples/files/scripts/main_script_atm.sh + command: scripts/main_script_atm.sh cli_arguments: "{--input stream_2}" nodes: 2 walltime: 00:05:00 @@ -111,13 +111,13 @@ tasks: mount_point: runtime/mount/point - store_and_clean_1: plugin: shell - command: $PWD/examples/files/scripts/post_clean.sh + command: scripts/post_clean.sh cli_arguments: "{--input postout_1} {--stream stream_1} {--icon_input icon_input}" nodes: 1 walltime: 00:01:00 - store_and_clean_2: plugin: shell - command: $PWD/examples/files/scripts/post_clean.sh + command: scripts/post_clean.sh cli_arguments: "{--input postout_2}" nodes: 1 walltime: 00:01:00 @@ -125,13 +125,13 @@ data: available: - grid_file: type: file - src: $PWD/examples/files/data/grid + src: data/grid - obs_data: type: file - src: $PWD/examples/files/data/obs_data + src: data/obs_data - ERA5: type: file - src: $PWD/examples/files/data/era5 + src: data/era5 generated: - extpar_file: type: file diff --git a/tests/cases/large/data/test_config_large.txt b/tests/cases/large/data/test_config_large.txt index 889fcd23..649cefdc 100644 --- a/tests/cases/large/data/test_config_large.txt +++ b/tests/cases/large/data/test_config_large.txt @@ -12,7 +12,7 @@ cycles: nodes: 1 walltime: 
time.struct_time(tm_year=1900, tm_mon=1, tm_mday=1, tm_hour=0, tm_min=2, tm_sec=0, tm_wday=0, tm_yday=1, tm_isdst=-1) plugin: 'shell' - command: '$PWD/examples/files/scripts/extpar' + command: 'scripts/extpar' cli arguments: [ShellCliArgument(name='--verbose', references_data_item=False, cli_option_of_data_item=None), ShellCliArgument(name='obs_data', references_data_item=True, cli_option_of_data_item='--input')] env source files: [] - icon_bimonthly [date: 2025-01-01 00:00:00]: @@ -32,9 +32,9 @@ cycles: start date: 2025-01-01 00:00:00 end date: 2027-01-01 00:00:00 plugin: 'shell' - command: '$PWD/examples/files/scripts/cleanup.sh' + command: 'scripts/cleanup.sh' cli arguments: [ShellCliArgument(name='extpar_file', references_data_item=True, cli_option_of_data_item='-p'), ShellCliArgument(name='ERA5', references_data_item=True, cli_option_of_data_item='-e'), ShellCliArgument(name='grid_file', references_data_item=True, cli_option_of_data_item=None)] - env source files: ['$PWD/examples/files/data/dummy_source_file.sh'] + env source files: ['data/dummy_source_file.sh'] - icon [date: 2025-01-01 00:00:00]: input: - grid_file @@ -50,8 +50,10 @@ cycles: walltime: time.struct_time(tm_year=1900, tm_mon=1, tm_mday=1, tm_hour=23, tm_min=59, tm_sec=59, tm_wday=0, tm_yday=1, tm_isdst=-1) start date: 2025-01-01 00:00:00 end date: 2027-01-01 00:00:00 - plugin: 'icon' - namelists: {'master': 'path/to/mater_nml', 'model': 'path/to/model_nml'} + plugin: 'shell' + command: 'scripts/icon' + cli arguments: [ShellCliArgument(name='grid_file', references_data_item=True, cli_option_of_data_item='-g'), ShellCliArgument(name='icon_input', references_data_item=True, cli_option_of_data_item='--input')] + env source files: [] - postproc_1 [date: 2025-01-01 00:00:00]: input: - stream_1 [date: 2025-01-01 00:00:00] @@ -65,7 +67,7 @@ cycles: start date: 2025-01-01 00:00:00 end date: 2027-01-01 00:00:00 plugin: 'shell' - command: '$PWD/examples/files/scripts/main_script_ocn.sh' + command: 'scripts/main_script_ocn.sh' cli arguments: [ShellCliArgument(name='stream_1', references_data_item=True, cli_option_of_data_item='--input')] env source files: [] - store_and_clean_1 [date: 2025-01-01 00:00:00]: @@ -82,7 +84,7 @@ cycles: start date: 2025-01-01 00:00:00 end date: 2027-01-01 00:00:00 plugin: 'shell' - command: '$PWD/examples/files/scripts/post_clean.sh' + command: 'scripts/post_clean.sh' cli arguments: [ShellCliArgument(name='postout_1', references_data_item=True, cli_option_of_data_item='--input'), ShellCliArgument(name='stream_1', references_data_item=True, cli_option_of_data_item='--stream'), ShellCliArgument(name='icon_input', references_data_item=True, cli_option_of_data_item='--icon_input')] env source files: [] - icon_bimonthly [date: 2025-03-01 00:00:00]: @@ -102,9 +104,9 @@ cycles: start date: 2025-01-01 00:00:00 end date: 2027-01-01 00:00:00 plugin: 'shell' - command: '$PWD/examples/files/scripts/cleanup.sh' + command: 'scripts/cleanup.sh' cli arguments: [ShellCliArgument(name='extpar_file', references_data_item=True, cli_option_of_data_item='-p'), ShellCliArgument(name='ERA5', references_data_item=True, cli_option_of_data_item='-e'), ShellCliArgument(name='grid_file', references_data_item=True, cli_option_of_data_item=None)] - env source files: ['$PWD/examples/files/data/dummy_source_file.sh'] + env source files: ['data/dummy_source_file.sh'] - icon [date: 2025-03-01 00:00:00]: input: - grid_file @@ -121,8 +123,10 @@ cycles: walltime: time.struct_time(tm_year=1900, tm_mon=1, tm_mday=1, tm_hour=23, tm_min=59, 
tm_sec=59, tm_wday=0, tm_yday=1, tm_isdst=-1) start date: 2025-01-01 00:00:00 end date: 2027-01-01 00:00:00 - plugin: 'icon' - namelists: {'master': 'path/to/mater_nml', 'model': 'path/to/model_nml'} + plugin: 'shell' + command: 'scripts/icon' + cli arguments: [ShellCliArgument(name='grid_file', references_data_item=True, cli_option_of_data_item='-g'), ShellCliArgument(name='icon_input', references_data_item=True, cli_option_of_data_item='--input')] + env source files: [] - postproc_1 [date: 2025-03-01 00:00:00]: input: - stream_1 [date: 2025-03-01 00:00:00] @@ -136,7 +140,7 @@ cycles: start date: 2025-01-01 00:00:00 end date: 2027-01-01 00:00:00 plugin: 'shell' - command: '$PWD/examples/files/scripts/main_script_ocn.sh' + command: 'scripts/main_script_ocn.sh' cli arguments: [ShellCliArgument(name='stream_1', references_data_item=True, cli_option_of_data_item='--input')] env source files: [] - store_and_clean_1 [date: 2025-03-01 00:00:00]: @@ -153,7 +157,7 @@ cycles: start date: 2025-01-01 00:00:00 end date: 2027-01-01 00:00:00 plugin: 'shell' - command: '$PWD/examples/files/scripts/post_clean.sh' + command: 'scripts/post_clean.sh' cli arguments: [ShellCliArgument(name='postout_1', references_data_item=True, cli_option_of_data_item='--input'), ShellCliArgument(name='stream_1', references_data_item=True, cli_option_of_data_item='--stream'), ShellCliArgument(name='icon_input', references_data_item=True, cli_option_of_data_item='--icon_input')] env source files: [] - icon_bimonthly [date: 2025-05-01 00:00:00]: @@ -175,9 +179,9 @@ cycles: start date: 2025-01-01 00:00:00 end date: 2027-01-01 00:00:00 plugin: 'shell' - command: '$PWD/examples/files/scripts/cleanup.sh' + command: 'scripts/cleanup.sh' cli arguments: [ShellCliArgument(name='extpar_file', references_data_item=True, cli_option_of_data_item='-p'), ShellCliArgument(name='ERA5', references_data_item=True, cli_option_of_data_item='-e'), ShellCliArgument(name='grid_file', references_data_item=True, cli_option_of_data_item=None)] - env source files: ['$PWD/examples/files/data/dummy_source_file.sh'] + env source files: ['data/dummy_source_file.sh'] - icon [date: 2025-05-01 00:00:00]: input: - grid_file @@ -194,8 +198,10 @@ cycles: walltime: time.struct_time(tm_year=1900, tm_mon=1, tm_mday=1, tm_hour=23, tm_min=59, tm_sec=59, tm_wday=0, tm_yday=1, tm_isdst=-1) start date: 2025-01-01 00:00:00 end date: 2027-01-01 00:00:00 - plugin: 'icon' - namelists: {'master': 'path/to/mater_nml', 'model': 'path/to/model_nml'} + plugin: 'shell' + command: 'scripts/icon' + cli arguments: [ShellCliArgument(name='grid_file', references_data_item=True, cli_option_of_data_item='-g'), ShellCliArgument(name='icon_input', references_data_item=True, cli_option_of_data_item='--input')] + env source files: [] - postproc_1 [date: 2025-05-01 00:00:00]: input: - stream_1 [date: 2025-05-01 00:00:00] @@ -209,7 +215,7 @@ cycles: start date: 2025-01-01 00:00:00 end date: 2027-01-01 00:00:00 plugin: 'shell' - command: '$PWD/examples/files/scripts/main_script_ocn.sh' + command: 'scripts/main_script_ocn.sh' cli arguments: [ShellCliArgument(name='stream_1', references_data_item=True, cli_option_of_data_item='--input')] env source files: [] - store_and_clean_1 [date: 2025-05-01 00:00:00]: @@ -226,7 +232,7 @@ cycles: start date: 2025-01-01 00:00:00 end date: 2027-01-01 00:00:00 plugin: 'shell' - command: '$PWD/examples/files/scripts/post_clean.sh' + command: 'scripts/post_clean.sh' cli arguments: [ShellCliArgument(name='postout_1', references_data_item=True, 
cli_option_of_data_item='--input'), ShellCliArgument(name='stream_1', references_data_item=True, cli_option_of_data_item='--stream'), ShellCliArgument(name='icon_input', references_data_item=True, cli_option_of_data_item='--icon_input')] env source files: [] - icon_bimonthly [date: 2025-07-01 00:00:00]: @@ -248,9 +254,9 @@ cycles: start date: 2025-01-01 00:00:00 end date: 2027-01-01 00:00:00 plugin: 'shell' - command: '$PWD/examples/files/scripts/cleanup.sh' + command: 'scripts/cleanup.sh' cli arguments: [ShellCliArgument(name='extpar_file', references_data_item=True, cli_option_of_data_item='-p'), ShellCliArgument(name='ERA5', references_data_item=True, cli_option_of_data_item='-e'), ShellCliArgument(name='grid_file', references_data_item=True, cli_option_of_data_item=None)] - env source files: ['$PWD/examples/files/data/dummy_source_file.sh'] + env source files: ['data/dummy_source_file.sh'] - icon [date: 2025-07-01 00:00:00]: input: - grid_file @@ -267,8 +273,10 @@ cycles: walltime: time.struct_time(tm_year=1900, tm_mon=1, tm_mday=1, tm_hour=23, tm_min=59, tm_sec=59, tm_wday=0, tm_yday=1, tm_isdst=-1) start date: 2025-01-01 00:00:00 end date: 2027-01-01 00:00:00 - plugin: 'icon' - namelists: {'master': 'path/to/mater_nml', 'model': 'path/to/model_nml'} + plugin: 'shell' + command: 'scripts/icon' + cli arguments: [ShellCliArgument(name='grid_file', references_data_item=True, cli_option_of_data_item='-g'), ShellCliArgument(name='icon_input', references_data_item=True, cli_option_of_data_item='--input')] + env source files: [] - postproc_1 [date: 2025-07-01 00:00:00]: input: - stream_1 [date: 2025-07-01 00:00:00] @@ -282,7 +290,7 @@ cycles: start date: 2025-01-01 00:00:00 end date: 2027-01-01 00:00:00 plugin: 'shell' - command: '$PWD/examples/files/scripts/main_script_ocn.sh' + command: 'scripts/main_script_ocn.sh' cli arguments: [ShellCliArgument(name='stream_1', references_data_item=True, cli_option_of_data_item='--input')] env source files: [] - store_and_clean_1 [date: 2025-07-01 00:00:00]: @@ -299,7 +307,7 @@ cycles: start date: 2025-01-01 00:00:00 end date: 2027-01-01 00:00:00 plugin: 'shell' - command: '$PWD/examples/files/scripts/post_clean.sh' + command: 'scripts/post_clean.sh' cli arguments: [ShellCliArgument(name='postout_1', references_data_item=True, cli_option_of_data_item='--input'), ShellCliArgument(name='stream_1', references_data_item=True, cli_option_of_data_item='--stream'), ShellCliArgument(name='icon_input', references_data_item=True, cli_option_of_data_item='--icon_input')] env source files: [] - icon_bimonthly [date: 2025-09-01 00:00:00]: @@ -321,9 +329,9 @@ cycles: start date: 2025-01-01 00:00:00 end date: 2027-01-01 00:00:00 plugin: 'shell' - command: '$PWD/examples/files/scripts/cleanup.sh' + command: 'scripts/cleanup.sh' cli arguments: [ShellCliArgument(name='extpar_file', references_data_item=True, cli_option_of_data_item='-p'), ShellCliArgument(name='ERA5', references_data_item=True, cli_option_of_data_item='-e'), ShellCliArgument(name='grid_file', references_data_item=True, cli_option_of_data_item=None)] - env source files: ['$PWD/examples/files/data/dummy_source_file.sh'] + env source files: ['data/dummy_source_file.sh'] - icon [date: 2025-09-01 00:00:00]: input: - grid_file @@ -340,8 +348,10 @@ cycles: walltime: time.struct_time(tm_year=1900, tm_mon=1, tm_mday=1, tm_hour=23, tm_min=59, tm_sec=59, tm_wday=0, tm_yday=1, tm_isdst=-1) start date: 2025-01-01 00:00:00 end date: 2027-01-01 00:00:00 - plugin: 'icon' - namelists: {'master': 'path/to/mater_nml', 
'model': 'path/to/model_nml'} + plugin: 'shell' + command: 'scripts/icon' + cli arguments: [ShellCliArgument(name='grid_file', references_data_item=True, cli_option_of_data_item='-g'), ShellCliArgument(name='icon_input', references_data_item=True, cli_option_of_data_item='--input')] + env source files: [] - postproc_1 [date: 2025-09-01 00:00:00]: input: - stream_1 [date: 2025-09-01 00:00:00] @@ -355,7 +365,7 @@ cycles: start date: 2025-01-01 00:00:00 end date: 2027-01-01 00:00:00 plugin: 'shell' - command: '$PWD/examples/files/scripts/main_script_ocn.sh' + command: 'scripts/main_script_ocn.sh' cli arguments: [ShellCliArgument(name='stream_1', references_data_item=True, cli_option_of_data_item='--input')] env source files: [] - store_and_clean_1 [date: 2025-09-01 00:00:00]: @@ -372,7 +382,7 @@ cycles: start date: 2025-01-01 00:00:00 end date: 2027-01-01 00:00:00 plugin: 'shell' - command: '$PWD/examples/files/scripts/post_clean.sh' + command: 'scripts/post_clean.sh' cli arguments: [ShellCliArgument(name='postout_1', references_data_item=True, cli_option_of_data_item='--input'), ShellCliArgument(name='stream_1', references_data_item=True, cli_option_of_data_item='--stream'), ShellCliArgument(name='icon_input', references_data_item=True, cli_option_of_data_item='--icon_input')] env source files: [] - icon_bimonthly [date: 2025-11-01 00:00:00]: @@ -394,9 +404,9 @@ cycles: start date: 2025-01-01 00:00:00 end date: 2027-01-01 00:00:00 plugin: 'shell' - command: '$PWD/examples/files/scripts/cleanup.sh' + command: 'scripts/cleanup.sh' cli arguments: [ShellCliArgument(name='extpar_file', references_data_item=True, cli_option_of_data_item='-p'), ShellCliArgument(name='ERA5', references_data_item=True, cli_option_of_data_item='-e'), ShellCliArgument(name='grid_file', references_data_item=True, cli_option_of_data_item=None)] - env source files: ['$PWD/examples/files/data/dummy_source_file.sh'] + env source files: ['data/dummy_source_file.sh'] - icon [date: 2025-11-01 00:00:00]: input: - grid_file @@ -413,8 +423,10 @@ cycles: walltime: time.struct_time(tm_year=1900, tm_mon=1, tm_mday=1, tm_hour=23, tm_min=59, tm_sec=59, tm_wday=0, tm_yday=1, tm_isdst=-1) start date: 2025-01-01 00:00:00 end date: 2027-01-01 00:00:00 - plugin: 'icon' - namelists: {'master': 'path/to/mater_nml', 'model': 'path/to/model_nml'} + plugin: 'shell' + command: 'scripts/icon' + cli arguments: [ShellCliArgument(name='grid_file', references_data_item=True, cli_option_of_data_item='-g'), ShellCliArgument(name='icon_input', references_data_item=True, cli_option_of_data_item='--input')] + env source files: [] - postproc_1 [date: 2025-11-01 00:00:00]: input: - stream_1 [date: 2025-11-01 00:00:00] @@ -428,7 +440,7 @@ cycles: start date: 2025-01-01 00:00:00 end date: 2027-01-01 00:00:00 plugin: 'shell' - command: '$PWD/examples/files/scripts/main_script_ocn.sh' + command: 'scripts/main_script_ocn.sh' cli arguments: [ShellCliArgument(name='stream_1', references_data_item=True, cli_option_of_data_item='--input')] env source files: [] - store_and_clean_1 [date: 2025-11-01 00:00:00]: @@ -445,7 +457,7 @@ cycles: start date: 2025-01-01 00:00:00 end date: 2027-01-01 00:00:00 plugin: 'shell' - command: '$PWD/examples/files/scripts/post_clean.sh' + command: 'scripts/post_clean.sh' cli arguments: [ShellCliArgument(name='postout_1', references_data_item=True, cli_option_of_data_item='--input'), ShellCliArgument(name='stream_1', references_data_item=True, cli_option_of_data_item='--stream'), ShellCliArgument(name='icon_input', 
references_data_item=True, cli_option_of_data_item='--icon_input')] env source files: [] - icon_bimonthly [date: 2026-01-01 00:00:00]: @@ -467,9 +479,9 @@ cycles: start date: 2025-01-01 00:00:00 end date: 2027-01-01 00:00:00 plugin: 'shell' - command: '$PWD/examples/files/scripts/cleanup.sh' + command: 'scripts/cleanup.sh' cli arguments: [ShellCliArgument(name='extpar_file', references_data_item=True, cli_option_of_data_item='-p'), ShellCliArgument(name='ERA5', references_data_item=True, cli_option_of_data_item='-e'), ShellCliArgument(name='grid_file', references_data_item=True, cli_option_of_data_item=None)] - env source files: ['$PWD/examples/files/data/dummy_source_file.sh'] + env source files: ['data/dummy_source_file.sh'] - icon [date: 2026-01-01 00:00:00]: input: - grid_file @@ -486,8 +498,10 @@ cycles: walltime: time.struct_time(tm_year=1900, tm_mon=1, tm_mday=1, tm_hour=23, tm_min=59, tm_sec=59, tm_wday=0, tm_yday=1, tm_isdst=-1) start date: 2025-01-01 00:00:00 end date: 2027-01-01 00:00:00 - plugin: 'icon' - namelists: {'master': 'path/to/mater_nml', 'model': 'path/to/model_nml'} + plugin: 'shell' + command: 'scripts/icon' + cli arguments: [ShellCliArgument(name='grid_file', references_data_item=True, cli_option_of_data_item='-g'), ShellCliArgument(name='icon_input', references_data_item=True, cli_option_of_data_item='--input')] + env source files: [] - postproc_1 [date: 2026-01-01 00:00:00]: input: - stream_1 [date: 2026-01-01 00:00:00] @@ -501,7 +515,7 @@ cycles: start date: 2025-01-01 00:00:00 end date: 2027-01-01 00:00:00 plugin: 'shell' - command: '$PWD/examples/files/scripts/main_script_ocn.sh' + command: 'scripts/main_script_ocn.sh' cli arguments: [ShellCliArgument(name='stream_1', references_data_item=True, cli_option_of_data_item='--input')] env source files: [] - store_and_clean_1 [date: 2026-01-01 00:00:00]: @@ -518,7 +532,7 @@ cycles: start date: 2025-01-01 00:00:00 end date: 2027-01-01 00:00:00 plugin: 'shell' - command: '$PWD/examples/files/scripts/post_clean.sh' + command: 'scripts/post_clean.sh' cli arguments: [ShellCliArgument(name='postout_1', references_data_item=True, cli_option_of_data_item='--input'), ShellCliArgument(name='stream_1', references_data_item=True, cli_option_of_data_item='--stream'), ShellCliArgument(name='icon_input', references_data_item=True, cli_option_of_data_item='--icon_input')] env source files: [] - icon_bimonthly [date: 2026-03-01 00:00:00]: @@ -540,9 +554,9 @@ cycles: start date: 2025-01-01 00:00:00 end date: 2027-01-01 00:00:00 plugin: 'shell' - command: '$PWD/examples/files/scripts/cleanup.sh' + command: 'scripts/cleanup.sh' cli arguments: [ShellCliArgument(name='extpar_file', references_data_item=True, cli_option_of_data_item='-p'), ShellCliArgument(name='ERA5', references_data_item=True, cli_option_of_data_item='-e'), ShellCliArgument(name='grid_file', references_data_item=True, cli_option_of_data_item=None)] - env source files: ['$PWD/examples/files/data/dummy_source_file.sh'] + env source files: ['data/dummy_source_file.sh'] - icon [date: 2026-03-01 00:00:00]: input: - grid_file @@ -559,8 +573,10 @@ cycles: walltime: time.struct_time(tm_year=1900, tm_mon=1, tm_mday=1, tm_hour=23, tm_min=59, tm_sec=59, tm_wday=0, tm_yday=1, tm_isdst=-1) start date: 2025-01-01 00:00:00 end date: 2027-01-01 00:00:00 - plugin: 'icon' - namelists: {'master': 'path/to/mater_nml', 'model': 'path/to/model_nml'} + plugin: 'shell' + command: 'scripts/icon' + cli arguments: [ShellCliArgument(name='grid_file', references_data_item=True, 
cli_option_of_data_item='-g'), ShellCliArgument(name='icon_input', references_data_item=True, cli_option_of_data_item='--input')] + env source files: [] - postproc_1 [date: 2026-03-01 00:00:00]: input: - stream_1 [date: 2026-03-01 00:00:00] @@ -574,7 +590,7 @@ cycles: start date: 2025-01-01 00:00:00 end date: 2027-01-01 00:00:00 plugin: 'shell' - command: '$PWD/examples/files/scripts/main_script_ocn.sh' + command: 'scripts/main_script_ocn.sh' cli arguments: [ShellCliArgument(name='stream_1', references_data_item=True, cli_option_of_data_item='--input')] env source files: [] - store_and_clean_1 [date: 2026-03-01 00:00:00]: @@ -591,7 +607,7 @@ cycles: start date: 2025-01-01 00:00:00 end date: 2027-01-01 00:00:00 plugin: 'shell' - command: '$PWD/examples/files/scripts/post_clean.sh' + command: 'scripts/post_clean.sh' cli arguments: [ShellCliArgument(name='postout_1', references_data_item=True, cli_option_of_data_item='--input'), ShellCliArgument(name='stream_1', references_data_item=True, cli_option_of_data_item='--stream'), ShellCliArgument(name='icon_input', references_data_item=True, cli_option_of_data_item='--icon_input')] env source files: [] - icon_bimonthly [date: 2026-05-01 00:00:00]: @@ -613,9 +629,9 @@ cycles: start date: 2025-01-01 00:00:00 end date: 2027-01-01 00:00:00 plugin: 'shell' - command: '$PWD/examples/files/scripts/cleanup.sh' + command: 'scripts/cleanup.sh' cli arguments: [ShellCliArgument(name='extpar_file', references_data_item=True, cli_option_of_data_item='-p'), ShellCliArgument(name='ERA5', references_data_item=True, cli_option_of_data_item='-e'), ShellCliArgument(name='grid_file', references_data_item=True, cli_option_of_data_item=None)] - env source files: ['$PWD/examples/files/data/dummy_source_file.sh'] + env source files: ['data/dummy_source_file.sh'] - icon [date: 2026-05-01 00:00:00]: input: - grid_file @@ -632,8 +648,10 @@ cycles: walltime: time.struct_time(tm_year=1900, tm_mon=1, tm_mday=1, tm_hour=23, tm_min=59, tm_sec=59, tm_wday=0, tm_yday=1, tm_isdst=-1) start date: 2025-01-01 00:00:00 end date: 2027-01-01 00:00:00 - plugin: 'icon' - namelists: {'master': 'path/to/mater_nml', 'model': 'path/to/model_nml'} + plugin: 'shell' + command: 'scripts/icon' + cli arguments: [ShellCliArgument(name='grid_file', references_data_item=True, cli_option_of_data_item='-g'), ShellCliArgument(name='icon_input', references_data_item=True, cli_option_of_data_item='--input')] + env source files: [] - postproc_1 [date: 2026-05-01 00:00:00]: input: - stream_1 [date: 2026-05-01 00:00:00] @@ -647,7 +665,7 @@ cycles: start date: 2025-01-01 00:00:00 end date: 2027-01-01 00:00:00 plugin: 'shell' - command: '$PWD/examples/files/scripts/main_script_ocn.sh' + command: 'scripts/main_script_ocn.sh' cli arguments: [ShellCliArgument(name='stream_1', references_data_item=True, cli_option_of_data_item='--input')] env source files: [] - store_and_clean_1 [date: 2026-05-01 00:00:00]: @@ -664,7 +682,7 @@ cycles: start date: 2025-01-01 00:00:00 end date: 2027-01-01 00:00:00 plugin: 'shell' - command: '$PWD/examples/files/scripts/post_clean.sh' + command: 'scripts/post_clean.sh' cli arguments: [ShellCliArgument(name='postout_1', references_data_item=True, cli_option_of_data_item='--input'), ShellCliArgument(name='stream_1', references_data_item=True, cli_option_of_data_item='--stream'), ShellCliArgument(name='icon_input', references_data_item=True, cli_option_of_data_item='--icon_input')] env source files: [] - icon_bimonthly [date: 2026-07-01 00:00:00]: @@ -686,9 +704,9 @@ cycles: start date: 
2025-01-01 00:00:00 end date: 2027-01-01 00:00:00 plugin: 'shell' - command: '$PWD/examples/files/scripts/cleanup.sh' + command: 'scripts/cleanup.sh' cli arguments: [ShellCliArgument(name='extpar_file', references_data_item=True, cli_option_of_data_item='-p'), ShellCliArgument(name='ERA5', references_data_item=True, cli_option_of_data_item='-e'), ShellCliArgument(name='grid_file', references_data_item=True, cli_option_of_data_item=None)] - env source files: ['$PWD/examples/files/data/dummy_source_file.sh'] + env source files: ['data/dummy_source_file.sh'] - icon [date: 2026-07-01 00:00:00]: input: - grid_file @@ -705,8 +723,10 @@ cycles: walltime: time.struct_time(tm_year=1900, tm_mon=1, tm_mday=1, tm_hour=23, tm_min=59, tm_sec=59, tm_wday=0, tm_yday=1, tm_isdst=-1) start date: 2025-01-01 00:00:00 end date: 2027-01-01 00:00:00 - plugin: 'icon' - namelists: {'master': 'path/to/mater_nml', 'model': 'path/to/model_nml'} + plugin: 'shell' + command: 'scripts/icon' + cli arguments: [ShellCliArgument(name='grid_file', references_data_item=True, cli_option_of_data_item='-g'), ShellCliArgument(name='icon_input', references_data_item=True, cli_option_of_data_item='--input')] + env source files: [] - postproc_1 [date: 2026-07-01 00:00:00]: input: - stream_1 [date: 2026-07-01 00:00:00] @@ -720,7 +740,7 @@ cycles: start date: 2025-01-01 00:00:00 end date: 2027-01-01 00:00:00 plugin: 'shell' - command: '$PWD/examples/files/scripts/main_script_ocn.sh' + command: 'scripts/main_script_ocn.sh' cli arguments: [ShellCliArgument(name='stream_1', references_data_item=True, cli_option_of_data_item='--input')] env source files: [] - store_and_clean_1 [date: 2026-07-01 00:00:00]: @@ -737,7 +757,7 @@ cycles: start date: 2025-01-01 00:00:00 end date: 2027-01-01 00:00:00 plugin: 'shell' - command: '$PWD/examples/files/scripts/post_clean.sh' + command: 'scripts/post_clean.sh' cli arguments: [ShellCliArgument(name='postout_1', references_data_item=True, cli_option_of_data_item='--input'), ShellCliArgument(name='stream_1', references_data_item=True, cli_option_of_data_item='--stream'), ShellCliArgument(name='icon_input', references_data_item=True, cli_option_of_data_item='--icon_input')] env source files: [] - icon_bimonthly [date: 2026-09-01 00:00:00]: @@ -759,9 +779,9 @@ cycles: start date: 2025-01-01 00:00:00 end date: 2027-01-01 00:00:00 plugin: 'shell' - command: '$PWD/examples/files/scripts/cleanup.sh' + command: 'scripts/cleanup.sh' cli arguments: [ShellCliArgument(name='extpar_file', references_data_item=True, cli_option_of_data_item='-p'), ShellCliArgument(name='ERA5', references_data_item=True, cli_option_of_data_item='-e'), ShellCliArgument(name='grid_file', references_data_item=True, cli_option_of_data_item=None)] - env source files: ['$PWD/examples/files/data/dummy_source_file.sh'] + env source files: ['data/dummy_source_file.sh'] - icon [date: 2026-09-01 00:00:00]: input: - grid_file @@ -778,8 +798,10 @@ cycles: walltime: time.struct_time(tm_year=1900, tm_mon=1, tm_mday=1, tm_hour=23, tm_min=59, tm_sec=59, tm_wday=0, tm_yday=1, tm_isdst=-1) start date: 2025-01-01 00:00:00 end date: 2027-01-01 00:00:00 - plugin: 'icon' - namelists: {'master': 'path/to/mater_nml', 'model': 'path/to/model_nml'} + plugin: 'shell' + command: 'scripts/icon' + cli arguments: [ShellCliArgument(name='grid_file', references_data_item=True, cli_option_of_data_item='-g'), ShellCliArgument(name='icon_input', references_data_item=True, cli_option_of_data_item='--input')] + env source files: [] - postproc_1 [date: 2026-09-01 00:00:00]: 
input: - stream_1 [date: 2026-09-01 00:00:00] @@ -793,7 +815,7 @@ cycles: start date: 2025-01-01 00:00:00 end date: 2027-01-01 00:00:00 plugin: 'shell' - command: '$PWD/examples/files/scripts/main_script_ocn.sh' + command: 'scripts/main_script_ocn.sh' cli arguments: [ShellCliArgument(name='stream_1', references_data_item=True, cli_option_of_data_item='--input')] env source files: [] - store_and_clean_1 [date: 2026-09-01 00:00:00]: @@ -810,7 +832,7 @@ cycles: start date: 2025-01-01 00:00:00 end date: 2027-01-01 00:00:00 plugin: 'shell' - command: '$PWD/examples/files/scripts/post_clean.sh' + command: 'scripts/post_clean.sh' cli arguments: [ShellCliArgument(name='postout_1', references_data_item=True, cli_option_of_data_item='--input'), ShellCliArgument(name='stream_1', references_data_item=True, cli_option_of_data_item='--stream'), ShellCliArgument(name='icon_input', references_data_item=True, cli_option_of_data_item='--icon_input')] env source files: [] - icon_bimonthly [date: 2026-11-01 00:00:00]: @@ -832,9 +854,9 @@ cycles: start date: 2025-01-01 00:00:00 end date: 2027-01-01 00:00:00 plugin: 'shell' - command: '$PWD/examples/files/scripts/cleanup.sh' + command: 'scripts/cleanup.sh' cli arguments: [ShellCliArgument(name='extpar_file', references_data_item=True, cli_option_of_data_item='-p'), ShellCliArgument(name='ERA5', references_data_item=True, cli_option_of_data_item='-e'), ShellCliArgument(name='grid_file', references_data_item=True, cli_option_of_data_item=None)] - env source files: ['$PWD/examples/files/data/dummy_source_file.sh'] + env source files: ['data/dummy_source_file.sh'] - icon [date: 2026-11-01 00:00:00]: input: - grid_file @@ -851,8 +873,10 @@ cycles: walltime: time.struct_time(tm_year=1900, tm_mon=1, tm_mday=1, tm_hour=23, tm_min=59, tm_sec=59, tm_wday=0, tm_yday=1, tm_isdst=-1) start date: 2025-01-01 00:00:00 end date: 2027-01-01 00:00:00 - plugin: 'icon' - namelists: {'master': 'path/to/mater_nml', 'model': 'path/to/model_nml'} + plugin: 'shell' + command: 'scripts/icon' + cli arguments: [ShellCliArgument(name='grid_file', references_data_item=True, cli_option_of_data_item='-g'), ShellCliArgument(name='icon_input', references_data_item=True, cli_option_of_data_item='--input')] + env source files: [] - postproc_1 [date: 2026-11-01 00:00:00]: input: - stream_1 [date: 2026-11-01 00:00:00] @@ -866,7 +890,7 @@ cycles: start date: 2025-01-01 00:00:00 end date: 2027-01-01 00:00:00 plugin: 'shell' - command: '$PWD/examples/files/scripts/main_script_ocn.sh' + command: 'scripts/main_script_ocn.sh' cli arguments: [ShellCliArgument(name='stream_1', references_data_item=True, cli_option_of_data_item='--input')] env source files: [] - store_and_clean_1 [date: 2026-11-01 00:00:00]: @@ -883,7 +907,7 @@ cycles: start date: 2025-01-01 00:00:00 end date: 2027-01-01 00:00:00 plugin: 'shell' - command: '$PWD/examples/files/scripts/post_clean.sh' + command: 'scripts/post_clean.sh' cli arguments: [ShellCliArgument(name='postout_1', references_data_item=True, cli_option_of_data_item='--input'), ShellCliArgument(name='stream_1', references_data_item=True, cli_option_of_data_item='--stream'), ShellCliArgument(name='icon_input', references_data_item=True, cli_option_of_data_item='--icon_input')] env source files: [] - yearly [date: 2025-01-01 00:00:00]: @@ -906,7 +930,7 @@ cycles: start date: 2025-01-01 00:00:00 end date: 2027-01-01 00:00:00 plugin: 'shell' - command: '$PWD/examples/files/scripts/main_script_atm.sh' + command: 'scripts/main_script_atm.sh' cli arguments: 
[ShellCliArgument(name='stream_2', references_data_item=True, cli_option_of_data_item='--input')] env source files: [] src: 'path/to/src/dir' @@ -928,7 +952,7 @@ cycles: start date: 2025-01-01 00:00:00 end date: 2027-01-01 00:00:00 plugin: 'shell' - command: '$PWD/examples/files/scripts/post_clean.sh' + command: 'scripts/post_clean.sh' cli arguments: [ShellCliArgument(name='postout_2', references_data_item=True, cli_option_of_data_item='--input')] env source files: [] - yearly [date: 2026-01-01 00:00:00]: @@ -951,7 +975,7 @@ cycles: start date: 2025-01-01 00:00:00 end date: 2027-01-01 00:00:00 plugin: 'shell' - command: '$PWD/examples/files/scripts/main_script_atm.sh' + command: 'scripts/main_script_atm.sh' cli arguments: [ShellCliArgument(name='stream_2', references_data_item=True, cli_option_of_data_item='--input')] env source files: [] src: 'path/to/src/dir' @@ -973,6 +997,6 @@ cycles: start date: 2025-01-01 00:00:00 end date: 2027-01-01 00:00:00 plugin: 'shell' - command: '$PWD/examples/files/scripts/post_clean.sh' + command: 'scripts/post_clean.sh' cli arguments: [ShellCliArgument(name='postout_2', references_data_item=True, cli_option_of_data_item='--input')] env source files: [] \ No newline at end of file diff --git a/tests/cases/parameters/config/data/forcing b/tests/cases/parameters/config/data/forcing new file mode 100644 index 00000000..e69de29b diff --git a/tests/cases/parameters/config/data/initial_conditions b/tests/cases/parameters/config/data/initial_conditions new file mode 100644 index 00000000..e69de29b diff --git a/tests/cases/parameters/config/scripts/icon.py b/tests/cases/parameters/config/scripts/icon.py new file mode 100755 index 00000000..32f71ed6 --- /dev/null +++ b/tests/cases/parameters/config/scripts/icon.py @@ -0,0 +1,70 @@ +#!/usr/bin/env python +"""usage: icon.py [-h] [--init [INIT]] [--restart [RESTART]] [--forcing [FORCING]] [namelist] + +A script mocking parts of icon in a form of a shell script + +options: + -h, --help show this help message and exit + --init [INIT] The icon init file. + --restart [RESTART] The icon restart file. + --forcing [FORCING] The icon forcing file. +""" + +import argparse +from pathlib import Path + +LOG_FILE = Path("icon.log") + + +def log(text: str): + print(text) + with LOG_FILE.open("a") as f: + f.write(text) + + +def main(): + parser = argparse.ArgumentParser(description="A script mocking parts of icon in a form of a shell script.") + parser.add_argument("--init", nargs="?", type=str, help="The icon init file.") + parser.add_argument("namelist", nargs="?", default=None) + parser.add_argument("--restart", nargs="?", type=str, help="The icon restart file.") + parser.add_argument("--forcing", nargs="?", type=str, help="The icon forcing file.") + + args = parser.parse_args() + + output = Path("icon_output") + output.write_text("") + + if args.restart: + if args.init: + msg = "Cannot use '--init' and '--restart' option at the same time." + raise ValueError(msg) + if not Path(args.restart).exists(): + msg = f"The icon restart file {args.restart!r} was not found." + raise FileNotFoundError(msg) + restart = Path(args.restart) + + log(f"Restarting from file {args.restart!r}.") + elif args.init: + if not Path(args.init).exists(): + msg = f"The icon init file {args.init!r} was not found." + raise FileNotFoundError(msg) + + log(f"Starting from init file {args.init!r}.") + else: + msg = "Please provide a restart or init file with the corresponding option." 
+ raise ValueError(msg) + + if args.namelist: + log(f"Namelist {args.namelist} provided. Continue with it.") + else: + log("No namelist provided. Continue with default one.") + + # Main script execution continues here + log("Script finished running calculations") + + restart = Path("restart") + restart.write_text("") + + +if __name__ == "__main__": + main() diff --git a/tests/cases/parameters/config/scripts/merge.py b/tests/cases/parameters/config/scripts/merge.py new file mode 100755 index 00000000..2fa94152 --- /dev/null +++ b/tests/cases/parameters/config/scripts/merge.py @@ -0,0 +1,15 @@ +#!/usr/bin/env python + +import argparse +from pathlib import Path + + +def main(): + parser = argparse.ArgumentParser(description="A script mocking parts of icon in a form of a shell script.") + parser.add_argument("file", nargs="+", type=str, help="The files to analyse.") + args = parser.parse_args() + Path("analysis").write_text(f"analysis for file {args.file}") + + +if __name__ == "__main__": + main() diff --git a/tests/cases/parameters/config/scripts/statistics.py b/tests/cases/parameters/config/scripts/statistics.py new file mode 100755 index 00000000..2fa94152 --- /dev/null +++ b/tests/cases/parameters/config/scripts/statistics.py @@ -0,0 +1,15 @@ +#!/usr/bin/env python + +import argparse +from pathlib import Path + + +def main(): + parser = argparse.ArgumentParser(description="A script mocking parts of icon in a form of a shell script.") + parser.add_argument("file", nargs="+", type=str, help="The files to analyse.") + args = parser.parse_args() + Path("analysis").write_text(f"analysis for file {args.file}") + + +if __name__ == "__main__": + main() diff --git a/tests/cases/parameters/config/test_config_parameters.yml b/tests/cases/parameters/config/test_config_parameters.yml index 0ef06058..da4f4715 100644 --- a/tests/cases/parameters/config/test_config_parameters.yml +++ b/tests/cases/parameters/config/test_config_parameters.yml @@ -10,7 +10,7 @@ cycles: tasks: - icon: inputs: - - initial conditions: + - initial_conditions: when: at: *root_start_date - icon_restart: @@ -45,32 +45,32 @@ cycles: tasks: - icon: plugin: shell - command: $PWD/tests/files/scripts/icon.py - cli_arguments: "{--restart icon_restart} {--init initial_conditions} {--forcing forcing}" + command: scripts/icon.py + cli_arguments: "{--restart icon_restart} {--init initial_conditions} {--forcing forcing} {analysis_foo_bar}" parameters: [foo, bar] - statistics_foo: plugin: shell - command: $PWD/tests/files/scripts/statistics.py + command: scripts/statistics.py parameters: [bar] - statistics_foo_bar: plugin: shell - command: $PWD/tests/files/scripts/statistics.py + command: scripts/statistics.py - merge: plugin: shell - command: $PWD/tests/files/scripts/merge.py + command: scripts/merge.py data: available: - - initial conditions: + - initial_conditions: type: file - src: . + src: data/initial_conditions - forcing: type: file - src: . 
+        src: data/forcing
   generated:
     - icon_output:
         type: file
-        src: output
+        src: icon_output
         parameters: [foo, bar]
     - icon_restart:
         type: file
@@ -78,14 +78,14 @@ data:
         parameters: [foo, bar]
     - analysis_foo:
         type: file
-        src: analysis_foo
+        src: analysis
         parameters: [bar]
     - analysis_foo_bar:
         type: file
-        src: foo_analysis_bar
+        src: analysis
     - yearly_analysis:
         type: file
-        src: yearly_analysis
+        src: analysis
 
 parameters:
   foo: [0, 1]
diff --git a/tests/cases/parameters/data/test_config_parameters.txt b/tests/cases/parameters/data/test_config_parameters.txt
index 224cd588..de5d5b0d 100644
--- a/tests/cases/parameters/data/test_config_parameters.txt
+++ b/tests/cases/parameters/data/test_config_parameters.txt
@@ -3,7 +3,7 @@ cycles:
       tasks:
         - icon [date: 2026-01-01 00:00:00, foo: 0, bar: 3.0]:
             input:
-              - initial conditions
+              - initial_conditions
               - forcing
             output:
               - icon_output [date: 2026-01-01 00:00:00, foo: 0, bar: 3.0]
@@ -13,12 +13,12 @@ cycles:
             start date: 2026-01-01 00:00:00
             end date: 2028-01-01 00:00:00
             plugin: 'shell'
-            command: '$PWD/tests/files/scripts/icon.py'
-            cli arguments: [ShellCliArgument(name='icon_restart', references_data_item=True, cli_option_of_data_item='--restart'), ShellCliArgument(name='initial_conditions', references_data_item=True, cli_option_of_data_item='--init'), ShellCliArgument(name='forcing', references_data_item=True, cli_option_of_data_item='--forcing')]
+            command: 'scripts/icon.py'
+            cli arguments: [ShellCliArgument(name='icon_restart', references_data_item=True, cli_option_of_data_item='--restart'), ShellCliArgument(name='initial_conditions', references_data_item=True, cli_option_of_data_item='--init'), ShellCliArgument(name='forcing', references_data_item=True, cli_option_of_data_item='--forcing'), ShellCliArgument(name='analysis_foo_bar', references_data_item=True, cli_option_of_data_item=None)]
             env source files: []
         - icon [date: 2026-01-01 00:00:00, foo: 1, bar: 3.0]:
             input:
-              - initial conditions
+              - initial_conditions
               - forcing
             output:
               - icon_output [date: 2026-01-01 00:00:00, foo: 1, bar: 3.0]
@@ -28,8 +28,8 @@ cycles:
             start date: 2026-01-01 00:00:00
             end date: 2028-01-01 00:00:00
             plugin: 'shell'
-            command: '$PWD/tests/files/scripts/icon.py'
-            cli arguments: [ShellCliArgument(name='icon_restart', references_data_item=True, cli_option_of_data_item='--restart'), ShellCliArgument(name='initial_conditions', references_data_item=True, cli_option_of_data_item='--init'), ShellCliArgument(name='forcing', references_data_item=True, cli_option_of_data_item='--forcing')]
+            command: 'scripts/icon.py'
+            cli arguments: [ShellCliArgument(name='icon_restart', references_data_item=True, cli_option_of_data_item='--restart'), ShellCliArgument(name='initial_conditions', references_data_item=True, cli_option_of_data_item='--init'), ShellCliArgument(name='forcing', references_data_item=True, cli_option_of_data_item='--forcing'), ShellCliArgument(name='analysis_foo_bar', references_data_item=True, cli_option_of_data_item=None)]
             env source files: []
         - statistics_foo [date: 2026-01-01 00:00:00, bar: 3.0]:
             input:
@@ -42,7 +42,7 @@ cycles:
             start date: 2026-01-01 00:00:00
             end date: 2028-01-01 00:00:00
             plugin: 'shell'
-            command: '$PWD/tests/files/scripts/statistics.py'
+            command: 'scripts/statistics.py'
             cli arguments: []
             env source files: []
         - statistics_foo_bar [date: 2026-01-01 00:00:00]:
@@ -55,7 +55,7 @@ cycles:
             start date: 2026-01-01 00:00:00
             end date: 2028-01-01 00:00:00
             plugin: 'shell'
-            command: '$PWD/tests/files/scripts/statistics.py'
+            command: 'scripts/statistics.py'
             cli arguments: []
             env source files: []
   - bimonthly_tasks [date: 2026-07-01 00:00:00]:
@@ -72,8 +72,8 @@ cycles:
             start date: 2026-01-01 00:00:00
             end date: 2028-01-01 00:00:00
             plugin: 'shell'
-            command: '$PWD/tests/files/scripts/icon.py'
-            cli arguments: [ShellCliArgument(name='icon_restart', references_data_item=True, cli_option_of_data_item='--restart'), ShellCliArgument(name='initial_conditions', references_data_item=True, cli_option_of_data_item='--init'), ShellCliArgument(name='forcing', references_data_item=True, cli_option_of_data_item='--forcing')]
+            command: 'scripts/icon.py'
+            cli arguments: [ShellCliArgument(name='icon_restart', references_data_item=True, cli_option_of_data_item='--restart'), ShellCliArgument(name='initial_conditions', references_data_item=True, cli_option_of_data_item='--init'), ShellCliArgument(name='forcing', references_data_item=True, cli_option_of_data_item='--forcing'), ShellCliArgument(name='analysis_foo_bar', references_data_item=True, cli_option_of_data_item=None)]
             env source files: []
         - icon [date: 2026-07-01 00:00:00, foo: 1, bar: 3.0]:
             input:
@@ -87,8 +87,8 @@ cycles:
             start date: 2026-01-01 00:00:00
             end date: 2028-01-01 00:00:00
             plugin: 'shell'
-            command: '$PWD/tests/files/scripts/icon.py'
-            cli arguments: [ShellCliArgument(name='icon_restart', references_data_item=True, cli_option_of_data_item='--restart'), ShellCliArgument(name='initial_conditions', references_data_item=True, cli_option_of_data_item='--init'), ShellCliArgument(name='forcing', references_data_item=True, cli_option_of_data_item='--forcing')]
+            command: 'scripts/icon.py'
+            cli arguments: [ShellCliArgument(name='icon_restart', references_data_item=True, cli_option_of_data_item='--restart'), ShellCliArgument(name='initial_conditions', references_data_item=True, cli_option_of_data_item='--init'), ShellCliArgument(name='forcing', references_data_item=True, cli_option_of_data_item='--forcing'), ShellCliArgument(name='analysis_foo_bar', references_data_item=True, cli_option_of_data_item=None)]
             env source files: []
         - statistics_foo [date: 2026-07-01 00:00:00, bar: 3.0]:
             input:
@@ -101,7 +101,7 @@ cycles:
             start date: 2026-01-01 00:00:00
             end date: 2028-01-01 00:00:00
             plugin: 'shell'
-            command: '$PWD/tests/files/scripts/statistics.py'
+            command: 'scripts/statistics.py'
             cli arguments: []
             env source files: []
         - statistics_foo_bar [date: 2026-07-01 00:00:00]:
@@ -114,7 +114,7 @@ cycles:
             start date: 2026-01-01 00:00:00
             end date: 2028-01-01 00:00:00
             plugin: 'shell'
-            command: '$PWD/tests/files/scripts/statistics.py'
+            command: 'scripts/statistics.py'
             cli arguments: []
             env source files: []
   - bimonthly_tasks [date: 2027-01-01 00:00:00]:
@@ -131,8 +131,8 @@ cycles:
             start date: 2026-01-01 00:00:00
             end date: 2028-01-01 00:00:00
             plugin: 'shell'
-            command: '$PWD/tests/files/scripts/icon.py'
-            cli arguments: [ShellCliArgument(name='icon_restart', references_data_item=True, cli_option_of_data_item='--restart'), ShellCliArgument(name='initial_conditions', references_data_item=True, cli_option_of_data_item='--init'), ShellCliArgument(name='forcing', references_data_item=True, cli_option_of_data_item='--forcing')]
+            command: 'scripts/icon.py'
+            cli arguments: [ShellCliArgument(name='icon_restart', references_data_item=True, cli_option_of_data_item='--restart'), ShellCliArgument(name='initial_conditions', references_data_item=True, cli_option_of_data_item='--init'), ShellCliArgument(name='forcing', references_data_item=True, cli_option_of_data_item='--forcing'), ShellCliArgument(name='analysis_foo_bar', references_data_item=True, cli_option_of_data_item=None)]
             env source files: []
         - icon [date: 2027-01-01 00:00:00, foo: 1, bar: 3.0]:
             input:
@@ -146,8 +146,8 @@ cycles:
             start date: 2026-01-01 00:00:00
             end date: 2028-01-01 00:00:00
             plugin: 'shell'
-            command: '$PWD/tests/files/scripts/icon.py'
-            cli arguments: [ShellCliArgument(name='icon_restart', references_data_item=True, cli_option_of_data_item='--restart'), ShellCliArgument(name='initial_conditions', references_data_item=True, cli_option_of_data_item='--init'), ShellCliArgument(name='forcing', references_data_item=True, cli_option_of_data_item='--forcing')]
+            command: 'scripts/icon.py'
+            cli arguments: [ShellCliArgument(name='icon_restart', references_data_item=True, cli_option_of_data_item='--restart'), ShellCliArgument(name='initial_conditions', references_data_item=True, cli_option_of_data_item='--init'), ShellCliArgument(name='forcing', references_data_item=True, cli_option_of_data_item='--forcing'), ShellCliArgument(name='analysis_foo_bar', references_data_item=True, cli_option_of_data_item=None)]
             env source files: []
         - statistics_foo [date: 2027-01-01 00:00:00, bar: 3.0]:
             input:
@@ -160,7 +160,7 @@ cycles:
             start date: 2026-01-01 00:00:00
             end date: 2028-01-01 00:00:00
             plugin: 'shell'
-            command: '$PWD/tests/files/scripts/statistics.py'
+            command: 'scripts/statistics.py'
             cli arguments: []
             env source files: []
         - statistics_foo_bar [date: 2027-01-01 00:00:00]:
@@ -173,7 +173,7 @@ cycles:
             start date: 2026-01-01 00:00:00
             end date: 2028-01-01 00:00:00
             plugin: 'shell'
-            command: '$PWD/tests/files/scripts/statistics.py'
+            command: 'scripts/statistics.py'
             cli arguments: []
             env source files: []
   - bimonthly_tasks [date: 2027-07-01 00:00:00]:
@@ -190,8 +190,8 @@ cycles:
             start date: 2026-01-01 00:00:00
             end date: 2028-01-01 00:00:00
             plugin: 'shell'
-            command: '$PWD/tests/files/scripts/icon.py'
-            cli arguments: [ShellCliArgument(name='icon_restart', references_data_item=True, cli_option_of_data_item='--restart'), ShellCliArgument(name='initial_conditions', references_data_item=True, cli_option_of_data_item='--init'), ShellCliArgument(name='forcing', references_data_item=True, cli_option_of_data_item='--forcing')]
+            command: 'scripts/icon.py'
+            cli arguments: [ShellCliArgument(name='icon_restart', references_data_item=True, cli_option_of_data_item='--restart'), ShellCliArgument(name='initial_conditions', references_data_item=True, cli_option_of_data_item='--init'), ShellCliArgument(name='forcing', references_data_item=True, cli_option_of_data_item='--forcing'), ShellCliArgument(name='analysis_foo_bar', references_data_item=True, cli_option_of_data_item=None)]
             env source files: []
         - icon [date: 2027-07-01 00:00:00, foo: 1, bar: 3.0]:
             input:
@@ -205,8 +205,8 @@ cycles:
             start date: 2026-01-01 00:00:00
             end date: 2028-01-01 00:00:00
             plugin: 'shell'
-            command: '$PWD/tests/files/scripts/icon.py'
-            cli arguments: [ShellCliArgument(name='icon_restart', references_data_item=True, cli_option_of_data_item='--restart'), ShellCliArgument(name='initial_conditions', references_data_item=True, cli_option_of_data_item='--init'), ShellCliArgument(name='forcing', references_data_item=True, cli_option_of_data_item='--forcing')]
+            command: 'scripts/icon.py'
+            cli arguments: [ShellCliArgument(name='icon_restart', references_data_item=True, cli_option_of_data_item='--restart'), ShellCliArgument(name='initial_conditions', references_data_item=True, cli_option_of_data_item='--init'), ShellCliArgument(name='forcing', references_data_item=True, cli_option_of_data_item='--forcing'), ShellCliArgument(name='analysis_foo_bar', references_data_item=True, cli_option_of_data_item=None)]
             env source files: []
         - statistics_foo [date: 2027-07-01 00:00:00, bar: 3.0]:
             input:
@@ -219,7 +219,7 @@ cycles:
             start date: 2026-01-01 00:00:00
             end date: 2028-01-01 00:00:00
             plugin: 'shell'
-            command: '$PWD/tests/files/scripts/statistics.py'
+            command: 'scripts/statistics.py'
             cli arguments: []
             env source files: []
         - statistics_foo_bar [date: 2027-07-01 00:00:00]:
@@ -232,7 +232,7 @@ cycles:
             start date: 2026-01-01 00:00:00
             end date: 2028-01-01 00:00:00
             plugin: 'shell'
-            command: '$PWD/tests/files/scripts/statistics.py'
+            command: 'scripts/statistics.py'
             cli arguments: []
             env source files: []
   - yearly [date: 2026-01-01 00:00:00]:
@@ -248,7 +248,7 @@ cycles:
             start date: 2026-01-01 00:00:00
             end date: 2028-01-01 00:00:00
             plugin: 'shell'
-            command: '$PWD/tests/files/scripts/merge.py'
+            command: 'scripts/merge.py'
             cli arguments: []
             env source files: []
   - yearly [date: 2027-01-01 00:00:00]:
@@ -264,6 +264,6 @@ cycles:
             start date: 2026-01-01 00:00:00
             end date: 2028-01-01 00:00:00
             plugin: 'shell'
-            command: '$PWD/tests/files/scripts/merge.py'
+            command: 'scripts/merge.py'
             cli arguments: []
             env source files: []
\ No newline at end of file
diff --git a/tests/cases/small/config/data/initial_conditions b/tests/cases/small/config/data/initial_conditions
new file mode 100644
index 00000000..e69de29b
diff --git a/tests/cases/small/config/data/input b/tests/cases/small/config/data/input
new file mode 100644
index 00000000..e69de29b
diff --git a/tests/cases/small/config/scripts/cleanup.py b/tests/cases/small/config/scripts/cleanup.py
new file mode 100755
index 00000000..de7aebad
--- /dev/null
+++ b/tests/cases/small/config/scripts/cleanup.py
@@ -0,0 +1,10 @@
+#!/usr/bin/env python
+
+
+def main():
+    # Main script execution continues here
+    print("Cleaning")
+
+
+if __name__ == "__main__":
+    main()
diff --git a/tests/cases/small/config/scripts/icon.py b/tests/cases/small/config/scripts/icon.py
new file mode 100755
index 00000000..32f71ed6
--- /dev/null
+++ b/tests/cases/small/config/scripts/icon.py
@@ -0,0 +1,70 @@
+#!/usr/bin/env python
+"""usage: icon.py [-h] [--init [INIT]] [--restart [RESTART]] [--forcing [FORCING]] [namelist]
+
+A script mocking parts of icon in a form of a shell script
+
+options:
+  -h, --help           show this help message and exit
+  --init [INIT]        The icon init file.
+  --restart [RESTART]  The icon restart file.
+  --forcing [FORCING]  The icon forcing file.
+"""
+
+import argparse
+from pathlib import Path
+
+LOG_FILE = Path("icon.log")
+
+
+def log(text: str):
+    print(text)
+    with LOG_FILE.open("a") as f:
+        f.write(text)
+
+
+def main():
+    parser = argparse.ArgumentParser(description="A script mocking parts of icon in a form of a shell script.")
+    parser.add_argument("--init", nargs="?", type=str, help="The icon init file.")
+    parser.add_argument("namelist", nargs="?", default=None)
+    parser.add_argument("--restart", nargs="?", type=str, help="The icon restart file.")
+    parser.add_argument("--forcing", nargs="?", type=str, help="The icon forcing file.")
+
+    args = parser.parse_args()
+
+    output = Path("icon_output")
+    output.write_text("")
+
+    if args.restart:
+        if args.init:
+            msg = "Cannot use '--init' and '--restart' option at the same time."
+            raise ValueError(msg)
+        if not Path(args.restart).exists():
+            msg = f"The icon restart file {args.restart!r} was not found."
+            raise FileNotFoundError(msg)
+        restart = Path(args.restart)
+
+        log(f"Restarting from file {args.restart!r}.")
+    elif args.init:
+        if not Path(args.init).exists():
+            msg = f"The icon init file {args.init!r} was not found."
+            raise FileNotFoundError(msg)
+
+        log(f"Starting from init file {args.init!r}.")
+    else:
+        msg = "Please provide a restart or init file with the corresponding option."
+        raise ValueError(msg)
+
+    if args.namelist:
+        log(f"Namelist {args.namelist} provided. Continue with it.")
+    else:
+        log("No namelist provided. Continue with default one.")
+
+    # Main script execution continues here
+    log("Script finished running calculations")
+
+    restart = Path("restart")
+    restart.write_text("")
+
+
+if __name__ == "__main__":
+    main()
diff --git a/tests/cases/small/config/test_config_small.yml b/tests/cases/small/config/test_config_small.yml
index 900e1c74..75098c23 100644
--- a/tests/cases/small/config/test_config_small.yml
+++ b/tests/cases/small/config/test_config_small.yml
@@ -9,6 +9,10 @@ cycles:
       tasks:
         - icon:
             inputs:
+              - icon_namelist
+              - initial_conditions:
+                  when:
+                    at: *root_start_date
              - icon_restart:
                   when:
                     after: *root_start_date
@@ -22,21 +26,28 @@
                   date: 2026-05-01T00:00
 tasks:
   - icon:
+      computer: localhost
       plugin: shell
-      command: $PWD/tests/files/scripts/icon.py
+      command: scripts/icon.py
       cli_arguments: "{--restart icon_restart} {--init initial_conditions}"
+      env_source_files: data/dummy_source_file.sh
   - cleanup:
+      computer: localhost
       plugin: shell
-      command: $PWD/tests/files/scripts/cleanup.py
+      command: scripts/cleanup.py
 data:
   available:
-    - icon_input:
+    - icon_namelist:
+        type: file
+        src: data/input
+    - initial_conditions:
         type: file
-        src: $PWD/tests/files/data/input
+        computer: localhost
+        src: data/initial_conditions
   generated:
     - icon_output:
         type: file
-        src: output
+        src: icon_output
     - icon_restart:
         type: file
         src: restart
diff --git a/tests/cases/small/data/test_config_small.txt b/tests/cases/small/data/test_config_small.txt
index 5e0e293d..3e799e13 100644
--- a/tests/cases/small/data/test_config_small.txt
+++ b/tests/cases/small/data/test_config_small.txt
@@ -2,49 +2,57 @@ cycles:
   - bimonthly_tasks [date: 2026-01-01 00:00:00]:
       tasks:
         - icon [date: 2026-01-01 00:00:00]:
+            input:
+              - icon_namelist
+              - initial_conditions
             output:
               - icon_output [date: 2026-01-01 00:00:00]
               - icon_restart [date: 2026-01-01 00:00:00]
             name: 'icon'
             coordinates: {'date': datetime.datetime(2026, 1, 1, 0, 0)}
+            computer: 'localhost'
             start date: 2026-01-01 00:00:00
             end date: 2026-06-01 00:00:00
             plugin: 'shell'
-            command: '$PWD/tests/files/scripts/icon.py'
+            command: 'scripts/icon.py'
             cli arguments: [ShellCliArgument(name='icon_restart', references_data_item=True, cli_option_of_data_item='--restart'), ShellCliArgument(name='initial_conditions', references_data_item=True, cli_option_of_data_item='--init')]
-            env source files: []
+            env source files: ['data/dummy_source_file.sh']
   - bimonthly_tasks [date: 2026-03-01 00:00:00]:
       tasks:
         - icon [date: 2026-03-01 00:00:00]:
             input:
+              - icon_namelist
               - icon_restart [date: 2026-01-01 00:00:00]
             output:
               - icon_output [date: 2026-03-01 00:00:00]
               - icon_restart [date: 2026-03-01 00:00:00]
             name: 'icon'
             coordinates: {'date': datetime.datetime(2026, 3, 1, 0, 0)}
+            computer: 'localhost'
             start date: 2026-01-01 00:00:00
             end date: 2026-06-01 00:00:00
             plugin: 'shell'
-            command: '$PWD/tests/files/scripts/icon.py'
+            command: 'scripts/icon.py'
             cli arguments: [ShellCliArgument(name='icon_restart', references_data_item=True, cli_option_of_data_item='--restart'), ShellCliArgument(name='initial_conditions', references_data_item=True, cli_option_of_data_item='--init')]
-            env source files: []
+            env source files: ['data/dummy_source_file.sh']
   - bimonthly_tasks [date: 2026-05-01 00:00:00]:
       tasks:
         - icon [date: 2026-05-01 00:00:00]:
             input:
+              - icon_namelist
               - icon_restart [date: 2026-03-01 00:00:00]
             output:
               - icon_output [date: 2026-05-01 00:00:00]
               - icon_restart [date: 2026-05-01 00:00:00]
             name: 'icon'
             coordinates: {'date': datetime.datetime(2026, 5, 1, 0, 0)}
+            computer: 'localhost'
             start date: 2026-01-01 00:00:00
             end date: 2026-06-01 00:00:00
             plugin: 'shell'
-            command: '$PWD/tests/files/scripts/icon.py'
+            command: 'scripts/icon.py'
             cli arguments: [ShellCliArgument(name='icon_restart', references_data_item=True, cli_option_of_data_item='--restart'), ShellCliArgument(name='initial_conditions', references_data_item=True, cli_option_of_data_item='--init')]
-            env source files: []
+            env source files: ['data/dummy_source_file.sh']
   - lastly:
       tasks:
         - cleanup:
@@ -52,7 +60,8 @@ cycles:
               - icon [date: 2026-05-01 00:00:00]
             name: 'cleanup'
             coordinates: {}
+            computer: 'localhost'
             plugin: 'shell'
-            command: '$PWD/tests/files/scripts/cleanup.py'
+            command: 'scripts/cleanup.py'
             cli arguments: []
             env source files: []
\ No newline at end of file
diff --git a/tests/conftest.py b/tests/conftest.py
index e69de29b..4a579bb5 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -0,0 +1 @@
+pytest_plugins = ["aiida.tools.pytest_fixtures"]
diff --git a/tests/test_wc_workflow.py b/tests/test_wc_workflow.py
index dd9f793b..0b3c3b9d 100644
--- a/tests/test_wc_workflow.py
+++ b/tests/test_wc_workflow.py
@@ -6,6 +6,7 @@
 from sirocco.parsing._yaml_data_models import ConfigShellTask, ShellCliArgument
 from sirocco.pretty_print import PrettyPrinter
 from sirocco.vizgraph import VizGraph
+from sirocco.workgraph import AiidaWorkGraph
 
 
 # configs that are tested for parsing
@@ -29,6 +30,7 @@ def pprinter():
     return PrettyPrinter()
 
 
+# configs that are tested for parsing
 config_test_files = [
     "tests/cases/small/config/test_config_small.yml",
     "tests/cases/large/config/test_config_large.yml",
@@ -64,3 +66,26 @@ def test_serialize_workflow(config_paths, pprinter):
 
 def test_vizgraph(config_paths):
     VizGraph.from_yaml(config_paths["yml"]).draw(file_path=config_paths["svg"])
+
+
+# configs that are tested for running workgraph
+@pytest.mark.parametrize(
+    "config_path",
+    [
+        "tests/cases/small/config/test_config_small.yml",
+        "tests/cases/parameters/config/test_config_parameters.yml",
+    ],
+)
+def test_run_workgraph(config_path, aiida_computer):
+    """Tests end-to-end the parsing from file up to running the workgraph.
+
+    Automatically uses the aiida_profile fixture to create a new profile. Note to debug the test with your profile
+    please run this in a separate file as the profile is deleted after test finishes.
+    """
+    # some configs reference computer "localhost" which we need to create beforehand
+    aiida_computer("localhost").store()
+
+    core_workflow = Workflow.from_yaml(config_path)
+    aiida_workflow = AiidaWorkGraph(core_workflow)
+    out = aiida_workflow.run()
+    assert out.get("execution_count", None).value == 1