From 85d371e5f6f56f80f8f242e63e01a8d390180501 Mon Sep 17 00:00:00 2001 From: German Date: Mon, 13 May 2024 22:43:16 +0200 Subject: [PATCH 01/29] First iteration --- src/ansys/mapdl/core/cli/__init__.py | 7 + src/ansys/mapdl/core/cli/hpc.py | 251 ++++++++++++++++++++ src/ansys/mapdl/core/hpc/__init__.py | 21 ++ src/ansys/mapdl/core/hpc/pyhps.py | 339 +++++++++++++++++++++++++++ 4 files changed, 618 insertions(+) create mode 100644 src/ansys/mapdl/core/cli/hpc.py create mode 100644 src/ansys/mapdl/core/hpc/__init__.py create mode 100644 src/ansys/mapdl/core/hpc/pyhps.py diff --git a/src/ansys/mapdl/core/cli/__init__.py b/src/ansys/mapdl/core/cli/__init__.py index fe013da660..e2cd48c965 100644 --- a/src/ansys/mapdl/core/cli/__init__.py +++ b/src/ansys/mapdl/core/cli/__init__.py @@ -39,6 +39,7 @@ def main(ctx): pass from ansys.mapdl.core.cli.convert import convert + from ansys.mapdl.core.cli.hpc import submit from ansys.mapdl.core.cli.list_instances import list_instances from ansys.mapdl.core.cli.start import start from ansys.mapdl.core.cli.stop import stop @@ -48,6 +49,12 @@ def main(ctx): main.add_command(stop) main.add_command(list_instances, name="list") + # HPC commands + # pymapdl hpc submit + # pymapdl hpc list + # pymapdl hpc stop + main.add_command(submit) + def old_pymapdl_convert_script_entry_point(): print( """This CLI function has been deprecated. Please use instead: diff --git a/src/ansys/mapdl/core/cli/hpc.py b/src/ansys/mapdl/core/cli/hpc.py new file mode 100644 index 0000000000..cf6f89f48f --- /dev/null +++ b/src/ansys/mapdl/core/cli/hpc.py @@ -0,0 +1,251 @@ +# Copyright (C) 2024 ANSYS, Inc. and/or its affiliates. +# SPDX-License-Identifier: MIT +# +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +"""Submit PyHPS jobs to a cluster""" + +import logging +import os +from typing import Optional, Union + +import click + +from ansys.mapdl.core.cli import main + +logger = logging.getLogger() +logging.basicConfig( + format="[%(asctime)s | %(levelname)s] %(message)s", level=logging.DEBUG +) + + +@main.command( + short_help="Submit jobs to an HPC cluster using PyHPS package.", + help="""Submit jobs to an HPC cluster using PyHPS package.""", +) +@click.argument("main_file") +@click.option( + "--name", + default=None, + type=str, + help="""Name of the PyHPS project to be created.""", +) +@click.option( + "--url", + default=None, + type=str, + help="""URL where the HPS cluster is deployed. 
For example: "https://myserver:3000/hps" """, +) +@click.option( + "--user", default=None, type=str, help="Username to login into the HPC cluster." +) +@click.option( + "--password", + default=None, + type=str, + help="Password used to login into the HPC cluster.", +) +@click.option( + "--python", + default=None, + type=str, + help="""Set python version to be used to create the virtual environment and +run the python file. By default it uses python3 (default in cluster).""", +) +@click.option( + "--output_files", + default=None, + type=str, + help="""Set the output files to be monitored. This is optional. """, +) +@click.option( + "--shell_file", + default=None, + type=str, + help="""If desired, you can provide a shell script to execute instead of +the python file. You can call your python file from it if you wish. By default, +it is not used.""", +) +@click.option( + "--requirements_file", + default=None, + type=str, + help="""If provided, the created virtual environment is installed with the +libraries specified in this file. If not, the activated virtual environment is +cloned through a temporary 'pip list' file. If you are using editable package, +it is recommended you attach your own requirement file. """, +) +@click.option( + "--extra_files", + default=None, + type=str, + help="""To upload extra files which can be called from your main python file +(or from the shell file).""", +) +@click.option( + "--config_file", + default=None, + type=str, + help="""To load job configuration from a file.""", +) +@click.option( + "--num_cores", + default=None, + type=str, + help=""" """, +) +@click.option( + "--memory", + default=None, + type=str, + help=""" """, +) +@click.option( + "--disk_space", + default=None, + type=str, + help=""" """, +) +@click.option( + "--exclusive", + default=None, + type=str, + help=""" """, +) +@click.option( + "--max_execution_time", + default=None, + type=str, + help=""" """, +) +@click.option( + "--wait", + default=None, + type=str, + help=""" """, +) +@click.option( + "--save_config_file", + default=False, + type=bool, + is_flag=False, + flag_value=True, + help="""If true, it also write the configuration to the config file, after successfully +submit the job. 
+It overwrites the configuration.""", +) +def submit( + main_file: str, + name: str = None, + url: str = None, + user: str = None, + password: str = None, + python: float = 3.9, + output_files: Optional[Union[str, list]] = None, + shell_file: str = None, + requirements_file: str = None, + extra_files: Optional[Union[str, list]] = None, + config_file: str = None, + num_cores: int = None, + memory: int = None, + disk_space: int = None, + exclusive: bool = None, + max_execution_time: int = None, + wait: bool = False, + save_config_file: bool = False, +): + """Example code: + pymapdl submit my_file.sh my_file_01.py my_file_02 --name="my job" --url="https://10.231.106.91:3000/hps" --user=repuser --password=repuser --python=3.9 + """ + import json + + from ansys.mapdl.core.hpc.pyhps import ( + create_pymapdl_pyhps_job, + get_value_from_json_or_default, + wait_for_completion, + ) + + if config_file is None: + config_file = os.path.join(os.getcwd(), "hps_config.json") + + url = get_value_from_json_or_default(url, config_file, "url", None) + user = get_value_from_json_or_default(user, config_file, "user", None) + password = get_value_from_json_or_default(password, config_file, "password", None) + python = get_value_from_json_or_default(python, config_file, "python", 3) + name = get_value_from_json_or_default(name, config_file, "name", "My PyMAPDL job") + + num_cores = get_value_from_json_or_default(num_cores, config_file, "num_cores", 1) + memory = get_value_from_json_or_default(memory, config_file, "memory", 100) + disk_space = get_value_from_json_or_default( + disk_space, config_file, "disk_space", 100 + ) + exclusive = get_value_from_json_or_default( + exclusive, config_file, "exclusive", False + ) + max_execution_time = get_value_from_json_or_default( + max_execution_time, config_file, "max_execution_time", 1000 + ) + + proj = create_pymapdl_pyhps_job( + main_file=main_file, + name=name, + url=url, + user=user, + password=password, + python=python, + output_files=output_files, + shell_file=shell_file, + requirements_file=requirements_file, + extra_files=extra_files, + config_file=config_file, + num_cores=num_cores, + memory=memory, + disk_space=disk_space, + exclusive=exclusive, + max_execution_time=max_execution_time, + ) + + if save_config_file: + config = { + "url": url, + "user": user, + "password": password, + "python": python, + "name": name, + "num_cores": num_cores, + "memory": memory, + "disk_space": disk_space, + "exclusive": exclusive, + "max_execution_time": max_execution_time, + } + with open(config_file, "w") as fid: + json.dump(config, fid) + + if wait: + print(f"Waiting for project {name} to be completed...") + wait_for_completion(proj, evaluated=True, failed=True) + + +def list_jobs(): + pass + + +def stop_job(): + pass diff --git a/src/ansys/mapdl/core/hpc/__init__.py b/src/ansys/mapdl/core/hpc/__init__.py new file mode 100644 index 0000000000..a95b96476b --- /dev/null +++ b/src/ansys/mapdl/core/hpc/__init__.py @@ -0,0 +1,21 @@ +# Copyright (C) 2024 ANSYS, Inc. and/or its affiliates. 
+# SPDX-License-Identifier: MIT
+#
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to deal
+# in the Software without restriction, including without limitation the rights
+# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in all
+# copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+# SOFTWARE.
diff --git a/src/ansys/mapdl/core/hpc/pyhps.py b/src/ansys/mapdl/core/hpc/pyhps.py
new file mode 100644
index 0000000000..eaeef74538
--- /dev/null
+++ b/src/ansys/mapdl/core/hpc/pyhps.py
@@ -0,0 +1,339 @@
+# Copyright (C) 2024 ANSYS, Inc. and/or its affiliates.
+# SPDX-License-Identifier: MIT
+#
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to deal
+# in the Software without restriction, including without limitation the rights
+# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in all
+# copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+# SOFTWARE.
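+#
+# NOTE: a minimal end-to-end sketch of the PyHPS workflow this module wraps
+# (create a project, attach files, define a task, submit jobs). The URL and
+# credentials are illustrative assumptions taken from the CLI help text, not
+# defaults of this module:
+#
+#   from ansys.hps.client import Client
+#   from ansys.hps.client.jms import JmsApi, Project, ProjectApi
+#
+#   client = Client(url="https://myserver:3000/hps", username="user", password="pass")
+#   proj = JmsApi(client).create_project(Project(name="My PyMAPDL job", priority=1, active=True))
+#   project_api = ProjectApi(client, proj.id)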
+ +"""PyHPS interface to HPC clusters""" +import os +from typing import Optional, Union +from warnings import warn + +from ansys.hps.client import Client +from ansys.hps.client.jms import ( + File, + HpcResources, + JmsApi, + Job, + JobDefinition, + Project, + ProjectApi, + ResourceRequirements, + Software, + TaskDefinition, +) + + +def get_value_from_json_or_default( + arg: str, json_file: str, key: str, default_value: Optional[Union[str, Any]] = None +): + if arg is not None: + print(f"{arg}") + return arg + + if os.path.exists(json_file): + with open(config_file, "r") as fid: + content = fid.read() + + if content: + config = json.load(content) + if key in config: + print(config[key]) + return config[key] + + if default_value is None: + raise ValueError( + f"The argument {arg} is not given through the CLI or config file" + ) + + print(default_value) + return default_value + + +def create_project( + client, + name="My PyMAPDL job", +) -> Project: + jms_api = JmsApi(client) + + proj = Project(name=name, priority=1, active=True) + return jms_api.create_project(proj) + + +def add_files(project_api: ProjectApi, input_files: list, output_files: list): + + input_files_ = [os.path.basename(each) for each in input_files] + + input_files = [ + File( + name=os.path.basename(each_file), + evaluation_path=each_file, + type="text/plain", + ) + for each_file in input_files + ] + + output_files = [ + File( + name=os.path.basename(each_file), + evaluation_path=each_file, + type="text/plain", + collect=True, + monitor=True, + ) + for each_file in output_files + ] + + files = input_files + output_files + files = project_api.create_files(files) + + f_inp = {} + f_out = {} + for f in files: + if f.name in input_files_: + f_inp[f.name] = f.id + else: + f_out[f.name] = f.id + + return f_inp, f_out + + +def create_input_parameters(project_api, input_params=None): + if input_params is not None: + raise NotImplementedError("'Input_parameters' is not implemented") + else: + input_params = [] + return project_api.create_parameter_definitions(input_params) + + +def create_output_parameters(project_api, output_params=None): + if output_params is not None: + raise NotImplementedError("'Output_parameters' is not implemented") + else: + output_params = [] + return project_api.create_parameter_definitions(output_params) + + +def create_param_mappings(project_api, param_mappings=None): + if param_mappings is not None: + raise NotImplementedError("'param_mappings' is not implemented") + else: + param_mappings = [] + return project_api.create_parameter_mappings(param_mappings) + + +def create_task( + project_api, + main_file, + file_input_ids, + file_output_ids, + num_cores, + memory, + disk_space, + exclusive, + max_execution_time, +): + + software = Software(name="Bash", version="0.1") # Overwriting + execution_command = f"%executable% %file:{os.path.basename(main_file)}%" + + # Process step + task_def = TaskDefinition( + name="PyMAPDL_task", + software_requirements=[software], + execution_command=execution_command, + resource_requirements=ResourceRequirements( + num_cores=num_cores, + memory=memory * 1024 * 1024, + disk_space=disk_space * 1024 * 1024, + # distributed=True, + hpc_resources=HpcResources(exclusive=exclusive), + ), + max_execution_time=max_execution_time, + execution_level=0, + num_trials=1, + input_file_ids=file_input_ids, + output_file_ids=file_output_ids, + ) + print(task_def) + + return project_api.create_task_definitions([task_def])[0] + + +def create_job_definition( + project_api, + task_def, + 
input_params, + output_params, + param_mappings, +): + job_def = JobDefinition(name="JobDefinition.1", active=True) + params = input_params + output_params + + job_def.task_definition_ids = [task_def.id] + job_def.parameter_definition_ids = [pd.id for pd in params] + job_def.parameter_mapping_ids = [pm.id for pm in param_mappings] + + job_def = project_api.create_job_definitions([job_def])[0] + + # Refresh the parameters + params = project_api.get_parameter_definitions(id=job_def.parameter_definition_ids) + return job_def + + +def create_jobs(project_api, job_def): + jobs = [ + Job(name=f"Job", values={}, eval_status="pending", job_definition_id=job_def.id) + ] + return project_api.create_jobs(jobs) + + +def get_project_api(client, proj): + return ProjectApi(client, proj.id) + + +def wait_for_completion(project_api, evaluated=True, failed=False, running=False): + eval_status = [] + + if evaluated: + eval_status.append("evaluated") + + if failed: + eval_status.append("evaluated") + + if running: + eval_status.append("running") + + log.debug(f"Waiting on project {proj.id} with criteria: {eval_status}") + while not project_api.get_jobs(eval_status=eval_status): + time.sleep(2) + + +def create_tmp_file(file_name, content): + import tempfile + import uuid + + dir = tempfile.gettempdir() + sub_dir = str(uuid.uuid4()) + + tmp_file = os.path.join(dir, sub_dir, file_name) + os.makedirs(os.path.join(dir, sub_dir)) + + with open(tmp_file, "w") as fid: + fid.write(content) + + return tmp_file + + +def create_pymapdl_pyhps_job( + main_file: str, + name: str = None, + url: str = None, + user: str = None, + password: str = None, + python: float = None, + output_files: Optional[Union[str, list]] = None, + shell_file: str = None, + requirements_file: str = None, + extra_files: Optional[Union[str, list]] = None, + config_file: str = None, + num_cores: int = None, + memory: int = None, + disk_space: int = None, + exclusive: bool = None, + max_execution_time: int = None, +): + + if python not in [2.7, 3.5, 3.6, 3.7, 3.8, 3.9, 3.10, 3.11, 3.12]: + warn("Version of Python might not be supported by the cluster") + + if not requirements_file: + import pkg_resources + + content = "\n".join( + [str(p.as_requirement()) for p in pkg_resources.working_set] + ) + requirements_file = create_tmp_file("requirements.txt", content) + + if not shell_file: + content = f""" +echo "Starting" + +# Starting venv +python{python} -m venv .venv +source .venv/bin/activate + +# Installing requirements +pip install -r {os.path.basename(requirements_file)} + +# Running script +python {os.path.basename(main_file)} + """ + + shell_file = create_tmp_file("main.sh", content) + + if isinstance(extra_files, str): + extra_files = extra_files.split(",") + elif extra_files is None: + extra_files = [] + + if extra_files and not all([os.path.exists(each) for each in extra_files]): + raise ValueError("One or more extra files does not exist.") + + input_files = extra_files + input_files.append(requirements_file) + input_files.append(shell_file) + input_files.append(main_file) + + if not output_files: + output_files = [] + elif isinstance(output_files, str): + output_files = output_files.split(",") + + # Login + client = Client(url=url, username=user, password=password) + + # Setting + proj = create_project(client, name) + + project_api = get_project_api(client, proj) + + file_input_ids, file_output_ids = add_files(project_api, input_files, output_files) + + input_params = create_input_parameters(project_api) + output_params = 
create_output_parameters(project_api) + param_mappings = create_param_mappings(project_api) + + task_def = create_task( + project_api, + main_file, + file_input_ids, + file_output_ids, + num_cores, + memory, + disk_space, + exclusive, + max_execution_time, + ) + + job_def = create_job_definition( + project_api, task_def, input_params, output_params, param_mappings + ) + + return proj From 1da9ca40cc9c58eca66253e54985f7691fdab467 Mon Sep 17 00:00:00 2001 From: German Date: Tue, 14 May 2024 16:49:30 +0200 Subject: [PATCH 02/29] Adding logging It works --- src/ansys/mapdl/core/cli/hpc.py | 42 +++++++++++------- src/ansys/mapdl/core/hpc/pyhps.py | 73 ++++++++++++++++++++++--------- 2 files changed, 80 insertions(+), 35 deletions(-) diff --git a/src/ansys/mapdl/core/cli/hpc.py b/src/ansys/mapdl/core/cli/hpc.py index cf6f89f48f..e0f93e60f7 100644 --- a/src/ansys/mapdl/core/cli/hpc.py +++ b/src/ansys/mapdl/core/cli/hpc.py @@ -30,11 +30,6 @@ from ansys.mapdl.core.cli import main -logger = logging.getLogger() -logging.basicConfig( - format="[%(asctime)s | %(levelname)s] %(message)s", level=logging.DEBUG -) - @main.command( short_help="Submit jobs to an HPC cluster using PyHPS package.", @@ -73,7 +68,7 @@ "--output_files", default=None, type=str, - help="""Set the output files to be monitored. This is optional. """, + help="""Set the output files to be monitored. This is optional.""", ) @click.option( "--shell_file", @@ -109,37 +104,39 @@ "--num_cores", default=None, type=str, - help=""" """, + help="""Set the amount of CPU cores reserved for the job. By default it is 1 CPU.""", ) @click.option( "--memory", default=None, type=str, - help=""" """, + help="""Set the amount of memory RAM in MB reserved for the job. By default it is 100 MB.""", ) @click.option( "--disk_space", default=None, type=str, - help=""" """, + help="""Set the amount of hard drive space in MB reserved for the job. By default it is 100 MB.""", ) @click.option( "--exclusive", default=None, type=str, - help=""" """, + is_flag=False, + flag_value=True, + help="""Set the job to run in a machine exclusively, without sharing it with other jobs. By default it is False""", ) @click.option( "--max_execution_time", default=None, type=str, - help=""" """, + help="""Set the maximum execution time for the job. By default it is zero (unlimited).""", ) @click.option( "--wait", default=None, type=str, - help=""" """, + help="""If True, the terminal waits for job completion before return the control to the user. """, ) @click.option( "--save_config_file", @@ -151,6 +148,14 @@ submit the job. 
It overwrites the configuration.""", ) +@click.option( + "--debug", + default=False, + type=bool, + is_flag=False, + flag_value=True, + help="""If true, it prints the debug logging to the console output.""", +) def submit( main_file: str, name: str = None, @@ -170,6 +175,7 @@ def submit( max_execution_time: int = None, wait: bool = False, save_config_file: bool = False, + debug: bool = False, ): """Example code: pymapdl submit my_file.sh my_file_01.py my_file_02 --name="my job" --url="https://10.231.106.91:3000/hps" --user=repuser --password=repuser --python=3.9 @@ -182,6 +188,12 @@ def submit( wait_for_completion, ) + if debug: + logger = logging.getLogger() + logging.basicConfig( + format="[%(asctime)s | %(levelname)s] %(message)s", level=logging.DEBUG + ) + if config_file is None: config_file = os.path.join(os.getcwd(), "hps_config.json") @@ -200,10 +212,10 @@ def submit( exclusive, config_file, "exclusive", False ) max_execution_time = get_value_from_json_or_default( - max_execution_time, config_file, "max_execution_time", 1000 + max_execution_time, config_file, "max_execution_time", 0 ) - proj = create_pymapdl_pyhps_job( + proj, project_api = create_pymapdl_pyhps_job( main_file=main_file, name=name, url=url, @@ -239,7 +251,7 @@ def submit( json.dump(config, fid) if wait: - print(f"Waiting for project {name} to be completed...") + print(f"Waiting for project {name} (id: {proj.id}) to be completed...") wait_for_completion(proj, evaluated=True, failed=True) diff --git a/src/ansys/mapdl/core/hpc/pyhps.py b/src/ansys/mapdl/core/hpc/pyhps.py index eaeef74538..3a6d47d26d 100644 --- a/src/ansys/mapdl/core/hpc/pyhps.py +++ b/src/ansys/mapdl/core/hpc/pyhps.py @@ -21,8 +21,10 @@ # SOFTWARE. """PyHPS interface to HPC clusters""" +import json +import logging import os -from typing import Optional, Union +from typing import Any, Optional, Union from warnings import warn from ansys.hps.client import Client @@ -39,22 +41,23 @@ TaskDefinition, ) +logger = logging.getLogger() + def get_value_from_json_or_default( arg: str, json_file: str, key: str, default_value: Optional[Union[str, Any]] = None ): if arg is not None: - print(f"{arg}") + logger.debug(f"Using '{arg}' for {key}") return arg if os.path.exists(json_file): - with open(config_file, "r") as fid: - content = fid.read() + if os.path.getsize(json_file) > 0: + with open(json_file, "r") as fid: + config = json.load(fid) - if content: - config = json.load(content) if key in config: - print(config[key]) + logger.debug(f"Using '{config[key]}' for {key}") return config[key] if default_value is None: @@ -62,7 +65,7 @@ def get_value_from_json_or_default( f"The argument {arg} is not given through the CLI or config file" ) - print(default_value) + logger.debug(f"Using '{default_value}' for {key}") return default_value @@ -78,13 +81,18 @@ def create_project( def add_files(project_api: ProjectApi, input_files: list, output_files: list): + # Checks: + if not all([os.path.exists(each) for each in input_files]): + raise ValueError("One or more input files does not exist.") + input_files_ = [os.path.basename(each) for each in input_files] input_files = [ File( name=os.path.basename(each_file), - evaluation_path=each_file, + evaluation_path=os.path.basename(each_file), type="text/plain", + src=each_file, ) for each_file in input_files ] @@ -92,7 +100,7 @@ def add_files(project_api: ProjectApi, input_files: list, output_files: list): output_files = [ File( name=os.path.basename(each_file), - evaluation_path=each_file, + 
evaluation_path=os.path.basename(each_file), type="text/plain", collect=True, monitor=True, @@ -101,6 +109,10 @@ def add_files(project_api: ProjectApi, input_files: list, output_files: list): ] files = input_files + output_files + + for each in files: + logger.debug(each) + files = project_api.create_files(files) f_inp = {} @@ -111,6 +123,8 @@ def add_files(project_api: ProjectApi, input_files: list, output_files: list): else: f_out[f.name] = f.id + logger.debug(f"Input files ids: {f_inp}") + logger.debug(f"Output files ids: {f_out}") return f_inp, f_out @@ -119,6 +133,7 @@ def create_input_parameters(project_api, input_params=None): raise NotImplementedError("'Input_parameters' is not implemented") else: input_params = [] + logger.debug("Setting empty input parameters") return project_api.create_parameter_definitions(input_params) @@ -127,6 +142,7 @@ def create_output_parameters(project_api, output_params=None): raise NotImplementedError("'Output_parameters' is not implemented") else: output_params = [] + logger.debug("Setting empty output parameters") return project_api.create_parameter_definitions(output_params) @@ -135,6 +151,7 @@ def create_param_mappings(project_api, param_mappings=None): raise NotImplementedError("'param_mappings' is not implemented") else: param_mappings = [] + logger.debug("Setting empty parameter mappings") return project_api.create_parameter_mappings(param_mappings) @@ -152,6 +169,7 @@ def create_task( software = Software(name="Bash", version="0.1") # Overwriting execution_command = f"%executable% %file:{os.path.basename(main_file)}%" + logger.debug(f"Using executable: '{execution_command}'") # Process step task_def = TaskDefinition( @@ -159,19 +177,19 @@ def create_task( software_requirements=[software], execution_command=execution_command, resource_requirements=ResourceRequirements( - num_cores=num_cores, - memory=memory * 1024 * 1024, - disk_space=disk_space * 1024 * 1024, + num_cores=int(num_cores), + memory=int(memory) * 1024 * 1024, + disk_space=int(disk_space) * 1024 * 1024, # distributed=True, hpc_resources=HpcResources(exclusive=exclusive), ), max_execution_time=max_execution_time, execution_level=0, num_trials=1, - input_file_ids=file_input_ids, - output_file_ids=file_output_ids, + input_file_ids=list(file_input_ids.values()), + output_file_ids=list(file_output_ids.values()), ) - print(task_def) + logger.debug(f"Task definition: {task_def}") return project_api.create_task_definitions([task_def])[0] @@ -190,6 +208,7 @@ def create_job_definition( job_def.parameter_definition_ids = [pd.id for pd in params] job_def.parameter_mapping_ids = [pm.id for pm in param_mappings] + logger.debug(f"Job definition: {job_def}") job_def = project_api.create_job_definitions([job_def])[0] # Refresh the parameters @@ -201,6 +220,7 @@ def create_jobs(project_api, job_def): jobs = [ Job(name=f"Job", values={}, eval_status="pending", job_definition_id=job_def.id) ] + logger.debug(f"jobs: {jobs}") return project_api.create_jobs(jobs) @@ -263,6 +283,11 @@ def create_pymapdl_pyhps_job( if python not in [2.7, 3.5, 3.6, 3.7, 3.8, 3.9, 3.10, 3.11, 3.12]: warn("Version of Python might not be supported by the cluster") + if not os.path.exists(main_file): + raise ValueError(f"The python file {main_file} must exists.") + + logger.debug(f"Main python file in: {main_file}") + if not requirements_file: import pkg_resources @@ -270,6 +295,7 @@ def create_pymapdl_pyhps_job( [str(p.as_requirement()) for p in pkg_resources.working_set] ) requirements_file = create_tmp_file("requirements.txt", 
content) + logger.debug(f"Requirements file in: {requirements_file}") if not shell_file: content = f""" @@ -287,6 +313,7 @@ def create_pymapdl_pyhps_job( """ shell_file = create_tmp_file("main.sh", content) + logger.debug(f"Shell file in: {shell_file}") if isinstance(extra_files, str): extra_files = extra_files.split(",") @@ -309,20 +336,22 @@ def create_pymapdl_pyhps_job( # Login client = Client(url=url, username=user, password=password) - # Setting + # Setting project proj = create_project(client, name) - project_api = get_project_api(client, proj) + # Setting files file_input_ids, file_output_ids = add_files(project_api, input_files, output_files) + # Setting parameters input_params = create_input_parameters(project_api) output_params = create_output_parameters(project_api) param_mappings = create_param_mappings(project_api) + # Setting tasks task_def = create_task( project_api, - main_file, + shell_file, file_input_ids, file_output_ids, num_cores, @@ -332,8 +361,12 @@ def create_pymapdl_pyhps_job( max_execution_time, ) + # Setting jobs job_def = create_job_definition( project_api, task_def, input_params, output_params, param_mappings ) + jobs = create_jobs(project_api, job_def) + + logger.debug("Project submitted.") - return proj + return proj, project_api From 4f323159c622a0356cf6f9e7807b58832af88287 Mon Sep 17 00:00:00 2001 From: German Date: Tue, 14 May 2024 17:47:02 +0200 Subject: [PATCH 03/29] fixes to the CLI help --- src/ansys/mapdl/core/cli/hpc.py | 16 +++++++++------- 1 file changed, 9 insertions(+), 7 deletions(-) diff --git a/src/ansys/mapdl/core/cli/hpc.py b/src/ansys/mapdl/core/cli/hpc.py index e0f93e60f7..d9c3ae8464 100644 --- a/src/ansys/mapdl/core/cli/hpc.py +++ b/src/ansys/mapdl/core/cli/hpc.py @@ -68,7 +68,7 @@ "--output_files", default=None, type=str, - help="""Set the output files to be monitored. This is optional.""", + help="""Set the output files to be monitored.""", ) @click.option( "--shell_file", @@ -85,7 +85,7 @@ help="""If provided, the created virtual environment is installed with the libraries specified in this file. If not, the activated virtual environment is cloned through a temporary 'pip list' file. If you are using editable package, -it is recommended you attach your own requirement file. """, +it is recommended you attach your own requirement file using ``pip freeze`` """, ) @click.option( "--extra_files", @@ -136,7 +136,9 @@ "--wait", default=None, type=str, - help="""If True, the terminal waits for job completion before return the control to the user. """, + is_flag=False, + flag_value=True, + help="""Set the terminal to wait for job completion before return the control to the user. """, ) @click.option( "--save_config_file", @@ -144,9 +146,9 @@ type=bool, is_flag=False, flag_value=True, - help="""If true, it also write the configuration to the config file, after successfully -submit the job. -It overwrites the configuration.""", + help="""Writes the configuration to the config file, after successfully +submit the job. It overwrites the configuration file. 
+The configuration file path is given using ``config_file`` argument.""", ) @click.option( "--debug", @@ -154,7 +156,7 @@ type=bool, is_flag=False, flag_value=True, - help="""If true, it prints the debug logging to the console output.""", + help="""Set PyMAPDL to prints the debug logging to the console output.""", ) def submit( main_file: str, From 08d20553dafecfe7b5ddc7366da663a98d9bdfb5 Mon Sep 17 00:00:00 2001 From: German Date: Tue, 14 May 2024 18:05:55 +0200 Subject: [PATCH 04/29] Fix codacy issues --- src/ansys/mapdl/core/cli/hpc.py | 10 ++++++++-- src/ansys/mapdl/core/hpc/pyhps.py | 13 +++++++------ 2 files changed, 15 insertions(+), 8 deletions(-) diff --git a/src/ansys/mapdl/core/cli/hpc.py b/src/ansys/mapdl/core/cli/hpc.py index d9c3ae8464..a2a453c7c8 100644 --- a/src/ansys/mapdl/core/cli/hpc.py +++ b/src/ansys/mapdl/core/cli/hpc.py @@ -30,6 +30,8 @@ from ansys.mapdl.core.cli import main +logger = logging.getLogger() + @main.command( short_help="Submit jobs to an HPC cluster using PyHPS package.", @@ -191,13 +193,13 @@ def submit( ) if debug: - logger = logging.getLogger() logging.basicConfig( format="[%(asctime)s | %(levelname)s] %(message)s", level=logging.DEBUG ) if config_file is None: config_file = os.path.join(os.getcwd(), "hps_config.json") + logger.debug(f"Using default hps configuration file: {config_file}") url = get_value_from_json_or_default(url, config_file, "url", None) user = get_value_from_json_or_default(user, config_file, "user", None) @@ -217,7 +219,7 @@ def submit( max_execution_time, config_file, "max_execution_time", 0 ) - proj, project_api = create_pymapdl_pyhps_job( + proj, _ = create_pymapdl_pyhps_job( main_file=main_file, name=name, url=url, @@ -249,6 +251,10 @@ def submit( "exclusive": exclusive, "max_execution_time": max_execution_time, } + + logger.debug( + f"Saving the following configuration to the config file ({config_file}):\n{config}" + ) with open(config_file, "w") as fid: json.dump(config, fid) diff --git a/src/ansys/mapdl/core/hpc/pyhps.py b/src/ansys/mapdl/core/hpc/pyhps.py index 3a6d47d26d..eeef66c6b4 100644 --- a/src/ansys/mapdl/core/hpc/pyhps.py +++ b/src/ansys/mapdl/core/hpc/pyhps.py @@ -24,6 +24,7 @@ import json import logging import os +import time from typing import Any, Optional, Union from warnings import warn @@ -218,7 +219,7 @@ def create_job_definition( def create_jobs(project_api, job_def): jobs = [ - Job(name=f"Job", values={}, eval_status="pending", job_definition_id=job_def.id) + Job(name="Job", values={}, eval_status="pending", job_definition_id=job_def.id) ] logger.debug(f"jobs: {jobs}") return project_api.create_jobs(jobs) @@ -240,7 +241,7 @@ def wait_for_completion(project_api, evaluated=True, failed=False, running=False if running: eval_status.append("running") - log.debug(f"Waiting on project {proj.id} with criteria: {eval_status}") + logger.debug(f"Waiting on project {proj.id} with criteria: {eval_status}") while not project_api.get_jobs(eval_status=eval_status): time.sleep(2) @@ -249,11 +250,11 @@ def create_tmp_file(file_name, content): import tempfile import uuid - dir = tempfile.gettempdir() + dir_ = tempfile.gettempdir() sub_dir = str(uuid.uuid4()) - tmp_file = os.path.join(dir, sub_dir, file_name) - os.makedirs(os.path.join(dir, sub_dir)) + tmp_file = os.path.join(dir_, sub_dir, file_name) + os.makedirs(os.path.join(dir_, sub_dir)) with open(tmp_file, "w") as fid: fid.write(content) @@ -366,7 +367,7 @@ def create_pymapdl_pyhps_job( project_api, task_def, input_params, output_params, param_mappings ) jobs = 
create_jobs(project_api, job_def) - + logger.debug(f"Jobs: {jobs}") logger.debug("Project submitted.") return proj, project_api From d70b090633998d4a39f333bcfcacf24ae2e12df0 Mon Sep 17 00:00:00 2001 From: German <28149841+germa89@users.noreply.github.com> Date: Thu, 16 May 2024 12:35:26 +0200 Subject: [PATCH 05/29] Apply suggestions from code review Co-authored-by: Kathy Pippert <84872299+PipKat@users.noreply.github.com> --- src/ansys/mapdl/core/cli/__init__.py | 2 +- src/ansys/mapdl/core/cli/hpc.py | 56 ++++++++++++++-------------- src/ansys/mapdl/core/hpc/pyhps.py | 40 ++++++++++---------- 3 files changed, 49 insertions(+), 49 deletions(-) diff --git a/src/ansys/mapdl/core/cli/__init__.py b/src/ansys/mapdl/core/cli/__init__.py index e2cd48c965..5b3a8c2400 100644 --- a/src/ansys/mapdl/core/cli/__init__.py +++ b/src/ansys/mapdl/core/cli/__init__.py @@ -57,7 +57,7 @@ def main(ctx): def old_pymapdl_convert_script_entry_point(): print( - """This CLI function has been deprecated. Please use instead: + """This CLI function has been deprecated. Use the following instead: pymapdl convert input_file.inp -o output_file.out ... diff --git a/src/ansys/mapdl/core/cli/hpc.py b/src/ansys/mapdl/core/cli/hpc.py index a2a453c7c8..f87fcfa0bf 100644 --- a/src/ansys/mapdl/core/cli/hpc.py +++ b/src/ansys/mapdl/core/cli/hpc.py @@ -34,15 +34,15 @@ @main.command( - short_help="Submit jobs to an HPC cluster using PyHPS package.", - help="""Submit jobs to an HPC cluster using PyHPS package.""", + short_help="Submit jobs to an HPC cluster using PyHPS.", + help="""Submit jobs to an HPC cluster using PyHPS.""", ) @click.argument("main_file") @click.option( "--name", default=None, type=str, - help="""Name of the PyHPS project to be created.""", + help="""Name of the PyHPS project to create.""", ) @click.option( "--url", @@ -57,68 +57,68 @@ "--password", default=None, type=str, - help="Password used to login into the HPC cluster.", + help="Password for logging into the HPC cluster.", ) @click.option( "--python", default=None, type=str, - help="""Set python version to be used to create the virtual environment and -run the python file. By default it uses python3 (default in cluster).""", + help="""Python version to use to create the virtual environment and +run the Python file. Python3 is used by default in the cluster.""", ) @click.option( "--output_files", default=None, type=str, - help="""Set the output files to be monitored.""", + help="""Output files to monitor.""", ) @click.option( "--shell_file", default=None, type=str, - help="""If desired, you can provide a shell script to execute instead of -the python file. You can call your python file from it if you wish. By default, -it is not used.""", + help="""Optional shell script to execute instead of +the Python file. You can call your Python file from it if you want. By default, +this option is not used.""", ) @click.option( "--requirements_file", default=None, type=str, - help="""If provided, the created virtual environment is installed with the -libraries specified in this file. If not, the activated virtual environment is -cloned through a temporary 'pip list' file. If you are using editable package, -it is recommended you attach your own requirement file using ``pip freeze`` """, + help="""Optional created virtual environment to install with the +libraries specified in this requirements file. If not, the activated virtual environment is +cloned through a temporary ``pip list`` file. 
If you are using an editable package, +you should attach your own requirement file using ``pip freeze`` """, ) @click.option( "--extra_files", default=None, type=str, - help="""To upload extra files which can be called from your main python file + help="""Extra files to upload that can be called from your main Python file (or from the shell file).""", ) @click.option( "--config_file", default=None, type=str, - help="""To load job configuration from a file.""", + help="""File to load the job configuration from.""", ) @click.option( "--num_cores", default=None, type=str, - help="""Set the amount of CPU cores reserved for the job. By default it is 1 CPU.""", + help="""Number of CPU cores reserved for the job. The default is ``1``""", ) @click.option( "--memory", default=None, type=str, - help="""Set the amount of memory RAM in MB reserved for the job. By default it is 100 MB.""", + help="""Amount of memory (RAM) in MB reserved for the job. The default is ``100 MB``.""", ) @click.option( "--disk_space", default=None, type=str, - help="""Set the amount of hard drive space in MB reserved for the job. By default it is 100 MB.""", + help="""Amount of hard drive space in MB reserved for the job. The default is ``100 MB``.""", ) @click.option( "--exclusive", @@ -126,13 +126,13 @@ type=str, is_flag=False, flag_value=True, - help="""Set the job to run in a machine exclusively, without sharing it with other jobs. By default it is False""", + help=""Whether the job is to run on a machine that is running no other jobs running. The default is ``False``.""", ) @click.option( "--max_execution_time", default=None, type=str, - help="""Set the maximum execution time for the job. By default it is zero (unlimited).""", + help="""Maximum execution time for the job. The default is zero (unlimited).""", ) @click.option( "--wait", @@ -140,7 +140,7 @@ type=str, is_flag=False, flag_value=True, - help="""Set the terminal to wait for job completion before return the control to the user. """, + help="""Whether the terminal is to wait for job completion before returning control to the user. """, ) @click.option( "--save_config_file", @@ -148,9 +148,9 @@ type=bool, is_flag=False, flag_value=True, - help="""Writes the configuration to the config file, after successfully -submit the job. It overwrites the configuration file. -The configuration file path is given using ``config_file`` argument.""", + help="""Whether to write the configuration to the configuration file after successfully +submitting the job. The default is ``False``. If ``True``, the configuration file is overwritten. 
+You use the ``config_file`` argument to give the path for the configuration file.""", ) @click.option( "--debug", @@ -158,7 +158,7 @@ type=bool, is_flag=False, flag_value=True, - help="""Set PyMAPDL to prints the debug logging to the console output.""", + help="""Whether PyMAPDL is to print debug logging to the console output.""", ) def submit( main_file: str, @@ -199,7 +199,7 @@ def submit( if config_file is None: config_file = os.path.join(os.getcwd(), "hps_config.json") - logger.debug(f"Using default hps configuration file: {config_file}") + logger.debug(f"Using default HPS configuration file: {config_file}") url = get_value_from_json_or_default(url, config_file, "url", None) user = get_value_from_json_or_default(user, config_file, "user", None) @@ -259,7 +259,7 @@ def submit( json.dump(config, fid) if wait: - print(f"Waiting for project {name} (id: {proj.id}) to be completed...") + print(f"Waiting for project {name} (id: {proj.id}) evaluation to complete...") wait_for_completion(proj, evaluated=True, failed=True) diff --git a/src/ansys/mapdl/core/hpc/pyhps.py b/src/ansys/mapdl/core/hpc/pyhps.py index eeef66c6b4..2c74363cef 100644 --- a/src/ansys/mapdl/core/hpc/pyhps.py +++ b/src/ansys/mapdl/core/hpc/pyhps.py @@ -63,7 +63,7 @@ def get_value_from_json_or_default( if default_value is None: raise ValueError( - f"The argument {arg} is not given through the CLI or config file" + f"The argument {arg} is not given through the CLI or config file." ) logger.debug(f"Using '{default_value}' for {key}") @@ -84,7 +84,7 @@ def add_files(project_api: ProjectApi, input_files: list, output_files: list): # Checks: if not all([os.path.exists(each) for each in input_files]): - raise ValueError("One or more input files does not exist.") + raise ValueError("One or more input files do not exist.") input_files_ = [os.path.basename(each) for each in input_files] @@ -124,35 +124,35 @@ def add_files(project_api: ProjectApi, input_files: list, output_files: list): else: f_out[f.name] = f.id - logger.debug(f"Input files ids: {f_inp}") - logger.debug(f"Output files ids: {f_out}") + logger.debug(f"Input files IDs: {f_inp}") + logger.debug(f"Output files IDs: {f_out}") return f_inp, f_out def create_input_parameters(project_api, input_params=None): if input_params is not None: - raise NotImplementedError("'Input_parameters' is not implemented") + raise NotImplementedError("'Input_parameters' is not implemented.") else: input_params = [] - logger.debug("Setting empty input parameters") + logger.debug("Setting empty input parameters.") return project_api.create_parameter_definitions(input_params) def create_output_parameters(project_api, output_params=None): if output_params is not None: - raise NotImplementedError("'Output_parameters' is not implemented") + raise NotImplementedError("'Output_parameters' is not implemented.") else: output_params = [] - logger.debug("Setting empty output parameters") + logger.debug("Setting empty output parameters.") return project_api.create_parameter_definitions(output_params) def create_param_mappings(project_api, param_mappings=None): if param_mappings is not None: - raise NotImplementedError("'param_mappings' is not implemented") + raise NotImplementedError("'param_mappings' is not implemented.") else: param_mappings = [] - logger.debug("Setting empty parameter mappings") + logger.debug("Setting empty parameter mappings.") return project_api.create_parameter_mappings(param_mappings) @@ -282,12 +282,12 @@ def create_pymapdl_pyhps_job( ): if python not in [2.7, 3.5, 3.6, 3.7, 3.8, 
3.9, 3.10, 3.11, 3.12]: - warn("Version of Python might not be supported by the cluster") + warn("Version of Python might not be supported by the cluster.") if not os.path.exists(main_file): - raise ValueError(f"The python file {main_file} must exists.") + raise ValueError(f"The Python file {main_file} must exist.") - logger.debug(f"Main python file in: {main_file}") + logger.debug(f"Main Python file is in: {main_file}.") if not requirements_file: import pkg_resources @@ -302,14 +302,14 @@ def create_pymapdl_pyhps_job( content = f""" echo "Starting" -# Starting venv +# Start venv python{python} -m venv .venv source .venv/bin/activate -# Installing requirements +# Install requirements pip install -r {os.path.basename(requirements_file)} -# Running script +# Run script python {os.path.basename(main_file)} """ @@ -334,7 +334,7 @@ def create_pymapdl_pyhps_job( elif isinstance(output_files, str): output_files = output_files.split(",") - # Login + # Log in client = Client(url=url, username=user, password=password) # Setting project @@ -344,12 +344,12 @@ def create_pymapdl_pyhps_job( # Setting files file_input_ids, file_output_ids = add_files(project_api, input_files, output_files) - # Setting parameters + # Set parameters input_params = create_input_parameters(project_api) output_params = create_output_parameters(project_api) param_mappings = create_param_mappings(project_api) - # Setting tasks + # Set tasks task_def = create_task( project_api, shell_file, @@ -362,7 +362,7 @@ def create_pymapdl_pyhps_job( max_execution_time, ) - # Setting jobs + # Set jobs job_def = create_job_definition( project_api, task_def, input_params, output_params, param_mappings ) From aa8a23cd58627763989bc846870d3aa1814312db Mon Sep 17 00:00:00 2001 From: German <28149841+germa89@users.noreply.github.com> Date: Thu, 16 May 2024 12:45:56 +0200 Subject: [PATCH 06/29] Apply suggestions from code review --- src/ansys/mapdl/core/cli/hpc.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/ansys/mapdl/core/cli/hpc.py b/src/ansys/mapdl/core/cli/hpc.py index f87fcfa0bf..9db7f31e71 100644 --- a/src/ansys/mapdl/core/cli/hpc.py +++ b/src/ansys/mapdl/core/cli/hpc.py @@ -131,7 +131,7 @@ @click.option( "--max_execution_time", default=None, - type=str, + type=int, help="""Maximum execution time for the job. The default is zero (unlimited).""", ) @click.option( From 660e717332997b43eae25d484453b5abdc48a00a Mon Sep 17 00:00:00 2001 From: German Date: Thu, 16 May 2024 13:20:18 +0200 Subject: [PATCH 07/29] Adding suggestions --- src/ansys/mapdl/core/cli/hpc.py | 13 ++++++++----- src/ansys/mapdl/core/hpc/pyhps.py | 6 +++--- 2 files changed, 11 insertions(+), 8 deletions(-) diff --git a/src/ansys/mapdl/core/cli/hpc.py b/src/ansys/mapdl/core/cli/hpc.py index 9db7f31e71..383cd7e1b4 100644 --- a/src/ansys/mapdl/core/cli/hpc.py +++ b/src/ansys/mapdl/core/cli/hpc.py @@ -35,7 +35,13 @@ @main.command( short_help="Submit jobs to an HPC cluster using PyHPS.", - help="""Submit jobs to an HPC cluster using PyHPS.""", + help="""Submit jobs to an HPC cluster using PyHPS. + +Example +------- + +$ pymapdl submit my_file_01.py --requirements_file=requirements.txt --shell_file=main.sh --name="my job" --url="https://123.456.789.101:3000/hps" --user=user --password=password --python=3.9 +""", ) @click.argument("main_file") @click.option( @@ -126,7 +132,7 @@ type=str, is_flag=False, flag_value=True, - help=""Whether the job is to run on a machine that is running no other jobs running. 
The default is ``False``.""", + help="""Whether the job is to run on a machine that is running no other jobs running. The default is ``False``.""", ) @click.option( "--max_execution_time", @@ -181,9 +187,6 @@ def submit( save_config_file: bool = False, debug: bool = False, ): - """Example code: - pymapdl submit my_file.sh my_file_01.py my_file_02 --name="my job" --url="https://10.231.106.91:3000/hps" --user=repuser --password=repuser --python=3.9 - """ import json from ansys.mapdl.core.hpc.pyhps import ( diff --git a/src/ansys/mapdl/core/hpc/pyhps.py b/src/ansys/mapdl/core/hpc/pyhps.py index 2c74363cef..6b1978ff53 100644 --- a/src/ansys/mapdl/core/hpc/pyhps.py +++ b/src/ansys/mapdl/core/hpc/pyhps.py @@ -246,7 +246,7 @@ def wait_for_completion(project_api, evaluated=True, failed=False, running=False time.sleep(2) -def create_tmp_file(file_name, content): +def _create_tmp_file(file_name, content): import tempfile import uuid @@ -295,7 +295,7 @@ def create_pymapdl_pyhps_job( content = "\n".join( [str(p.as_requirement()) for p in pkg_resources.working_set] ) - requirements_file = create_tmp_file("requirements.txt", content) + requirements_file = _create_tmp_file("requirements.txt", content) logger.debug(f"Requirements file in: {requirements_file}") if not shell_file: @@ -313,7 +313,7 @@ def create_pymapdl_pyhps_job( python {os.path.basename(main_file)} """ - shell_file = create_tmp_file("main.sh", content) + shell_file = _create_tmp_file("main.sh", content) logger.debug(f"Shell file in: {shell_file}") if isinstance(extra_files, str): From 33ac15a09e3d01f3a46f6dbfde0da66e2a80a5e1 Mon Sep 17 00:00:00 2001 From: German Date: Fri, 17 May 2024 18:25:02 +0200 Subject: [PATCH 08/29] Removing warnings. Adding project url print --- src/ansys/mapdl/core/cli/hpc.py | 8 ++++++-- src/ansys/mapdl/core/hpc/pyhps.py | 6 +++--- 2 files changed, 9 insertions(+), 5 deletions(-) diff --git a/src/ansys/mapdl/core/cli/hpc.py b/src/ansys/mapdl/core/cli/hpc.py index 383cd7e1b4..8ab4b39c48 100644 --- a/src/ansys/mapdl/core/cli/hpc.py +++ b/src/ansys/mapdl/core/cli/hpc.py @@ -68,7 +68,7 @@ @click.option( "--python", default=None, - type=str, + type=float, help="""Python version to use to create the virtual environment and run the Python file. 
Python3 is used by default in the cluster.""", ) @@ -172,7 +172,7 @@ def submit( url: str = None, user: str = None, password: str = None, - python: float = 3.9, + python: Optional[float] = None, output_files: Optional[Union[str, list]] = None, shell_file: str = None, requirements_file: str = None, @@ -261,6 +261,10 @@ def submit( with open(config_file, "w") as fid: json.dump(config, fid) + print( + f"You can check your project by visiting: {url}/projects#/projects/{proj.id}/jobs" + ) + if wait: print(f"Waiting for project {name} (id: {proj.id}) evaluation to complete...") wait_for_completion(proj, evaluated=True, failed=True) diff --git a/src/ansys/mapdl/core/hpc/pyhps.py b/src/ansys/mapdl/core/hpc/pyhps.py index 6b1978ff53..3ab23d039d 100644 --- a/src/ansys/mapdl/core/hpc/pyhps.py +++ b/src/ansys/mapdl/core/hpc/pyhps.py @@ -281,8 +281,8 @@ def create_pymapdl_pyhps_job( max_execution_time: int = None, ): - if python not in [2.7, 3.5, 3.6, 3.7, 3.8, 3.9, 3.10, 3.11, 3.12]: - warn("Version of Python might not be supported by the cluster.") + if python not in [2, 2.7, 3, 3.5, 3.6, 3.7, 3.8, 3.9, 3.10, 3.11, 3.12]: + warn(f"Version of Python {python} might not be supported by the cluster.") if not os.path.exists(main_file): raise ValueError(f"The Python file {main_file} must exist.") @@ -335,7 +335,7 @@ def create_pymapdl_pyhps_job( output_files = output_files.split(",") # Log in - client = Client(url=url, username=user, password=password) + client = Client(url=url, username=user, password=password, verify=False) # Setting project proj = create_project(client, name) From 4055c60e1934a2c9d7a96c0f56c390913c7986c9 Mon Sep 17 00:00:00 2001 From: German <28149841+germa89@users.noreply.github.com> Date: Wed, 22 May 2024 11:48:38 +0200 Subject: [PATCH 09/29] Adding inputs and outputs (#3112) * Adding inputs and outputs * Changing arguments order * Apply suggestions from code review Co-authored-by: Kathy Pippert <84872299+PipKat@users.noreply.github.com> * Update src/ansys/mapdl/core/cli/hpc.py --------- Co-authored-by: Kathy Pippert <84872299+PipKat@users.noreply.github.com> --- src/ansys/mapdl/core/cli/hpc.py | 59 ++++++--- src/ansys/mapdl/core/hpc/pyhps.py | 196 +++++++++++++++++++++++++----- 2 files changed, 210 insertions(+), 45 deletions(-) diff --git a/src/ansys/mapdl/core/cli/hpc.py b/src/ansys/mapdl/core/cli/hpc.py index 8ab4b39c48..92f694e51f 100644 --- a/src/ansys/mapdl/core/cli/hpc.py +++ b/src/ansys/mapdl/core/cli/hpc.py @@ -35,12 +35,13 @@ @main.command( short_help="Submit jobs to an HPC cluster using PyHPS.", - help="""Submit jobs to an HPC cluster using PyHPS. + help=""" + Submit jobs to an HPC cluster using PyHPS. + Example -------- -$ pymapdl submit my_file_01.py --requirements_file=requirements.txt --shell_file=main.sh --name="my job" --url="https://123.456.789.101:3000/hps" --user=user --password=password --python=3.9 +$ pymapdl submit my_file_01.py --requirements_file=requirements.txt --shell_file=main.sh --name="my job" --user=user --password=password --url="https://123.456.789.101:3000/hps" """, ) @click.argument("main_file") @@ -72,11 +73,34 @@ help="""Python version to use to create the virtual environment and run the Python file. Python3 is used by default in the cluster.""", ) +@click.option( + "--inputs", + default=None, + type=str, + help=""" +Input arguments for the simulation. You can specify several arguments by +joining them with commas. Thus, strings defined in this way cannot contain +commas. Only integers, floats and strings are allowed. 
+PyMAPDL converts these inputs to integer or float values when possible. +Otherwise, they remain as strings. You can change these arguments on the +HPS website. For example, ``--inputs="force=123,value='mystring'"``. + """, +) +@click.option( + "--outputs", + default=None, + type=str, + help="""Output parameters. You can specify several arguments +by joining them with commas. +For example, ``--outputs="displacements,nodes"``.""", +) @click.option( "--output_files", default=None, type=str, - help="""Output files to monitor.""", + help="""Output files to monitor. Because you use commas to separate +the file names, the names cannot contain commas. For example, +``--output_files="results.out,data.xls"``.""", ) @click.option( "--shell_file", @@ -108,6 +132,17 @@ type=str, help="""File to load the job configuration from.""", ) +@click.option( + "--save_config_file", + default=False, + type=bool, + is_flag=False, + flag_value=True, + help=""" +Whether to write the configuration to the configuration file (specified +using the ``config_file`` argument) after the job has been successfully submitted. +The default is ``False``. If ``True``, and the file already exists, the configuration file is overwritten.""", +) @click.option( "--num_cores", default=None, @@ -148,16 +183,6 @@ flag_value=True, help="""Whether the terminal is to wait for job completion before returning control to the user. """, ) -@click.option( - "--save_config_file", - default=False, - type=bool, - is_flag=False, - flag_value=True, - help="""Whether to write the configuration to the configuration file after successfully -submitting the job. The default is ``False``. If ``True``, the configuration file is overwritten. -You use the ``config_file`` argument to give the path for the configuration file.""", -) @click.option( "--debug", default=False, @@ -173,18 +198,20 @@ def submit( user: str = None, password: str = None, python: Optional[float] = None, + inputs: Optional[str] = None, + outputs: Optional[str] = None, output_files: Optional[Union[str, list]] = None, shell_file: str = None, requirements_file: str = None, extra_files: Optional[Union[str, list]] = None, config_file: str = None, + save_config_file: bool = False, num_cores: int = None, memory: int = None, disk_space: int = None, exclusive: bool = None, max_execution_time: int = None, wait: bool = False, - save_config_file: bool = False, debug: bool = False, ): import json @@ -229,6 +256,8 @@ def submit( user=user, password=password, python=python, + inputs=inputs, + outputs=outputs, output_files=output_files, shell_file=shell_file, requirements_file=requirements_file, diff --git a/src/ansys/mapdl/core/hpc/pyhps.py b/src/ansys/mapdl/core/hpc/pyhps.py index 3ab23d039d..78f952d301 100644 --- a/src/ansys/mapdl/core/hpc/pyhps.py +++ b/src/ansys/mapdl/core/hpc/pyhps.py @@ -31,14 +31,17 @@ from ansys.hps.client import Client from ansys.hps.client.jms import ( File, + FloatParameterDefinition, HpcResources, JmsApi, Job, JobDefinition, + ParameterMapping, Project, ProjectApi, ResourceRequirements, Software, + StringParameterDefinition, TaskDefinition, ) @@ -129,31 +132,85 @@ def add_files(project_api: ProjectApi, input_files: list, output_files: list): return f_inp, f_out -def create_input_parameters(project_api, input_params=None): - if input_params is not None: - raise NotImplementedError("'Input_parameters' is not implemented.") - else: - input_params = [] +def create_parameters( + project_api, + inputs: Optional[list[str]] = None, + input_file_id: str = None, + outputs: 
Optional[list[str]] = None, + output_file_id: str = None, +): + if inputs is None: + inputs = [] logger.debug("Setting empty input parameters.") - return project_api.create_parameter_definitions(input_params) + def is_float(num): + try: + float(num) + return True + except ValueError: + return False + + input_params = [] + param_mappings = [] + + for each_input_parm in inputs: + name, value = each_input_parm.split("=") + if is_float(value): + parm = FloatParameterDefinition( + name=name, + display_text=name, + default=value, + ) + else: + parm = StringParameterDefinition( + name=name, + display_text=name, + default=value, + ) + + # Mapping + param_map = ParameterMapping( + key_string=name, + tokenizer="=", + parameter_definition_id=parm.id, + file_id=input_file_id, + ) -def create_output_parameters(project_api, output_params=None): - if output_params is not None: - raise NotImplementedError("'Output_parameters' is not implemented.") - else: - output_params = [] - logger.debug("Setting empty output parameters.") - return project_api.create_parameter_definitions(output_params) + logger.debug(f"Output parameter: {name}\n{parm}\nMapping: {param_map}") + input_params.append(parm) + param_mappings.append(param_map) + logger.debug(f"Input parameters:\n{input_params}") + input_params = project_api.create_parameter_definitions(input_params) -def create_param_mappings(project_api, param_mappings=None): - if param_mappings is not None: - raise NotImplementedError("'param_mappings' is not implemented.") - else: - param_mappings = [] - logger.debug("Setting empty parameter mappings.") - return project_api.create_parameter_mappings(param_mappings) + output_params = [] + for each_output_parm in outputs: + # output + name = each_output_parm + outparm = StringParameterDefinition(name=name, display_text=name) + + output_params.append(outparm) + + logger.debug(f"Output parameters:\n{output_params}") + output_params = project_api.create_parameter_definitions(output_params) + + for each_output_parm, outparm in zip(outputs, output_params): + name = each_output_parm + # mapping + parm_map = ParameterMapping( + key_string=name, + tokenizer="=", + parameter_definition_id=outparm.id, + file_id=output_file_id, + ) + logger.debug(f"Output parameter: {name}\n{outparm}\nMapping: {parm_map}") + + param_mappings.append(parm_map) + + logger.debug(f"Mapping parameters:\n{param_mappings}") + param_mappings = project_api.create_parameter_mappings(param_mappings) + + return input_params, output_params, param_mappings def create_task( @@ -269,6 +326,8 @@ def create_pymapdl_pyhps_job( user: str = None, password: str = None, python: float = None, + inputs: str = None, + outputs: str = None, output_files: Optional[Union[str, list]] = None, shell_file: str = None, requirements_file: str = None, @@ -289,6 +348,60 @@ def create_pymapdl_pyhps_job( logger.debug(f"Main Python file is in: {main_file}.") + if inputs is None: + inputs = [] + if outputs is None: + outputs = [] + + input_file = None + if inputs: + if isinstance(inputs, str): + inputs = inputs.split(",") + + if inputs: + input_file = "input.inp" + + content = "\n".join(inputs) + input_file = _create_tmp_file(input_file, content) + logger.debug(f"Input file in: {input_file}") + + output_parms_file = None + if outputs: + output_parms_file = "output.out" + if isinstance(outputs, str): + outputs = outputs.split(",") + + executed_pyscript = os.path.basename(main_file) + + if inputs or outputs: + wrapper_file = "main_execution.py" + executed_pyscript = wrapper_file + content = "" + 
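+        # What gets generated below: a plain-Python wrapper gluing three
+        # stages together. Illustrative only -- for inputs "force=123" and
+        # outputs "displacement", with a main file "main.py", the generated
+        # main_execution.py reduces to:
+        #
+        #   exec(open("input.inp").read())   # defines force = 123
+        #   exec(open("main.py").read())     # user script; expected to set 'displacement'
+        #   with open("output.out", "w") as fid:
+        #       fid.write(f"displacement={displacement}")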
+ if inputs: + content += f""" +# Read inputs +exec(open("{os.path.basename(input_file)}").read()) +""" + + content += f""" +# Execute main file +exec(open("{os.path.basename(main_file)}").read()) +""" + + if outputs: + content += f""" +with open("{output_parms_file}", "w") as fid: +""" + b0 = "{" + b1 = "}" + + for each in outputs: + content += f""" fid.write(f"{each}={b0}{each}{b1}")\n""" + + wrapper_file = _create_tmp_file(wrapper_file, content) + logger.debug(f"Wrapper file in: {wrapper_file}") + if not requirements_file: import pkg_resources @@ -310,7 +423,7 @@ def create_pymapdl_pyhps_job( pip install -r {os.path.basename(requirements_file)} # Run script -python {os.path.basename(main_file)} +python {executed_pyscript} """ shell_file = _create_tmp_file("main.sh", content) @@ -321,18 +434,27 @@ def create_pymapdl_pyhps_job( elif extra_files is None: extra_files = [] + if not output_files: + output_files = [] + elif isinstance(output_files, str): + output_files = output_files.split(",") + if extra_files and not all([os.path.exists(each) for each in extra_files]): - raise ValueError("One or more extra files does not exist.") + raise ValueError("One or more extra files do not exist.") input_files = extra_files input_files.append(requirements_file) input_files.append(shell_file) input_files.append(main_file) - if not output_files: - output_files = [] - elif isinstance(output_files, str): - output_files = output_files.split(",") + if inputs: + input_files.append(input_file) + + if outputs: + output_files.append(output_parms_file) + + if inputs or outputs: + input_files.append(wrapper_file) # Log in client = Client(url=url, username=user, password=password, verify=False) @@ -341,13 +463,27 @@ def create_pymapdl_pyhps_job( proj = create_project(client, name) project_api = get_project_api(client, proj) - # Setting files + # Set files file_input_ids, file_output_ids = add_files(project_api, input_files, output_files) + if inputs: + input_file_id = file_input_ids[os.path.basename(input_file)] + else: + input_file_id = None + + if outputs: + output_parms_file_id = file_output_ids[os.path.basename(output_parms_file)] + else: + output_parms_file_id = None + # Set parameters - input_params = create_input_parameters(project_api) - output_params = create_output_parameters(project_api) - param_mappings = create_param_mappings(project_api) + input_params, output_params, param_mappings = create_parameters( + project_api, + inputs=inputs, + input_file_id=input_file_id, + outputs=outputs, + output_file_id=output_parms_file_id, + ) # Set tasks task_def = create_task( From f23da00bf97220bbf6f4872d0fc2435a54e087c5 Mon Sep 17 00:00:00 2001 From: German <28149841+germa89@users.noreply.github.com> Date: Wed, 22 May 2024 12:33:57 +0200 Subject: [PATCH 10/29] Adding APDL jobs support (#3111) * Adding inputs and outputs * Supporting pure APDL jobs * Changing arguments order * Allowing force mode * Apply suggestions from code review Co-authored-by: Kathy Pippert <84872299+PipKat@users.noreply.github.com> --------- Co-authored-by: Kathy Pippert <84872299+PipKat@users.noreply.github.com> --- src/ansys/mapdl/core/cli/hpc.py | 13 ++++++ src/ansys/mapdl/core/hpc/pyhps.py | 72 ++++++++++++++++++++++++++----- 2 files changed, 75 insertions(+), 10 deletions(-) diff --git a/src/ansys/mapdl/core/cli/hpc.py b/src/ansys/mapdl/core/cli/hpc.py index 92f694e51f..1a96aefa3a 100644 --- a/src/ansys/mapdl/core/cli/hpc.py +++ b/src/ansys/mapdl/core/cli/hpc.py @@ -183,6 +183,17 @@ flag_value=True, help="""Whether the terminal is to 
wait for job completion before returning control to the user. """, ) +@click.option( + "--mode", + default=None, + type=str, + help=""" +Force the job submission to behave as if the main file was a Python, +shell, or APDL file, regardless of its extension type. Allowed values are +``"python"``, ``"shell"``, and ``"apdl"``. +By default, PyMAPDL detects the type of file from its extension. +""", +) @click.option( "--debug", default=False, @@ -213,6 +224,7 @@ def submit( max_execution_time: int = None, wait: bool = False, debug: bool = False, + mode: Optional[Union["python", "shell", "apdl"]] = None, ): import json @@ -268,6 +280,7 @@ def submit( disk_space=disk_space, exclusive=exclusive, max_execution_time=max_execution_time, + mode=mode, ) if save_config_file: diff --git a/src/ansys/mapdl/core/hpc/pyhps.py b/src/ansys/mapdl/core/hpc/pyhps.py index 78f952d301..7ad3e06a04 100644 --- a/src/ansys/mapdl/core/hpc/pyhps.py +++ b/src/ansys/mapdl/core/hpc/pyhps.py @@ -215,6 +215,7 @@ def is_float(num): def create_task( project_api, + mode, main_file, file_input_ids, file_output_ids, @@ -225,13 +226,19 @@ def create_task( max_execution_time, ): - software = Software(name="Bash", version="0.1") # Overwriting + if mode == "apdl": + software = Software(name="Ansys Mechanical APDL", version="2024 R2") + name = "MAPDL Task" + else: + software = Software(name="Bash", version="0.1") # Overwriting + name = "PyMAPDL Task" + execution_command = f"%executable% %file:{os.path.basename(main_file)}%" logger.debug(f"Using executable: '{execution_command}'") # Process step task_def = TaskDefinition( - name="PyMAPDL_task", + name=name, software_requirements=[software], execution_command=execution_command, resource_requirements=ResourceRequirements( @@ -338,15 +345,40 @@ def create_pymapdl_pyhps_job( disk_space: int = None, exclusive: bool = None, max_execution_time: int = None, + mode: Optional[Union["python", "shell", "apdl"]] = None, ): + """ + Workflow + + APDL mode: main_file + + Others: shell_file + + Wrapper + + main_file + """ if python not in [2, 2.7, 3, 3.5, 3.6, 3.7, 3.8, 3.9, 3.10, 3.11, 3.12]: warn(f"Version of Python {python} might not be supported by the cluster.") if not os.path.exists(main_file): - raise ValueError(f"The Python file {main_file} must exist.") + raise ValueError(f"The file {main_file} must exist.") + + logger.debug(f"Main file is in: {main_file}.") + + _, file_extension = os.path.splitext(main_file) + + if mode is None: + if file_extension.lower() in [".sh"]: + mode = "shell" + elif file_extension.lower() in [".py"]: + mode = "python" + elif file_extension.lower() in [".inp", ".mac"]: + mode = "apdl" + else: + if mode.lower() not in ["python", "shell", "apdl"]: + raise Exception("File type is not supported.") - logger.debug(f"Main Python file is in: {main_file}.") + logger.debug( + f"Submission mode set to '{mode}' because of main file ({main_file}) extension." 
+ ) if inputs is None: inputs = [] @@ -355,11 +387,14 @@ def create_pymapdl_pyhps_job( input_file = None if inputs: + if mode == "apdl": + raise ValueError("Inputs are not supported when using APDL files.") + if isinstance(inputs, str): inputs = inputs.split(",") if inputs: - input_file = "input.inp" + input_file = "input.inputs" content = "\n".join(inputs) input_file = _create_tmp_file(input_file, content) @@ -367,7 +402,9 @@ def create_pymapdl_pyhps_job( output_parms_file = None if outputs: - output_parms_file = "output.out" + if mode == "apdl": + raise ValueError("Outputs are not supported when using APDL files.") + output_parms_file = "output.output" if isinstance(outputs, str): outputs = outputs.split(",") @@ -391,6 +428,7 @@ def create_pymapdl_pyhps_job( if outputs: content += f""" +# Write output data with open("{output_parms_file}", "w") as fid: """ b0 = "{" @@ -402,7 +440,7 @@ def create_pymapdl_pyhps_job( wrapper_file = _create_tmp_file(wrapper_file, content) logger.debug(f"Wrapper file in: {wrapper_file}") - if not requirements_file: + if not requirements_file and mode == "python": import pkg_resources content = "\n".join( @@ -411,7 +449,7 @@ def create_pymapdl_pyhps_job( requirements_file = _create_tmp_file("requirements.txt", content) logger.debug(f"Requirements file in: {requirements_file}") - if not shell_file: + if not shell_file and mode == "python": content = f""" echo "Starting" @@ -429,6 +467,14 @@ def create_pymapdl_pyhps_job( shell_file = _create_tmp_file("main.sh", content) logger.debug(f"Shell file in: {shell_file}") + elif shell_file and mode == "shell": + raise ValueError( + "You cannot use a shell file and specify a shell file as a main file. Avoid specifying the '--shell_file' argument." + ) + + elif not shell_file and mode == "shell": + shell_file = main_file + if isinstance(extra_files, str): extra_files = extra_files.split(",") elif extra_files is None: @@ -443,8 +489,13 @@ def create_pymapdl_pyhps_job( raise ValueError("One or more extra files do not exist.") input_files = extra_files - input_files.append(requirements_file) - input_files.append(shell_file) + if mode == "python": + input_files.append(requirements_file) + input_files.append(shell_file) + else: + # we are going to refer to this from now on + shell_file = main_file + input_files.append(main_file) if inputs: @@ -488,6 +539,7 @@ def create_pymapdl_pyhps_job( # Set tasks task_def = create_task( project_api, + mode, shell_file, file_input_ids, file_output_ids, From 0f80a474b5aa3408cf963603b7426f9e25b2e793 Mon Sep 17 00:00:00 2001 From: German <28149841+germa89@users.noreply.github.com> Date: Fri, 31 May 2024 17:29:48 +0200 Subject: [PATCH 11/29] Refactoring PyHPS implementation (#3117) * Adding inputs and outputs * Supporting pure APDL jobs * Allowing force mode * Initial implementation. 
Making task object detachable * Apply suggestions from code review Co-authored-by: Kathy Pippert <84872299+PipKat@users.noreply.github.com> * Added job definition bypasser * Adapting CLI * Adding option for output and wait * fixing python and cpu issues * Using dict for taskdefinition Allowing outputs in apdl mode * Adding `close_client` method --------- Co-authored-by: Kathy Pippert <84872299+PipKat@users.noreply.github.com> --- src/ansys/mapdl/core/cli/hpc.py | 50 +- src/ansys/mapdl/core/hpc/pyhps.py | 1172 ++++++++++++++++++++--------- 2 files changed, 833 insertions(+), 389 deletions(-) diff --git a/src/ansys/mapdl/core/cli/hpc.py b/src/ansys/mapdl/core/cli/hpc.py index 1a96aefa3a..87e6abf09b 100644 --- a/src/ansys/mapdl/core/cli/hpc.py +++ b/src/ansys/mapdl/core/cli/hpc.py @@ -194,6 +194,14 @@ By default, PyMAPDL detects the type of file from its extension. """, ) +@click.option( + "--output_to_json", + default=None, + type=str, + is_flag=False, + flag_value=True, + help="""Print the output values to the terminal as json. It requires to use ``--wait`` value too. """, +) @click.option( "--debug", default=False, @@ -225,13 +233,13 @@ def submit( wait: bool = False, debug: bool = False, mode: Optional[Union["python", "shell", "apdl"]] = None, + output_to_json: Optional[bool] = False, ): import json from ansys.mapdl.core.hpc.pyhps import ( - create_pymapdl_pyhps_job, + PyMAPDLJobSubmission, get_value_from_json_or_default, - wait_for_completion, ) if debug: @@ -261,28 +269,29 @@ def submit( max_execution_time, config_file, "max_execution_time", 0 ) - proj, _ = create_pymapdl_pyhps_job( - main_file=main_file, - name=name, + job = PyMAPDLJobSubmission( url=url, user=user, password=password, - python=python, + main_file=main_file, + mode=mode, inputs=inputs, outputs=outputs, - output_files=output_files, - shell_file=shell_file, requirements_file=requirements_file, + shell_file=shell_file, extra_files=extra_files, - config_file=config_file, + output_files=output_files, + python=python, num_cores=num_cores, memory=memory, disk_space=disk_space, exclusive=exclusive, max_execution_time=max_execution_time, - mode=mode, + name=name, ) + job.submit() + if save_config_file: config = { "url": url, @@ -303,13 +312,24 @@ def submit( with open(config_file, "w") as fid: json.dump(config, fid) - print( - f"You can check your project by visiting: {url}/projects#/projects/{proj.id}/jobs" - ) + proj = job.project + if not output_to_json: + print( + f"You can check your project by visiting: {url}/projects#/projects/{proj.id}/jobs" + ) if wait: - print(f"Waiting for project {name} (id: {proj.id}) evaluation to complete...") - wait_for_completion(proj, evaluated=True, failed=True) + if not output_to_json: + print( + f"Waiting for project {name} (id: {proj.id}) evaluation to complete..." 
+ ) + job.wait_for_completion(evaluated=True, failed=True) + + if output_to_json and wait: + if len(job.outputs) == 1: + print(job.output_values[0][job.outputs[0]]) + else: + print(json.dumps(job.output_values)) def list_jobs(): diff --git a/src/ansys/mapdl/core/hpc/pyhps.py b/src/ansys/mapdl/core/hpc/pyhps.py index 7ad3e06a04..6cf0de8787 100644 --- a/src/ansys/mapdl/core/hpc/pyhps.py +++ b/src/ansys/mapdl/core/hpc/pyhps.py @@ -73,489 +73,913 @@ def get_value_from_json_or_default( return default_value -def create_project( - client, - name="My PyMAPDL job", -) -> Project: - jms_api = JmsApi(client) +# consider not using inheritance +PYTHONTASK = { + "name": "Python Task", + "software_requirements": [ + Software(name="Bash", version="0.1"), + Software( + name="Python", version="3.9" + ), # this should be adapted to the python version used in the class + ], + "execution_command": "%executable% %file:{executable}%", +} + + +APDLTASK = { + "name": "APDL Task", + "software_requirements": [ + Software(name="Ansys Mechanical APDL", version="2024 R2") + ], + "execution_command": "%executable% -b -i {executable} -o apdl_output.out", +} + + +SHELLTASK = { + "name": "Shell Task", + "software_requirements": [Software(name="Bash", version="0.1")], + "execution_command": "%executable% %file:{executable}%", +} + + +class JobSubmission: + + def __init__( + self, + url, + user, + password, + main_file, + mode: Optional[str] = None, + inputs: Optional[Union[list[str]]] = None, + outputs: Optional[Union[list[str]]] = None, + requirements_file: Optional[str] = None, + shell_file: Optional[str] = None, + extra_files: Optional[Union[list[str]]] = None, + output_files: Optional[Union[list[str]]] = None, + python: Optional[float] = None, + num_cores: Optional[int] = None, + memory: Optional[int] = None, + disk_space: Optional[int] = None, + exclusive: Optional[bool] = None, + max_execution_time: Optional[int] = None, + name: Optional[str] = None, + ): + self._url = url + self._user = user + self._password = password + self._main_file = self._validate_main_file(main_file) + self._mode = self._validate_mode(mode) + + self._output_parms_file = "output.output" + self._input_file = "input.inputs" + self._wrapper_python_file = "python_wrapper.py" + self.input_files = [] + + self._task_definitions = None + self._job_definitions = None + self._jobs = None + self._output_values = None + + # Pre-populating + self._inputs = self._validate_inputs(inputs) + self._outputs = self._validate_outputs(outputs) + self._requirements_file = self._validate_requirements_file(requirements_file) + self._shell_file = self._validate_shell_file(shell_file) + self._extra_files = self._validate_extra_files(extra_files) + self._python = self._validate_python(python) + self._num_cores = self._validate_num_cores(num_cores) + self._memory = self._validate_memory(memory) + self._disk_space = self._validate_disk_space(disk_space) + self._exclusive = self._validate_exclusive(exclusive) + self._max_execution_time = self._validate_max_execution_time(max_execution_time) + self._name = self._validate_name(name) + self._output_files = self._validate_output_files(output_files) + + @property + def url(self): + return self._url + + @property + def user(self): + return self._user + + @property + def password(self): + return self._password + + @property + def mode(self): + return self._mode + + @property + def inputs(self): + return self._inputs + + @inputs.setter + def inputs(self, inputs: Union[str, list[str]]): + self._inputs = self._validate_inputs(inputs) 
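+    # Every public attribute of this class follows the pattern above: the
+    # setter delegates to a `_validate_*` helper, so values passed to the
+    # constructor and values assigned later are normalized identically.
+    # Hypothetical usage sketch:
+    #
+    #   job = JobSubmission(url, user, password, main_file="main.py")
+    #   job.inputs = "force=123,length=2.5"
+    #   job.inputs  # -> ["force=123", "length=2.5"]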
+ + @property + def outputs(self): + return self._outputs + + @outputs.setter + def outputs(self, outputs: Union[str, list[str]]): + self._outputs = self._validate_outputs(outputs) + + @property + def main_file(self): + return self._main_file + + @main_file.setter + def main_file(self, main_file: str): + self._main_file = self._validate_main_file(main_file) + + @property + def requirements_file(self): + return self._requirements_file + + @requirements_file.setter + def requirements_file(self, requirements_file: str): + self._requirements_file = self._validate_requirements_file(requirements_file) + + @property + def shell_file(self): + return self._shell_file + + @shell_file.setter + def shell_file(self, shell_file): + self._shell_file = self._validate_shell_file(shell_file) + + @property + def extra_files(self): + return self._extra_files + + @extra_files.setter + def extra_files(self, extra_files): + self._extra_files = self._validate_extra_files(extra_files) + + @property + def output_files(self): + return self._output_files + + @output_files.setter + def output_files(self, output_files): + self._output_files = self._validate_output_files(output_files) + + @property + def python(self): + return self._python + + @python.setter + def python(self, python: float): + self._python = self._validate_python(python) + + @property + def num_cores(self): + return self._num_cores + + @num_cores.setter + def num_cores(self, num_cores: int): + self._num_cores = self._validate_num_cores(num_cores) + + @property + def memory(self): + return self._memory - proj = Project(name=name, priority=1, active=True) - return jms_api.create_project(proj) + @memory.setter + def memory(self, memory: int): + self._memory = self._validate_memory(memory) + @property + def disk_space(self): + return self._disk_space -def add_files(project_api: ProjectApi, input_files: list, output_files: list): + @disk_space.setter + def disk_space(self, disk_space: int): + self._disk_space = self._validate_disk_space(disk_space) - # Checks: - if not all([os.path.exists(each) for each in input_files]): - raise ValueError("One or more input files do not exist.") + @property + def exclusive(self): + return self._exclusive - input_files_ = [os.path.basename(each) for each in input_files] + @exclusive.setter + def exclusive(self, exclusive: bool): + self._exclusive = self._validate_exclusive(exclusive) - input_files = [ - File( - name=os.path.basename(each_file), - evaluation_path=os.path.basename(each_file), - type="text/plain", - src=each_file, - ) - for each_file in input_files - ] - - output_files = [ - File( - name=os.path.basename(each_file), - evaluation_path=os.path.basename(each_file), - type="text/plain", - collect=True, - monitor=True, - ) - for each_file in output_files - ] + @property + def max_execution_time(self): + return self._max_execution_time - files = input_files + output_files + @max_execution_time.setter + def max_execution_time(self, max_execution_time: int): + self._max_execution_time = self._validate_max_execution_time(max_execution_time) - for each in files: - logger.debug(each) + @property + def name(self): + return self._name - files = project_api.create_files(files) + @name.setter + def name(self, name: str): + self._name = self._validate_name(name) - f_inp = {} - f_out = {} - for f in files: - if f.name in input_files_: - f_inp[f.name] = f.id - else: - f_out[f.name] = f.id + ## To bypass implemented tasks, job definitions and jobs. 
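+    # Assigning pre-built PyHPS objects to the three properties below makes
+    # `submit()` reuse them instead of creating defaults (each `_create_*`
+    # helper first checks whether the attribute is already set). A
+    # hypothetical example:
+    #
+    #   job.task_definitions = [TaskDefinition(name="Custom task", ...)]
+    #   job.submit()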
+ @property + def task_definitions(self): + return self._task_definitions - logger.debug(f"Input files IDs: {f_inp}") - logger.debug(f"Output files IDs: {f_out}") - return f_inp, f_out + @task_definitions.setter + def task_definitions(self, task_definitions: list[TaskDefinition]): + self._task_definitions = task_definitions + @property + def job_definitions(self): + return self._job_definitions -def create_parameters( - project_api, - inputs: Optional[list[str]] = None, - input_file_id: str = None, - outputs: Optional[list[str]] = None, - output_file_id: str = None, -): - if inputs is None: - inputs = [] - logger.debug("Setting empty input parameters.") - - def is_float(num): - try: - float(num) - return True - except ValueError: - return False - - input_params = [] - param_mappings = [] - - for each_input_parm in inputs: - name, value = each_input_parm.split("=") - if is_float(value): - parm = FloatParameterDefinition( - name=name, - display_text=name, - default=value, + @job_definitions.setter + def job_definitions(self, job_definitions: list[JobDefinition]): + self._job_definitions = job_definitions + + @property + def jobs(self): + return self._jobs + + @jobs.setter + def jobs(self, jobs: list[JobDefinition]): + self._jobs = jobs + + @property + def project(self): + return self._proj + + @project.setter + def project(self, proj: Project): + self._proj = proj + + @property + def project_api(self): + return self._project_api + + @project_api.setter + def project_api(self, project_api: ProjectApi): + self._project_api = project_api + + @property + def output_values(self): + if not self._output_values and self.outputs: + self._load_results() + + return self._output_values + + ## Validate inputs + def _validate_inputs(self, inputs): + """Validate inputs inputs""" + + if inputs and self.mode != "python": + raise ValueError("Inputs are not supported when using APDL or shell files.") + + if inputs is None: + inputs = [] + + if isinstance(inputs, str): + inputs = inputs.split(",") + + return inputs + + def _validate_outputs(self, outputs): + """Validate outputs inputs""" + + if outputs and self.mode != "python": + warn( + f"""Outputs are not directly supported when using APDL or shell files. 
+ However, you can still write the parameters you want to the + file {self._output_parms_file} using the Python format + ('PARAMETER=VALUE').""", + UserWarning, ) + + if outputs is None: + outputs = [] + + if isinstance(outputs, str): + outputs = outputs.split(",") + + return outputs + + def _validate_main_file(self, main_file): + """Validate the main file.""" + + if not os.path.exists(main_file): + raise ValueError(f"The file {main_file} must exist.") + + logger.debug(f"Main file is in: {main_file}.") + return main_file + + def _validate_shell_file(self, shell_file): + """Validate the shell file.""" + return shell_file + + def _validate_extra_files(self, extra_files): + """Validate all extra files.""" + if isinstance(extra_files, str): + extra_files = extra_files.split(",") + elif extra_files is None: + extra_files = [] + + # Expanding real path + extra_files = [os.path.realpath(each) for each in extra_files] + + if extra_files and not all([os.path.exists(each) for each in extra_files]): + raise ValueError("One or more extra files do not exist.") + + return extra_files + + def _validate_output_files(self, output_files): + """Validate output files.""" + if not output_files: + output_files = [] + elif isinstance(output_files, str): + output_files = output_files.split(",") + + return output_files + + def _validate_requirements_file(self, requirements_file): + """Validate the requirements file.""" + return requirements_file + + def _validate_python(self, python): + """Validate Python version.""" + if python is None: + return 3 + elif python not in [2, 2.7, 3, 3.5, 3.6, 3.7, 3.8, 3.9, 3.10, 3.11, 3.12]: + warn(f"Python {python} might not be supported by the cluster.") + return python + + def _validate_num_cores(self, num_cores: int): + """Validate num_cores inputs""" + if not num_cores: + num_cores = 1 + + return int(num_cores) + + def _validate_memory(self, memory: int): + """Validate the memory.""" + if not memory: + memory = 0 + + return int(memory) + + def _validate_disk_space(self, disk_space: float): + """Validate disk_space inputs""" + if not disk_space: + disk_space = 0 + + return float(disk_space) + + def _validate_exclusive(self, exclusive: bool): + """Validate exclusive argument.""" + if exclusive is None: + exclusive = False + + return bool(exclusive) + + def _validate_max_execution_time(self, max_execution_time: float): + """Validate inputs for the maximum execution time.""" + if not max_execution_time: + return None + + return float(max_execution_time) + + def _validate_name(self, name: str): + """Validate name""" + if name is None: + if self.mode == "python": + name = "My PyMAPDL project" + elif self.mode == "apdl": + name = "My APDL project" + elif self.mode == "shell": + name = "My shell project" + else: + name = "My project" + return name + + def _validate_mode(self, mode: str): + _, file_extension = os.path.splitext(self._main_file) + + if mode is None: + if file_extension.lower() in [".sh"]: + mode = "shell" + elif file_extension.lower() in [".py"]: + mode = "python" + elif file_extension.lower() in [".inp", ".mac"]: + mode = "apdl" else: - parm = StringParameterDefinition( - name=name, - display_text=name, - default=value, - ) + if mode.lower() not in ["python", "shell", "apdl"]: + raise Exception("File type is not supported.") - # Mapping - param_map = ParameterMapping( - key_string=name, - tokenizer="=", - parameter_definition_id=parm.id, - file_id=input_file_id, + logger.debug( + f"Mode '{mode}' because of main file ({self.main_file}) extension." 
) - logger.debug(f"Output parameter: {name}\n{parm}\nMapping: {param_map}") - input_params.append(parm) - param_mappings.append(param_map) + return mode + + def submit(self): + if self.inputs: + self._prepare_input_file() + + self._executed_pyscript = os.path.basename(self.main_file) + + # Prepare Python wrapper file for input/output injection/extraction + if self.mode == "python" and (self.inputs or self.outputs): + self._executed_pyscript = self._wrapper_python_file + self._prepare_python_wrapper() + + # Prepare requirement file + if self.mode == "python" and not self.requirements_file: + self._prepare_requirements_file() + elif self.mode != "python" and self.requirements_file: + raise ValueError( + """You can use a requirement file only when the main file is + a Python file. + Avoid specifying the '--requirement_file' argument.""" + ) + + # Prepare shell file + if self.mode == "python" and not self.shell_file: + self._prepare_shell_file() + + # Shell cases + elif self.mode == "shell": + if self.shell_file: + raise ValueError( + """You cannot use a shell file and specify a shell file as the main file. + Avoid specifying the '--shell_file' argument.""" + ) + else: + self.shell_file = self.main_file - logger.debug(f"Input parameters:\n{input_params}") - input_params = project_api.create_parameter_definitions(input_params) + self._add_files() - output_params = [] - for each_output_parm in outputs: - # output - name = each_output_parm - outparm = StringParameterDefinition(name=name, display_text=name) + # TODO: To check this + if self.mode != "python": + self.shell_file = self.main_file - output_params.append(outparm) + # Log in + self._connect_client() - logger.debug(f"Output parameters:\n{output_params}") - output_params = project_api.create_parameter_definitions(output_params) + # Initialize project + self._proj = self._create_project() + self._project_api = self._get_project_api() - for each_output_parm, outparm in zip(outputs, output_params): - name = each_output_parm - # mapping - parm_map = ParameterMapping( - key_string=name, - tokenizer="=", - parameter_definition_id=outparm.id, - file_id=output_file_id, + # Set files + file_input_ids, file_output_ids = self._add_files_to_project() + + if self.inputs: + self._input_file_id = file_input_ids[os.path.basename(self._input_file)] + else: + self._input_file_id = None + + if self.outputs: + self._output_parms_file_id = file_output_ids[ + os.path.basename(self._output_parms_file) + ] + else: + self._output_parms_file_id = None + + # Set parameters + self._input_params, self._output_params, self._param_mappings = ( + self._create_parameters( + self._project_api, + inputs=self.inputs, + input_file_id=self._input_file_id, + outputs=self.outputs, + output_file_id=self._output_parms_file_id, + ) ) - logger.debug(f"Output parameter: {name}\n{outparm}\nMapping: {parm_map}") - param_mappings.append(parm_map) + self._create_task(file_input_ids, file_output_ids) + + # Set jobs + self._create_job_definition() + self._create_jobs() + logger.debug(f"Jobs: {self._jobs}") + logger.debug("Project submitted.") + + return None + + def _create_jobs(self): + if not self.jobs: + self.jobs = [ + Job( + name="Job", + values={}, + eval_status="pending", + job_definition_id=self.job_definitions[0].id, + ) + ] + + logger.debug(f"jobs: {self.jobs}") + self.jobs = self._project_api.create_jobs(self.jobs) + + def _create_job_definition(self): + if not self.job_definitions: + self.job_definitions = [JobDefinition(name="JobDefinition.1", active=True)] + params = 
self._input_params + self._output_params + + self.job_definitions[0].task_definition_ids = [self.task_definitions[0].id] + self.job_definitions[0].parameter_definition_ids = [pd.id for pd in params] + self.job_definitions[0].parameter_mapping_ids = [ + pm.id for pm in self._param_mappings + ] + + self.job_definitions = self._project_api.create_job_definitions( + self.job_definitions + ) - logger.debug(f"Mapping parameters:\n{param_mappings}") - param_mappings = project_api.create_parameter_mappings(param_mappings) + # Refresh the parameters + params = self._project_api.get_parameter_definitions( + id=self.job_definitions[0].parameter_definition_ids + ) - return input_params, output_params, param_mappings + logger.debug(f"Job definition: {self.job_definitions}") + def _create_task(self, file_input_ids, file_output_ids): -def create_task( - project_api, - mode, - main_file, - file_input_ids, - file_output_ids, - num_cores, - memory, - disk_space, - exclusive, - max_execution_time, -): + if self.mode == "apdl": + task_class = APDLTASK + executable = self.main_file + elif self.mode == "python": + task_class = PYTHONTASK + executable = self.shell_file + else: + task_class = SHELLTASK + executable = self.shell_file - if mode == "apdl": - software = Software(name="Ansys Mechanical APDL", version="2024 R2") - name = "MAPDL Task" - else: - software = Software(name="Bash", version="0.1") # Overwriting - name = "PyMAPDL Task" - - execution_command = f"%executable% %file:{os.path.basename(main_file)}%" - logger.debug(f"Using executable: '{execution_command}'") - - # Process step - task_def = TaskDefinition( - name=name, - software_requirements=[software], - execution_command=execution_command, - resource_requirements=ResourceRequirements( - num_cores=int(num_cores), - memory=int(memory) * 1024 * 1024, - disk_space=int(disk_space) * 1024 * 1024, - # distributed=True, - hpc_resources=HpcResources(exclusive=exclusive), - ), - max_execution_time=max_execution_time, - execution_level=0, - num_trials=1, - input_file_ids=list(file_input_ids.values()), - output_file_ids=list(file_output_ids.values()), - ) - logger.debug(f"Task definition: {task_def}") + task_class_ = task_class.copy() - return project_api.create_task_definitions([task_def])[0] + execution_command = task_class_.pop("execution_command").format( + executable=os.path.basename(executable) + ) + print(f"Using executable: '{execution_command}'") + logger.debug(f"Using executable: '{execution_command}'") + + # Process step + if not self.task_definitions: + self.task_definitions = [ + TaskDefinition( + execution_command=execution_command, + resource_requirements=ResourceRequirements( + num_cores=self.num_cores, + memory=self.memory * 1024 * 1024, + disk_space=self.disk_space * 1024 * 1024, + # distributed=True, + hpc_resources=HpcResources( + exclusive=self.exclusive, + # queue="qlarge" + ), + ), + max_execution_time=self.max_execution_time, + execution_level=0, + num_trials=1, + input_file_ids=list(file_input_ids.values()), + output_file_ids=list(file_output_ids.values()), + **task_class_, + ) + ] + + logger.debug(f"Task definition: {self.task_definitions }") + self.task_definitions = self._project_api.create_task_definitions( + self.task_definitions + ) -def create_job_definition( - project_api, - task_def, - input_params, - output_params, - param_mappings, -): - job_def = JobDefinition(name="JobDefinition.1", active=True) - params = input_params + output_params + def _create_parameters( + self, + project_api, + inputs: Optional[list[str]] = None, + 
input_file_id: str = None, + outputs: Optional[list[str]] = None, + output_file_id: str = None, + ): + if inputs is None: + inputs = [] + logger.debug("Setting empty input parameters.") + + def is_float(num): + try: + float(num) + return True + except ValueError: + return False + + input_params = [] + param_mappings = [] + + for each_input_parm in inputs: + name, value = each_input_parm.split("=") + if is_float(value): + parm = FloatParameterDefinition( + name=name, + display_text=name, + default=value, + ) + else: + parm = StringParameterDefinition( + name=name, + display_text=name, + default=value, + ) + + # Mapping + param_map = ParameterMapping( + key_string=name, + tokenizer="=", + parameter_definition_id=parm.id, + file_id=input_file_id, + ) - job_def.task_definition_ids = [task_def.id] - job_def.parameter_definition_ids = [pd.id for pd in params] - job_def.parameter_mapping_ids = [pm.id for pm in param_mappings] + logger.debug(f"Output parameter: {name}\n{parm}\nMapping: {param_map}") + input_params.append(parm) + param_mappings.append(param_map) + + logger.debug(f"Input parameters:\n{input_params}") + input_params = project_api.create_parameter_definitions(input_params) + + output_params = [] + for each_output_parm in outputs: + # output + name = each_output_parm + # outparm = StringParameterDefinition(name=name, display_text=name) + outparm = FloatParameterDefinition(name=name, display_text=name) + + output_params.append(outparm) + + logger.debug(f"Output parameters:\n{output_params}") + output_params = project_api.create_parameter_definitions(output_params) + + for each_output_parm, outparm in zip(outputs, output_params): + name = each_output_parm + # mapping + parm_map = ParameterMapping( + key_string=name, + tokenizer="=", + parameter_definition_id=outparm.id, + file_id=output_file_id, + # string_quote="'", + ) + logger.debug(f"Output parameter: {name}\n{outparm}\nMapping: {parm_map}") - logger.debug(f"Job definition: {job_def}") - job_def = project_api.create_job_definitions([job_def])[0] + param_mappings.append(parm_map) - # Refresh the parameters - params = project_api.get_parameter_definitions(id=job_def.parameter_definition_ids) - return job_def + logger.debug(f"Mapping parameters:\n{param_mappings}") + param_mappings = project_api.create_parameter_mappings(param_mappings) + return input_params, output_params, param_mappings -def create_jobs(project_api, job_def): - jobs = [ - Job(name="Job", values={}, eval_status="pending", job_definition_id=job_def.id) - ] - logger.debug(f"jobs: {jobs}") - return project_api.create_jobs(jobs) + def _add_files_to_project(self): + # Checks: + if not all([os.path.exists(each) for each in self.input_files]): + raise ValueError("One or more input files do not exist.") + input_files_ = [os.path.basename(each) for each in self.input_files] -def get_project_api(client, proj): - return ProjectApi(client, proj.id) + workdir = os.path.dirname(os.path.realpath(self._main_file)) + input_files = [] + for each_file in self.input_files: + if workdir in each_file: + # File in the same location as the main file or in a + # subdirectory + file_path = os.path.relpath(each_file, workdir) + else: + file_path = os.path.basename(each_file) -def wait_for_completion(project_api, evaluated=True, failed=False, running=False): - eval_status = [] + file = File( + name=os.path.basename(each_file), + evaluation_path=file_path, + type="text/plain", + src=each_file, + ) - if evaluated: - eval_status.append("evaluated") + input_files.append(file) - if failed: - 
eval_status.append("evaluated") + output_files = [ + File( + name=os.path.basename(each_file), + evaluation_path=os.path.basename(each_file), + type="text/plain", + collect=True, + monitor=True, + ) + for each_file in self.output_files + ] - if running: - eval_status.append("running") + files = input_files + output_files - logger.debug(f"Waiting on project {proj.id} with criteria: {eval_status}") - while not project_api.get_jobs(eval_status=eval_status): - time.sleep(2) + for each in files: + logger.debug(f"Added file:\n{each}") + files = self._project_api.create_files(files) -def _create_tmp_file(file_name, content): - import tempfile - import uuid + # Getting IDs + f_inp = {} + f_out = {} + for f in files: + if f.name in input_files_: + f_inp[f.name] = f.id + else: + f_out[f.name] = f.id - dir_ = tempfile.gettempdir() - sub_dir = str(uuid.uuid4()) + logger.debug(f"Input file IDs: {f_inp}") + logger.debug(f"Output file IDs: {f_out}") + return f_inp, f_out - tmp_file = os.path.join(dir_, sub_dir, file_name) - os.makedirs(os.path.join(dir_, sub_dir)) + def _get_project_api(self): + return ProjectApi(self._client, self._proj.id) - with open(tmp_file, "w") as fid: - fid.write(content) + def _create_project(self) -> Project: + jms_api = JmsApi(self._client) + proj = Project(name=self.name, priority=1, active=True) + return jms_api.create_project(proj) - return tmp_file + def _add_files(self): + # Reset + self.input_files = [] + self.input_files.append(self.main_file) -def create_pymapdl_pyhps_job( - main_file: str, - name: str = None, - url: str = None, - user: str = None, - password: str = None, - python: float = None, - inputs: str = None, - outputs: str = None, - output_files: Optional[Union[str, list]] = None, - shell_file: str = None, - requirements_file: str = None, - extra_files: Optional[Union[str, list]] = None, - config_file: str = None, - num_cores: int = None, - memory: int = None, - disk_space: int = None, - exclusive: bool = None, - max_execution_time: int = None, - mode: Optional[Union["python", "shell", "apdl"]] = None, -): - """ - Workflow - + APDL mode: main_file - + Others: shell_file - + Wrapper - + main_file + if self.inputs: + self.input_files.append(self._input_file) - """ - if python not in [2, 2.7, 3, 3.5, 3.6, 3.7, 3.8, 3.9, 3.10, 3.11, 3.12]: - warn(f"Version of Python {python} might not be supported by the cluster.") + if self.outputs: + self.output_files.append(self._output_parms_file) - if not os.path.exists(main_file): - raise ValueError(f"The file {main_file} must exist.") + if self.mode == "python": + self.input_files.append(self.requirements_file) + self.input_files.append(self.shell_file) - logger.debug(f"Main file is in: {main_file}.") + if self.mode == "python" and (self.inputs or self.outputs): + self.input_files.append(self._wrapper_python_file) - _, file_extension = os.path.splitext(main_file) + if self.mode == "apdl": + self.output_files.append("apdl_output.out") - if mode is None: - if file_extension.lower() in [".sh"]: - mode = "shell" - elif file_extension.lower() in [".py"]: - mode = "python" - elif file_extension.lower() in [".inp", ".mac"]: - mode = "apdl" - else: - if mode.lower() not in ["python", "shell", "apdl"]: - raise Exception("File type is not supported.") + if self.extra_files: + self.input_files.extend(self.extra_files) - logger.debug( - f"Submission mode set to '{mode}' because of main file ({main_file}) extension." 
- ) + def _prepare_shell_file(self): + content = f""" +echo "Starting" - if inputs is None: - inputs = [] - if outputs is None: - outputs = [] +# Start venv +python{self.python} -m venv .venv +source .venv/bin/activate - input_file = None - if inputs: - if mode == "apdl": - raise ValueError("Inputs are not supported when using APDL files.") +# Install requirements +pip install -r {os.path.basename(self.requirements_file)} - if isinstance(inputs, str): - inputs = inputs.split(",") +# Run script +python {self._executed_pyscript} + """ - if inputs: - input_file = "input.inputs" + self.shell_file = self._create_tmp_file("main.sh", content) + logger.debug(f"Shell file in: {self.shell_file}") - content = "\n".join(inputs) - input_file = _create_tmp_file(input_file, content) - logger.debug(f"Input file in: {input_file}") + def _prepare_requirements_file(self): + import pkg_resources - output_parms_file = None - if outputs: - if mode == "apdl": - raise ValueError("Outputs are not supported when using APDL files.") - output_parms_file = "output.output" - if isinstance(outputs, str): - outputs = outputs.split(",") + content = "\n".join( + [str(p.as_requirement()) for p in pkg_resources.working_set] + ) + self.requirements_file = self._create_tmp_file("requirements.txt", content) + logger.debug(f"Requirements file is in: {self.requirements_file}") - executed_pyscript = os.path.basename(main_file) + def _prepare_python_wrapper(self): - if inputs or outputs: - wrapper_file = "main_execution.py" - executed_pyscript = wrapper_file content = "" - if inputs: + if self.inputs: content += f""" # Read inputs -exec(open("{os.path.basename(input_file)}").read()) +exec(open("{os.path.basename(self._input_file)}").read()) """ content += f""" # Execute main file -exec(open("{os.path.basename(main_file)}").read()) +exec(open("{os.path.basename(self.main_file)}").read()) """ - if outputs: + if self.outputs: content += f""" -# Write output data -with open("{output_parms_file}", "w") as fid: +# Writing output data +with open("{self._output_parms_file}", "w") as fid: """ b0 = "{" b1 = "}" - for each in outputs: + for each in self.outputs: content += f""" fid.write(f"{each}={b0}{each}{b1}")\n""" - wrapper_file = _create_tmp_file(wrapper_file, content) - logger.debug(f"Wrapper file in: {wrapper_file}") - - if not requirements_file and mode == "python": - import pkg_resources - - content = "\n".join( - [str(p.as_requirement()) for p in pkg_resources.working_set] + self._wrapper_python_file = self._create_tmp_file( + self._wrapper_python_file, content ) - requirements_file = _create_tmp_file("requirements.txt", content) - logger.debug(f"Requirements file in: {requirements_file}") + logger.debug(f"Wrapper file in: {self._wrapper_python_file}") - if not shell_file and mode == "python": - content = f""" -echo "Starting" + def _prepare_input_file(self): + if self.inputs: + content = "\n".join(self.inputs) + self._input_file = self._create_tmp_file(self._input_file, content) + logger.debug(f"Input file in: {self._input_file}") -# Start venv -python{python} -m venv .venv -source .venv/bin/activate + def _create_tmp_file(self, file_name, content): + import tempfile + import uuid -# Install requirements -pip install -r {os.path.basename(requirements_file)} + dir_ = tempfile.gettempdir() + sub_dir = str(uuid.uuid4()) -# Run script -python {executed_pyscript} - """ + tmp_file = os.path.join(dir_, sub_dir, file_name) + os.makedirs(os.path.join(dir_, sub_dir)) - shell_file = _create_tmp_file("main.sh", content) - logger.debug(f"Shell 
file in: {shell_file}") + with open(tmp_file, "w") as fid: + fid.write(content) - elif shell_file and mode == "shell": - raise ValueError( - "You cannot use a shell file and specify a shell file as a main file. Avoid specifying the '--shell_file' argument." - ) + return tmp_file - elif not shell_file and mode == "shell": - shell_file = main_file + def wait_for_completion(self, evaluated=True, failed=False, running=False): + eval_status = [] - if isinstance(extra_files, str): - extra_files = extra_files.split(",") - elif extra_files is None: - extra_files = [] + if evaluated: + eval_status.append("evaluated") - if not output_files: - output_files = [] - elif isinstance(output_files, str): - output_files = output_files.split(",") + if failed: + eval_status.append("failed") - if extra_files and not all([os.path.exists(each) for each in extra_files]): - raise ValueError("One or more extra files do not exist.") + if running: + eval_status.append("running") - input_files = extra_files - if mode == "python": - input_files.append(requirements_file) - input_files.append(shell_file) - else: - # we are going to refer to this from now on - shell_file = main_file + logger.debug( + f"Waiting on project {self.project.id} with criteria: {eval_status}" + ) + while not self.project_api.get_jobs(eval_status=eval_status): + time.sleep(2) - input_files.append(main_file) + def _load_results(self): + self._connect_client() - if inputs: - input_files.append(input_file) + jobs = self.project_api.get_jobs(eval_status=["evaluated"]) - if outputs: - output_files.append(output_parms_file) + if not jobs: + return None - if inputs or outputs: - input_files.append(wrapper_file) + self._output_values = [] + for each_job in jobs: + self._output_values.append(each_job.values) - # Log in - client = Client(url=url, username=user, password=password, verify=False) + def _connect_client(self): + self._client: Client = Client( + url=self.url, username=self.user, password=self.password, verify=False + ) - # Setting project - proj = create_project(client, name) - project_api = get_project_api(client, proj) + def close_client(self): + self._client.session.close() - # Set files - file_input_ids, file_output_ids = add_files(project_api, input_files, output_files) - if inputs: - input_file_id = file_input_ids[os.path.basename(input_file)] - else: - input_file_id = None +class PyMAPDLJobSubmission(JobSubmission): + pass - if outputs: - output_parms_file_id = file_output_ids[os.path.basename(output_parms_file)] - else: - output_parms_file_id = None - # Set parameters - input_params, output_params, param_mappings = create_parameters( - project_api, - inputs=inputs, - input_file_id=input_file_id, - outputs=outputs, - output_file_id=output_parms_file_id, - ) +if __name__ == "__main__": - # Set tasks - task_def = create_task( - project_api, - mode, - shell_file, - file_input_ids, - file_output_ids, - num_cores, - memory, - disk_space, - exclusive, - max_execution_time, + logging.basicConfig( + format="[%(asctime)s | %(levelname)s] %(message)s", level=logging.DEBUG ) - # Set jobs - job_def = create_job_definition( - project_api, task_def, input_params, output_params, param_mappings + # from ansys.mapdl.core.hpc import PyMAPDLJobSubmission + # Test 1 + main_file = "/Users/german.ayuso/pymapdl/src/ansys/mapdl/core/hpc/main.py" + job = PyMAPDLJobSubmission( + url="https://10.231.106.91:3000/hps", + user="repuser", + password="repuser", + main_file=main_file, + ) + job.extra_files.append( + 
"/Users/german.ayuso/pymapdl/src/ansys/mapdl/core/hpc/tmp/tmp2.py" ) - jobs = create_jobs(project_api, job_def) - logger.debug(f"Jobs: {jobs}") - logger.debug("Project submitted.") - return proj, project_api + job.submit() + + # Test2 + + # main_file = "main.py" + # job1 = PyMAPDLJobSubmission( + # url="https://10.231.106.91:3000/hps", + # user="repuser", + # password="repuser", + # main_file=main_file + # ) + # job1.shell_file = "shell_script.py" + # job1.requirements_file = "requirements.txt" + # job.extra_files.append("module/tmp2.py") From dff728d078ceef0bc3f369d29e0e57f9db8fa7ea Mon Sep 17 00:00:00 2001 From: German <28149841+germa89@users.noreply.github.com> Date: Tue, 18 Jun 2024 11:17:03 +0200 Subject: [PATCH 12/29] Apply suggestions from Kathy's code review Co-authored-by: Kathy Pippert <84872299+PipKat@users.noreply.github.com> --- src/ansys/mapdl/core/cli/__init__.py | 2 +- src/ansys/mapdl/core/cli/hpc.py | 16 ++++++++-------- 2 files changed, 9 insertions(+), 9 deletions(-) diff --git a/src/ansys/mapdl/core/cli/__init__.py b/src/ansys/mapdl/core/cli/__init__.py index 7e80805ff5..b2936f4558 100644 --- a/src/ansys/mapdl/core/cli/__init__.py +++ b/src/ansys/mapdl/core/cli/__init__.py @@ -61,7 +61,7 @@ def old_pymapdl_convert_script_entry_point(): pymapdl convert input_file.inp -o output_file.out ... -For more information please visit: https://mapdl.docs.pyansys.com/version/dev/user_guide/cli.html +For more information, see `PyMAPDL command line interface `_. """ ) diff --git a/src/ansys/mapdl/core/cli/hpc.py b/src/ansys/mapdl/core/cli/hpc.py index 87e6abf09b..e3d26e0026 100644 --- a/src/ansys/mapdl/core/cli/hpc.py +++ b/src/ansys/mapdl/core/cli/hpc.py @@ -58,7 +58,7 @@ help="""URL where the HPS cluster is deployed. For example: "https://myserver:3000/hps" """, ) @click.option( - "--user", default=None, type=str, help="Username to login into the HPC cluster." + "--user", default=None, type=str, help="Username for logging into the HPC cluster." ) @click.option( "--password", @@ -78,9 +78,9 @@ default=None, type=str, help=""" -Input arguments for the simulation. You can specify several arguments by -joining them with commas. Thus, strings defined in this way cannot contain -commas. Only integers, floats and strings are allowed. +Input arguments for the simulation. Because you can specify several arguments by +joining them with commas, strings defined in this way cannot contain +commas. Only integers, floats, and strings are allowed. PyMAPDL converts these inputs to integer or float values when possible. Otherwise, they remain as strings. You can change these arguments on the HPS website. For example, ``--inputs="force=123,value='mystring'"``. @@ -99,7 +99,7 @@ default=None, type=str, help="""Output files to monitor. Because you use commas to separate -the file names, the names cannot contain commas. For example, +filenames, the names cannot contain commas. For example, ``--output_files="results.out,data.xls"``.""", ) @click.option( @@ -117,7 +117,7 @@ help="""Optional created virtual environment to install with the libraries specified in this requirements file. If not, the activated virtual environment is cloned through a temporary ``pip list`` file. 
If you are using an editable package, -you should attach your own requirement file using ``pip freeze`` """, +you should attach your own requirement file using ``pip freeze``.""", ) @click.option( "--extra_files", @@ -208,7 +208,7 @@ type=bool, is_flag=False, flag_value=True, - help="""Whether PyMAPDL is to print debug logging to the console output.""", + help="""Whether PyMAPDL is to print debug logging to the console.""", ) def submit( main_file: str, @@ -307,7 +307,7 @@ def submit( } logger.debug( - f"Saving the following configuration to the config file ({config_file}):\n{config}" + f"Saving the following configuration to the config file ({config_file}):\n{config}." ) with open(config_file, "w") as fid: json.dump(config, fid) From 445949adc3a3cd76883eaf755b69f1ef10988830 Mon Sep 17 00:00:00 2001 From: pyansys-ci-bot Date: Tue, 18 Jun 2024 13:28:09 +0000 Subject: [PATCH 13/29] Adding changelog entry: 3091.miscellaneous.md --- doc/changelog.d/3091.miscellaneous.md | 1 + 1 file changed, 1 insertion(+) create mode 100644 doc/changelog.d/3091.miscellaneous.md diff --git a/doc/changelog.d/3091.miscellaneous.md b/doc/changelog.d/3091.miscellaneous.md new file mode 100644 index 0000000000..aa826d0c62 --- /dev/null +++ b/doc/changelog.d/3091.miscellaneous.md @@ -0,0 +1 @@ +Add PyHPS CLI \ No newline at end of file From 4556c49b0162a30de7c2f102f95e25b7b6473a45 Mon Sep 17 00:00:00 2001 From: pyansys-ci-bot Date: Tue, 18 Jun 2024 14:27:37 +0000 Subject: [PATCH 14/29] Adding changelog entry: 3091.miscellaneous.md --- doc/changelog.d/3091.miscellaneous.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/changelog.d/3091.miscellaneous.md b/doc/changelog.d/3091.miscellaneous.md index aa826d0c62..caa7bd8b77 100644 --- a/doc/changelog.d/3091.miscellaneous.md +++ b/doc/changelog.d/3091.miscellaneous.md @@ -1 +1 @@ -Add PyHPS CLI \ No newline at end of file +feat: add PyHPS CLI \ No newline at end of file From ac875f1bd0f3b3bf642390b6af6ebdeff312049c Mon Sep 17 00:00:00 2001 From: German Date: Tue, 18 Jun 2024 17:19:00 +0200 Subject: [PATCH 15/29] Renaming argument ``to_json``. Making ``wait`` ``true`` if `output_to_json` --- src/ansys/mapdl/core/cli/hpc.py | 15 ++++++++++----- 1 file changed, 10 insertions(+), 5 deletions(-) diff --git a/src/ansys/mapdl/core/cli/hpc.py b/src/ansys/mapdl/core/cli/hpc.py index e3d26e0026..ac9b589d8b 100644 --- a/src/ansys/mapdl/core/cli/hpc.py +++ b/src/ansys/mapdl/core/cli/hpc.py @@ -195,7 +195,7 @@ """, ) @click.option( - "--output_to_json", + "--to_json", default=None, type=str, is_flag=False, @@ -233,10 +233,15 @@ def submit( wait: bool = False, debug: bool = False, mode: Optional[Union["python", "shell", "apdl"]] = None, - output_to_json: Optional[bool] = False, + to_json: Optional[bool] = False, ): import json + if to_json: + import json + + wait = True + from ansys.mapdl.core.hpc.pyhps import ( PyMAPDLJobSubmission, get_value_from_json_or_default, @@ -313,19 +318,19 @@ def submit( json.dump(config, fid) proj = job.project - if not output_to_json: + if not to_json: print( f"You can check your project by visiting: {url}/projects#/projects/{proj.id}/jobs" ) if wait: - if not output_to_json: + if not to_json: print( f"Waiting for project {name} (id: {proj.id}) evaluation to complete..." 
             )
         job.wait_for_completion(evaluated=True, failed=True)
 
-    if output_to_json and wait:
+    if to_json:
         if len(job.outputs) == 1:
             print(job.output_values[0][job.outputs[0]])
         else:
             print(json.dumps(job.output_values))

From 1911729f770aa339a08a6f4864e0a8d3f47e951f Mon Sep 17 00:00:00 2001
From: German
Date: Tue, 18 Jun 2024 17:20:42 +0200
Subject: [PATCH 16/29] rewriting docstring

---
 src/ansys/mapdl/core/cli/hpc.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/ansys/mapdl/core/cli/hpc.py b/src/ansys/mapdl/core/cli/hpc.py
index ac9b589d8b..0d79ed4ddb 100644
--- a/src/ansys/mapdl/core/cli/hpc.py
+++ b/src/ansys/mapdl/core/cli/hpc.py
@@ -200,7 +200,7 @@
     type=str,
     is_flag=False,
     flag_value=True,
-    help="""Print the output values to the terminal as json. It requires to use ``--wait`` value too. """,
+    help="""Print the output values to the terminal as JSON. It automatically sets ``--wait`` to ``True``.""",
 )

From 0279ffc66fe07bc63dd5de5b08e20ad4ca80c36e Mon Sep 17 00:00:00 2001
From: pyansys-ci-bot
Date: Mon, 24 Jun 2024 15:47:58 +0000
Subject: [PATCH 17/29] Adding changelog entry: 3091.added.md

---
 doc/changelog.d/{3091.miscellaneous.md => 3091.added.md} | 0
 1 file changed, 0 insertions(+), 0 deletions(-)
 rename doc/changelog.d/{3091.miscellaneous.md => 3091.added.md} (100%)

diff --git a/doc/changelog.d/3091.miscellaneous.md b/doc/changelog.d/3091.added.md
similarity index 100%
rename from doc/changelog.d/3091.miscellaneous.md
rename to doc/changelog.d/3091.added.md

From 4145688d6becd289b415e86f6bbf62fcf5334218 Mon Sep 17 00:00:00 2001
From: German <28149841+germa89@users.noreply.github.com>
Date: Wed, 26 Jun 2024 18:23:36 +0200
Subject: [PATCH 18/29] feat: Detaching logging from main logic (#3205)

* First approach to login
* Adding HPS dependencies
* Adding changelog entry: 3205.miscellaneous.md
* feat: Coupling login code to the current implementation. Allowing login using a token, which is now the preferred method.
* coupling cli
* fix: command name in CLI
* Adding changelog entry: 3205.added.md
* fix: wrong argument that prevented passing the input file as an argument
* chore: checking config file in submit function.
* feat: avoid venv creation if ``requirements_file`` is False.
* feat: login CLI finished
* feat: making sure we don't get import errors when PyHPS is not installed.
* feat: Adding docstrings
* chore: renaming 'access' to 'get_token_access'.
* fix: failing piping on the CLI.
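The login command stores one credential set per HPS cluster URL, plus an
optional default set, using ``keyring`` (added to the ``hps`` extra). A
minimal sketch of the idea, with hypothetical helper names rather than the
actual ``hpc/login.py`` API:

    from typing import Optional

    import keyring

    SERVICE_NAME = "pymapdl-hps"  # hypothetical keyring service name

    def store_credentials(url: str, user: str, password: str) -> None:
        # One entry per cluster URL; the username is stored under its own
        # key so it can be recovered before the password is requested.
        keyring.set_password(SERVICE_NAME, f"{url}_user", user)
        keyring.set_password(SERVICE_NAME, f"{url}_password", password)

    def get_stored_password(url: str) -> Optional[str]:
        # Returns None when nothing is stored for this URL.
        return keyring.get_password(SERVICE_NAME, f"{url}_password")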
--------- Co-authored-by: pyansys-ci-bot --- doc/changelog.d/3205.added.md | 1 + pyproject.toml | 5 + src/ansys/mapdl/core/cli/__init__.py | 24 +- src/ansys/mapdl/core/cli/convert.py | 3 + src/ansys/mapdl/core/cli/hpc.py | 54 +++- src/ansys/mapdl/core/cli/list_instances.py | 1 + src/ansys/mapdl/core/cli/login.py | 284 +++++++++++++++++ src/ansys/mapdl/core/hpc/login.py | 348 +++++++++++++++++++++ src/ansys/mapdl/core/hpc/pyhps.py | 88 ++++-- 9 files changed, 765 insertions(+), 43 deletions(-) create mode 100644 doc/changelog.d/3205.added.md create mode 100644 src/ansys/mapdl/core/cli/login.py create mode 100644 src/ansys/mapdl/core/hpc/login.py diff --git a/doc/changelog.d/3205.added.md b/doc/changelog.d/3205.added.md new file mode 100644 index 0000000000..44cf5dc554 --- /dev/null +++ b/doc/changelog.d/3205.added.md @@ -0,0 +1 @@ +feat: Detaching logging from main logic \ No newline at end of file diff --git a/pyproject.toml b/pyproject.toml index f5cff755ce..2226a16a50 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -104,6 +104,11 @@ doc = [ "vtk==9.3.0", ] +hps =[ + "ansys-hps-client==0.8.0", + "keyring==25.2.1", +] + [tool.flit.module] name = "ansys.mapdl.core" diff --git a/src/ansys/mapdl/core/cli/__init__.py b/src/ansys/mapdl/core/cli/__init__.py index b2936f4558..f0e70da863 100644 --- a/src/ansys/mapdl/core/cli/__init__.py +++ b/src/ansys/mapdl/core/cli/__init__.py @@ -29,6 +29,13 @@ _HAS_CLICK = False +try: + from ansys.hps.client import Client + + _HAS_HPS = True +except ModuleNotFoundError: + _HAS_HPS = False + if _HAS_CLICK: ################################### # PyMAPDL CLI @@ -39,7 +46,6 @@ def main(ctx): pass from ansys.mapdl.core.cli.convert import convert - from ansys.mapdl.core.cli.hpc import submit from ansys.mapdl.core.cli.list_instances import list_instances from ansys.mapdl.core.cli.start import start from ansys.mapdl.core.cli.stop import stop @@ -50,10 +56,18 @@ def main(ctx): main.add_command(list_instances, name="list") # HPC commands - # pymapdl hpc submit - # pymapdl hpc list - # pymapdl hpc stop - main.add_command(submit) + # pymapdl (hpc) login + # pymapdl (hpc) submit + # pymapdl (hpc) list #To be implemented + # pymapdl (hpc) stop #To be implemented + + if _HAS_HPS: + from ansys.mapdl.core.cli.hpc import submit + from ansys.mapdl.core.cli.login import login, logout + + main.add_command(login) + main.add_command(submit) + main.add_command(logout) def old_pymapdl_convert_script_entry_point(): print( diff --git a/src/ansys/mapdl/core/cli/convert.py b/src/ansys/mapdl/core/cli/convert.py index 61224e49e2..4e6c8c09bf 100644 --- a/src/ansys/mapdl/core/cli/convert.py +++ b/src/ansys/mapdl/core/cli/convert.py @@ -212,6 +212,9 @@ def convert( ) else: + if not filename_in: + raise ValueError("A file path must be provided.") + convert_script( filename_in, filename_out, diff --git a/src/ansys/mapdl/core/cli/hpc.py b/src/ansys/mapdl/core/cli/hpc.py index 0d79ed4ddb..a3b765dc58 100644 --- a/src/ansys/mapdl/core/cli/hpc.py +++ b/src/ansys/mapdl/core/cli/hpc.py @@ -28,12 +28,10 @@ import click -from ansys.mapdl.core.cli import main - logger = logging.getLogger() -@main.command( +@click.command( short_help="Submit jobs to an HPC cluster using PyHPS.", help=""" Submit jobs to an HPC cluster using PyHPS. @@ -55,16 +53,35 @@ "--url", default=None, type=str, - help="""URL where the HPS cluster is deployed. For example: "https://myserver:3000/hps" """, + help="""URL where the HPS cluster is deployed. For example: "https://myserver:3000/hps". 
+If it is not provided, there is a chain of places where PyMAPDL looks for a URL.
+First, it checks if the URL is given in the file specified by the argument ``--config_file``.
+If that file does not have a URL or does not exist, then it checks the default user credentials stored with the ``pymapdl login --default`` CLI command.
+If no URL is found, an exception is raised.""",
 )
 @click.option(
-    "--user", default=None, type=str, help="Username for logging into the HPC cluster."
+    "--user",
+    default=None,
+    type=str,
+    help="""Username for logging into the HPC cluster.
+If it is not provided, there is a chain of places where PyMAPDL looks for a username.
+First, it checks if the username is given in the file specified by the argument ``--config_file``.
+If that file does not have a username or does not exist, then it checks the username configured using the ``pymapdl login`` CLI command, for the given HPS cluster URL.
+If there is no user credential stored for that HPS cluster URL, then it checks the default user credentials stored with the ``pymapdl login --default`` CLI command.
+If no user is found, an exception is raised.
+""",
 )
 @click.option(
     "--password",
     default=None,
     type=str,
-    help="Password for logging into the HPC cluster.",
+    help="""Password for logging into the HPC cluster.
+If it is not provided, there is a chain of places where PyMAPDL looks for a password.
+First, it checks if the password is given in the file specified by the argument ``--config_file``.
+If that file does not have a password or does not exist, then it checks the password configured using the ``pymapdl login`` CLI command, for the given HPS cluster URL.
+If there is no user credential stored for that HPS cluster URL, then it checks the default user credentials stored with the ``pymapdl login --default`` CLI command.
+If no password is found, an exception is raised.
+""",
 )
 @click.option(
     "--python",
     default=None,
     type=str,
@@ -235,7 +252,7 @@ def submit(
     mode: Optional[Union["python", "shell", "apdl"]] = None,
     to_json: Optional[bool] = False,
 ):
-    import json
+    from ansys.mapdl.core.hpc.login import get_default_url, get_token_access

     if to_json:
         import json

         wait = True

     from ansys.mapdl.core.hpc.pyhps import (
         PyMAPDLJobSubmission,
         get_value_from_json_or_default,
     )

@@ -254,11 +271,25 @@ def submit(

     if config_file is None:
         config_file = os.path.join(os.getcwd(), "hps_config.json")
+        if not os.path.exists(config_file):
+            config_file = None
         logger.debug(f"Using default HPS configuration file: {config_file}")

-    url = get_value_from_json_or_default(url, config_file, "url", None)
-    user = get_value_from_json_or_default(user, config_file, "user", None)
-    password = get_value_from_json_or_default(password, config_file, "password", None)
+    # Getting cluster login configuration from CLI or file
+    url = get_value_from_json_or_default(
+        url, config_file, "url", None, raise_if_none=False
+    )
+    url = url or get_default_url()  # use the stored default URL
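+    # Precedence: CLI argument first, then the config file, and finally the
+    # credentials stored by the 'pymapdl login' CLI command.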
+
+    # allow retrieving user from the configuration
+    user = get_value_from_json_or_default(
+        user, config_file, "user", raise_if_none=False
+    )
+
+    # Getting access token
+    token = get_token_access(url, user, password)
+
+    # Getting other configuration from CLI or file
     python = get_value_from_json_or_default(python, config_file, "python", 3)
     name = get_value_from_json_or_default(name, config_file, "name", "My PyMAPDL job")

@@ -276,8 +307,7 @@ def submit(

     job = PyMAPDLJobSubmission(
         url=url,
-        user=user,
-        password=password,
+        token=token,
         main_file=main_file,
         mode=mode,
         inputs=inputs,
diff --git a/src/ansys/mapdl/core/cli/list_instances.py b/src/ansys/mapdl/core/cli/list_instances.py
index a98c72ec89..f9ebf4ab25 100644
--- a/src/ansys/mapdl/core/cli/list_instances.py
+++ b/src/ansys/mapdl/core/cli/list_instances.py
@@ -28,6 +28,7 @@
 @main.command(
     short_help="List MAPDL running instances.",
     help="""This command list MAPDL instances""",
+    name="list",
 )
 @click.option(
     "--instances",
diff --git a/src/ansys/mapdl/core/cli/login.py b/src/ansys/mapdl/core/cli/login.py
new file mode 100644
index 0000000000..4e9e1e9fc7
--- /dev/null
+++ b/src/ansys/mapdl/core/cli/login.py
@@ -0,0 +1,284 @@
+# Copyright (C) 2024 ANSYS, Inc. and/or its affiliates.
+# SPDX-License-Identifier: MIT
+#
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to deal
+# in the Software without restriction, including without limitation the rights
+# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in all
+# copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+# SOFTWARE.
+
+"""Login into a PyHPS cluster"""
+from getpass import getpass
+import logging
+from typing import Optional
+
+import click
+
+logging.basicConfig(
+    format="%(asctime)s - %(name)s - %(levelname)s - %(message)s",
+)
+logger = logging.getLogger()
+
+
+@click.command(
+    name="login",
+    short_help="Login into an HPS cluster.",
+    help="""Login into an HPS cluster.
+
+It stores the credentials (cluster URL, username, and password) in the OS credential manager.
+If you want to change any credential, just issue the command again with the new values.
+
+Examples
+--------
+
+Prompt the values for user, password, and HPS cluster URL:
+
+$ pymapdl login
+Username: myuser
+Password: mypassword
+HPS cluster URL: https://123.456.789.1:3000/hps
+Stored credentials:
+  User        : 'myuser'
+  Cluster URL : 'https://123.456.789.1:3000/hps'
+
+Use the CLI arguments to supply the values:
+
+$ pymapdl login --user myuser --password mypassword --url "https://123.456.789.1:3000/hps"
+Stored credentials:
+  User        : 'myuser'
+  Cluster URL : 'https://123.456.789.1:3000/hps'
+
+Set the default user, password, and URL. They are used when any of them is
+missing.
+
+$ pymapdl login --default --user myuser --password mypassword --url "https://123.456.789.1:3000/hps"
+Stored default credentials.
+
+It is possible to supply some arguments using the CLI arguments, and others using
+the prompt:
+
+$ pymapdl login --user myuser --url "https://123.456.789.1:3000/hps"
+Password: mypassword
+Stored credentials:
+  User        : 'myuser'
+  Cluster URL : 'https://123.456.789.1:3000/hps'
+
+""",
+)
+@click.option("--user", default=None, type=str, help="The username to login.")
+@click.option("--password", default=None, type=str, help="The password to login.")
+@click.option(
+    "--url",
+    default=None,
+    type=str,
+    help="The HPS cluster URL. For instance 'https://123.456.789.1:3000/hps'.",
+)
+@click.option(
+    "--default",
+    default=False,
+    type=bool,
+    is_flag=False,
+    flag_value=True,
+    help="""Set the default user, password and URL. These credentials are not tested against any HPS cluster.""",
+)
+@click.option(
+    "--test_token",
+    default=False,
+    type=bool,
+    is_flag=False,
+    flag_value=True,
+    help="""Test if the token is valid. This argument is ignored if the '--default' argument is ``True``.""",
+)
+@click.option(
+    "--quiet",
+    default=False,
+    type=bool,
+    is_flag=False,
+    flag_value=True,
+    help="""Suppress all console printout.""",
+)
+@click.option(
+    "--debug",
+    default=False,
+    type=bool,
+    is_flag=False,
+    flag_value=True,
+    help="""Activate debugging printout. It might show the input password!""",
+)
+def login(
+    user: Optional[str] = None,
+    password: Optional[str] = None,
+    url: Optional[str] = None,
+    default: bool = False,
+    test_token: bool = False,
+    quiet: bool = False,
+    debug: bool = False,
+):
+    from ansys.mapdl.core.hpc.login import login_in_cluster, store_credentials
+
+    if debug:
+        logger.setLevel(logging.DEBUG)
+
+    if quiet:
+        import urllib3
+
+        urllib3.disable_warnings()
+
+    logger.debug("Storing non-default credentials.")
+    if not user:
+        user = click.prompt("Username")
+
+    if not user:
+        raise ValueError("No user was provided.")
+
+    if not password:
+        password = getpass("Password: ")
+        if not password:
+            raise ValueError("No password was provided.")
+
+    if not default and not url:
+        url = click.prompt("HPS cluster URL")
+        if not url:
+            raise ValueError("No URL was provided.")
+
+    token = login_in_cluster(user, password, url)
+    logger.debug(f"Login successful")
+
+    if test_token:
+        logger.debug("Testing token")
+        from requests import ConnectionError
+
+        from ansys.mapdl.core.hpc.login import token_is_valid
+
+        if not token_is_valid(url, token):
+            raise ConnectionError("The retrieved token is not valid.")
+        else:
+            if not quiet:
+                click.echo("Token has been verified with the HPS cluster.")
+
+    logger.info(f"Stored credentials: {user}, {password}, {url}")
+    store_credentials(user, password, url, default=default)
+
+    if not quiet:
+        if default:
+            click.echo("Stored default credentials.")
+        else:
+            click.echo(
+                f"Stored credentials:\n  User        : '{user}'\n  Cluster URL : '{url}'"
+            )
+
+
+@click.command(
+    short_help="Logout from an HPS cluster.",
+    help="""Logout from an HPS cluster.
+
+It deletes credentials stored on the system.
+
+Examples
+--------
+
+Delete the credentials associated with a specific URL:
+
+$ pymapdl logout --url "https://123.456.789.1:3000/hps"
+The HPS cluster 'https://123.456.789.1:3000/hps' credentials have been deleted.
+
+Delete the default credentials:
+
+$ pymapdl logout --default
+The default credentials have been deleted.
+
+Notes
+-----
+- If the credentials do not exist, the CLI notifies and exits cleanly.
+  No exception is raised. If you want to raise an exception (exit 1), then pass
+  the argument ``--strict``.
+""",
+)
+@click.option(
+    "--url",
+    default=None,
+    type=str,
+    help="The HPS cluster URL. For instance 'https://10.231.106.1:3000/hps'.",
+)
+@click.option(
+    "--default",
+    default=False,
+    type=bool,
+    is_flag=False,
+    flag_value=True,
+    help="""Deletes the default login configuration.""",
+)
+@click.option(
+    "--quiet",
+    default=False,
+    type=bool,
+    is_flag=False,
+    flag_value=True,
+    help="""Suppress all console printout.""",
+)
+@click.option(
+    "--debug",
+    default=False,
+    type=bool,
+    is_flag=False,
+    flag_value=True,
+    help="""Activate debugging printout.""",
+)
+@click.option(
+    "--strict",
+    default=False,
+    type=bool,
+    is_flag=False,
+    flag_value=True,
+    help="""Raise an exception if the credentials do not exist.""",
+)
+def logout(url, default, quiet, debug, strict):
+
+    # TODO: The keyring library does not seem to be able to list the credentials
+    # stored under a service name. We might need to keep track of those in a
+    # file or something.
+
+    import keyring
+
+    from ansys.mapdl.core.hpc.login import delete_credentials
+
+    if debug:
+        logger.setLevel(logging.DEBUG)
+
+    if not url and not default:
+        raise ValueError("A URL must be provided.")
+
+    if url and default:
+        raise ValueError("The argument '--default' cannot be used with a URL.")
+
+    if default:
+        logger.debug("Deleting credentials for the default profile.")
+        url = None
+
+    try:
+        delete_credentials(url)
+        success_message = "The {0} credentials have been deleted.".format(
+            "default" if default else f"HPS cluster '{url}'"
+        )
+    except keyring.errors.PasswordDeleteError:
+        success_message = "The {0} credentials do not exist.".format(
+            "default" if default else f"HPS cluster '{url}'"
+        )
+        if strict:
+            raise keyring.errors.PasswordDeleteError(success_message)
+
+    if not quiet:
+        click.echo(success_message)
diff --git a/src/ansys/mapdl/core/hpc/login.py b/src/ansys/mapdl/core/hpc/login.py
new file mode 100644
index 0000000000..ccee2a5ab3
--- /dev/null
+++ b/src/ansys/mapdl/core/hpc/login.py
@@ -0,0 +1,348 @@
+# Copyright (C) 2024 ANSYS, Inc. and/or its affiliates.
+# SPDX-License-Identifier: MIT
+#
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to deal
+# in the Software without restriction, including without limitation the rights
+# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in all
+# copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+# SOFTWARE.
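+
+"""Store, retrieve, and validate HPS cluster login credentials using the OS keyring."""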
+
+import logging
+import time
+from typing import Optional
+
+try:
+    from ansys.hps.client import AuthApi, Client
+    from ansys.hps.client.authenticate import authenticate
+    import keyring
+    from requests import ConnectionError
+except ModuleNotFoundError:
+    raise ModuleNotFoundError(
+        """Some of the dependencies required for logging into an HPS cluster are not installed.
+Please install them using "pip install 'ansys-mapdl-core[hps]'"."""
+    )
+
+DEFAULT_IDENTIFIER = "defaultconfig"
+SERVICE_NAME = "pymapdl-pyhps"
+EXPIRATION_TIME = 4 * 24 * 60  # 4 days in minutes
+
+logger = logging.getLogger()
+
+
+def get_password(*args, **kwargs):
+    logger.debug(f"Getting password from service '{args[0]}' and key '{args[1]}'")
+    return keyring.get_password(*args, **kwargs)
+
+
+def set_password(*args, **kwargs):
+    logger.debug(f"Setting password for service '{args[0]}' and key '{args[1]}'")
+    return keyring.set_password(*args, **kwargs)
+
+
+def login_in_cluster(user, password, url):
+    """
+    Authenticate with the server and return the access token.
+
+    Parameters
+    ----------
+    user : str
+        Username.
+    password : str
+        Password.
+    url : str
+        URL.
+
+    Returns
+    -------
+    str
+        Access token.
+    """
+    logger.debug(f"Authenticating on cluster '{url}' using user '{user}'.")
+    access_token = authenticate(
+        url=url, username=user, password=password, scope="openid", verify=False
+    )["access_token"]
+    return access_token
+
+
+def store_credentials(
+    user: str = None,
+    password: str = None,
+    url: str = None,
+    default=False,
+    expiration_time: float = EXPIRATION_TIME,
+):
+    """
+    Store user credentials and the current timestamp in the keyring.
+
+    If the ``default`` argument is ``True``, you can store a default password,
+    default user, and/or default URL.
+
+    Parameters
+    ----------
+    user : str, optional
+        Username.
+    password : str, optional
+        Password.
+    url : str, optional
+        URL of the HPS cluster.
+    default : bool, optional
+        Whether to store the credentials as the default credentials.
+    expiration_time : float, optional
+        Amount of time in minutes before the credentials expire.
+
+    """
+    if default:
+        identifier = DEFAULT_IDENTIFIER
+    else:
+        identifier = url
+    logger.debug(f"Using identifier: '{identifier}'")
+
+    if not default and (not url or not user or not password):
+        raise ValueError(
+            "To store non-default credentials, a URL, a user, and a password are needed."
+        )
+
+    if url:
+        set_password(SERVICE_NAME, f"{identifier}_url", url)
+    if user:
+        set_password(SERVICE_NAME, f"{identifier}_user", user)
+    if password:
+        set_password(SERVICE_NAME, f"{identifier}_password", password)
+    if expiration_time:
+        set_password(
+            SERVICE_NAME, f"{identifier}_expiration_time", str(expiration_time)
+        )
+
+    set_password(SERVICE_NAME, f"{identifier}_timestamp", str(time.time()))
+
+
+def get_stored_credentials(identifier: str):
+    """
+    Retrieve stored credentials, timestamp, and expiration time from the keyring.
+
+    Parameters
+    ----------
+    identifier : str
+        Identifier for the credentials.
+
+    Returns
+    -------
+    tuple
+        (url, user, password, timestamp, expiration_time), with ``None`` for
+        each value that is not found.
+    """
+    logger.debug(f"Retrieving info for '{identifier}'")
+    url = get_password(SERVICE_NAME, f"{identifier}_url")
+    user = get_password(SERVICE_NAME, f"{identifier}_user")
+    password = get_password(SERVICE_NAME, f"{identifier}_password")
+    timestamp = get_password(SERVICE_NAME, f"{identifier}_timestamp")
+    expiration_time = get_password(SERVICE_NAME, f"{identifier}_expiration_time")
+
+    if timestamp:
+        timestamp = float(timestamp)
+    if expiration_time:
+        expiration_time = float(expiration_time)
+
+    logger.debug(
+        f"Retrieved info for '{identifier}': {url}, {user}, {password}, {timestamp}, {expiration_time} "
+    )
+    return url, user, password, timestamp, expiration_time
+
+
+def credentials_expired(timestamp: float, expiration_time: float = EXPIRATION_TIME):
+    """
+    Check if the stored credentials have expired.
+
+    Parameters
+    ----------
+    timestamp : float
+        Timestamp of when the credentials were stored.
+
+    expiration_time : float, optional
+        Amount of time in minutes before the credentials expire.
+
+    Returns
+    -------
+    bool
+        True if the credentials have expired, False otherwise.
+    """
+    return time.time() - timestamp > expiration_time * 60
+
+
+def delete_credentials(identifier: Optional[str] = None):
+    """
+    Delete stored credentials.
+
+    Parameters
+    ----------
+    identifier : str, optional
+        Identifier for the credentials. If it is ``None``, the
+        default credentials are deleted.
+
+    Returns
+    -------
+    None
+    """
+    if not identifier:
+        identifier = DEFAULT_IDENTIFIER
+
+    logger.debug(f"Deleting credentials for identifier: {identifier}")
+
+    keyring.delete_password(SERVICE_NAME, f"{identifier}_url")
+    keyring.delete_password(SERVICE_NAME, f"{identifier}_user")
+    keyring.delete_password(SERVICE_NAME, f"{identifier}_password")
+    keyring.delete_password(SERVICE_NAME, f"{identifier}_timestamp")
+    keyring.delete_password(SERVICE_NAME, f"{identifier}_expiration_time")
+
+
+def token_is_valid(url, token):
+    """Check if a token is valid.
+
+    The validation is performed by requesting the list of users from the HPS cluster.
+
+    Parameters
+    ----------
+    url : str
+        HPS cluster URL.
+    token : str
+        Authentication token.
+
+    Returns
+    -------
+    bool
+        Whether the token is valid or not.
+    """
+    client = Client(url=url, access_token=token, verify=False)
+    auth_api = AuthApi(client)
+
+    try:
+        auth_api.get_users()
+        return True
+    except ConnectionError:
+        return False
+    except Exception as e:
+        raise e
+
+
+def get_token_access(url: str = None, user: str = None, password: str = None):
+    """
+    Access an HPS cluster by logging in with the provided or stored credentials.
+
+    This function attempts to log in to a cluster using the provided URL, username,
+    and password.
+    If any of these parameters are not provided, it attempts to retrieve stored
+    credentials associated with the given URL.
+    If no URL is provided, then it retrieves the default credentials.
+
+    If the credentials are expired or not found, appropriate errors are raised.
+
+    Parameters
+    ----------
+    url : str, optional
+        The URL of the cluster to log in to. If not provided, a stored URL
+        associated with the default or given identifier is used.
+    user : str, optional
+        The username for logging in. If not provided, a stored username associated
+        with the default or given identifier is used.
+    password : str, optional
+        The password for logging in. 
If not provided, a stored password associated
+        with the default or given identifier is used.
+
+    Returns
+    -------
+    str
+        The authentication token for the session.
+
+    Raises
+    ------
+    ConnectionError
+        If there are no stored credentials for the given identifier, or if the stored credentials
+        are expired.
+    ValueError
+        If a URL, username, or password is not provided and cannot be found in the stored
+        credentials.
+
+    Notes
+    -----
+    - If credentials are expired, they are deleted from storage.
+    - The credentials can be stored using the ``pymapdl login`` CLI.
+
+    Examples
+    --------
+    Using URL, user, and password:
+
+    >>> get_token_access(url='https://cluster.example.com', user='admin', password='securepass')
+    'eyJhbGciOiJSUzI1NiIsI...'
+
+    Using the stored credentials for that URL. If those credentials do not exist,
+    the default credentials are used.
+
+    >>> get_token_access(url='https://cluster.example.com')
+    'bGciOiJSeyJhUzI1NiIsI...'
+
+    Login using the default stored credentials:
+
+    >>> get_token_access()
+    'iJSeyJhUzI1bGciONiIsI...'
+    """
+    if not url or not user or not password:
+        if not url:
+            identifier = DEFAULT_IDENTIFIER
+        else:
+            identifier = url
+
+        (
+            url_default,
+            user_default,
+            password_default,
+            timestamp_default,
+            expiration_default,
+        ) = get_stored_credentials(identifier=identifier)
+
+        if not url_default or not user_default or not password_default:
+            raise ConnectionError(
+                f"There are no credentials stored for '{identifier}'."
+            )
+
+        if credentials_expired(timestamp_default, expiration_time=expiration_default):
+            delete_credentials(identifier)
+
+            raise ConnectionError(f"The stored '{identifier}' credentials are expired.")
+
+        if not url:
+            if url_default:
+                url = url_default
+            else:
+                raise ValueError(
+                    f"No 'URL' was given or stored for '{identifier}'. You must provide one."
+                )
+
+        if not user:
+            if user_default:
+                user = user_default
+            else:
+                raise ValueError(
+                    f"No 'user' was given or stored for '{identifier}'. You must provide one."
+                )
+
+        if not password:
+            if password_default:
+                password = password_default
+            else:
+                raise ValueError(
+                    f"No 'password' was given or stored for '{identifier}'. You must provide one."
+                )
+
+    return login_in_cluster(user=user, password=password, url=url)
+
+
+def get_default_url():
+    """Return the URL stored in the default credentials."""
+    return get_password(SERVICE_NAME, f"{DEFAULT_IDENTIFIER}_url")
diff --git a/src/ansys/mapdl/core/hpc/pyhps.py b/src/ansys/mapdl/core/hpc/pyhps.py
index 6cf0de8787..457a83950d 100644
--- a/src/ansys/mapdl/core/hpc/pyhps.py
+++ b/src/ansys/mapdl/core/hpc/pyhps.py
@@ -28,34 +28,45 @@
 from typing import Any, Optional, Union
 from warnings import warn

-from ansys.hps.client import Client
-from ansys.hps.client.jms import (
-    File,
-    FloatParameterDefinition,
-    HpcResources,
-    JmsApi,
-    Job,
-    JobDefinition,
-    ParameterMapping,
-    Project,
-    ProjectApi,
-    ResourceRequirements,
-    Software,
-    StringParameterDefinition,
-    TaskDefinition,
-)
+try:
+    from ansys.hps.client import Client
+    from ansys.hps.client.jms import (
+        File,
+        FloatParameterDefinition,
+        HpcResources,
+        JmsApi,
+        Job,
+        JobDefinition,
+        ParameterMapping,
+        Project,
+        ProjectApi,
+        ResourceRequirements,
+        Software,
+        StringParameterDefinition,
+        TaskDefinition,
+    )
+
+except ModuleNotFoundError:
+    raise ModuleNotFoundError(
+        """Some of the dependencies required for submitting jobs to an HPS cluster are not installed.
+Please install them using "pip install 'ansys-mapdl-core[hps]'"."""
+    )

 logger = logging.getLogger()


 def get_value_from_json_or_default(
-    arg: str, json_file: str, key: str, default_value: Optional[Union[str, Any]] = None
+    arg: str,
+    json_file: Optional[str],
+    key: str,
+    default_value: Optional[Union[str, Any]] = None,
+    raise_if_none: Optional[bool] = True,
 ):
     if arg is not None:
         logger.debug(f"Using '{arg}' for {key}")
         return arg

-    if os.path.exists(json_file):
+    if json_file and os.path.exists(json_file):
         if os.path.getsize(json_file) > 0:
             with open(json_file, "r") as fid:
                 config = json.load(fid)
@@ -64,7 +75,7 @@ def get_value_from_json_or_default(
                 logger.debug(f"Using '{config[key]}' for {key}")
                 return config[key]

-    if default_value is None:
+    if default_value is None and raise_if_none:
         raise ValueError(
             f"The argument '{key}' is not given through the CLI or config file."
         )
@@ -106,10 +117,11 @@ class JobSubmission:

     def __init__(
         self,
-        url,
-        user,
-        password,
         main_file,
+        url,
+        user: Optional[str] = None,
+        password: Optional[str] = None,
+        token: Optional[str] = None,
         mode: Optional[str] = None,
         inputs: Optional[Union[list[str]]] = None,
         outputs: Optional[Union[list[str]]] = None,
@@ -125,9 +137,15 @@ def __init__(
         max_execution_time: Optional[int] = None,
         name: Optional[str] = None,
     ):
+
+        if not token and (not user or not password):
+            raise ValueError("An access token or a user-password pair must be used.")
+
         self._url = url
         self._user = user
         self._password = password
+        self._token = token
+
         self._main_file = self._validate_main_file(main_file)
         self._mode = self._validate_mode(mode)

@@ -622,7 +640,6 @@ def _create_task(self, file_input_ids, file_output_ids):
                 executable=os.path.basename(executable)
             )

-        print(f"Using executable: '{execution_command}'")
         logger.debug(f"Using executable: '{execution_command}'")

         # Process step
@@ -631,6 +648,7 @@ def _create_task(self, file_input_ids, file_output_ids):
             TaskDefinition(
                 execution_command=execution_command,
                 resource_requirements=ResourceRequirements(
+                    platform="linux",
                     num_cores=self.num_cores,
                     memory=self.memory * 1024 * 1024,
                     disk_space=self.disk_space * 1024 * 1024,
@@ -816,7 +834,8 @@ def _add_files(self):
             self.output_files.append(self._output_parms_file)

         if self.mode == "python":
-            self.input_files.append(self.requirements_file)
+            if self._requirements_file is not False:
+                self.input_files.append(self.requirements_file)
             self.input_files.append(self.shell_file)

         if self.mode == "python" and (self.inputs or self.outputs):
@@ -831,14 +850,18 @@ def _add_files(self):
     def _prepare_shell_file(self):
         content = f"""
 echo "Starting"
+"""
+        if self._requirements_file:
+            content += f"""
 # Start venv
 python{self.python} -m venv .venv
 source .venv/bin/activate

 # Install requirements
 pip install -r {os.path.basename(self.requirements_file)}
-
+"""
+        content += f"""
 # Run script
 python {self._executed_pyscript}
 """
@@ -847,6 +870,10 @@ def _prepare_shell_file(self):
         logger.debug(f"Shell file in: {self.shell_file}")

     def _prepare_requirements_file(self):
+
+        if self._requirements_file is False:
+            return
+
         import pkg_resources

         content = "\n".join(
@@ -938,8 +965,17 @@ def _load_results(self):
             self._output_values.append(each_job.values)

     def _connect_client(self):
+        if not self._token:
+            logger.debug("Getting a valid token")
+            from ansys.mapdl.core.hpc.login import get_token_access
+
+            self._token = get_token_access(
+                url=self.url, user=self.user, password=self.password
+            )
+
+        logger.debug("Using a token to authenticate the user.")
         self._client: 
Client = Client( - url=self.url, username=self.user, password=self.password, verify=False + url=self._url, access_token=self._token, verify=False ) def close_client(self): From 680793485957b24b08672185e00d52eb473e6359 Mon Sep 17 00:00:00 2001 From: German Date: Wed, 26 Jun 2024 18:25:34 +0200 Subject: [PATCH 19/29] fix: codecov suggestions --- src/ansys/mapdl/core/cli/login.py | 2 +- tests/test_cli.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/ansys/mapdl/core/cli/login.py b/src/ansys/mapdl/core/cli/login.py index 4e9e1e9fc7..0ec94ea51a 100644 --- a/src/ansys/mapdl/core/cli/login.py +++ b/src/ansys/mapdl/core/cli/login.py @@ -155,7 +155,7 @@ def login( raise ValueError("No password was provided.") token = login_in_cluster(user, password, url) - logger.debug(f"Login successful") + logger.debug("Login successful") if test_token: logger.debug("Testing token") diff --git a/tests/test_cli.py b/tests/test_cli.py index 8bd0c3e8a1..97d600c171 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -206,7 +206,7 @@ def test_convert(run_cli, tmpdir): @requires("click") def test_convert_pipe(): - cmd = """echo "/prep7" | pymapdl convert """ + cmd = """echo "/prep7" | pymapdl convert""" out = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE) stdout = out.stdout.read().decode() From 1b530478a2ec710844a11cf3f81a987c14232523 Mon Sep 17 00:00:00 2001 From: German <28149841+germa89@users.noreply.github.com> Date: Mon, 15 Jul 2024 12:38:08 +0200 Subject: [PATCH 20/29] feat: renaming PyMAPDLJobSubmissionDefinition class --- src/ansys/mapdl/core/hpc/pyhps.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/src/ansys/mapdl/core/hpc/pyhps.py b/src/ansys/mapdl/core/hpc/pyhps.py index 457a83950d..0cc38f6b34 100644 --- a/src/ansys/mapdl/core/hpc/pyhps.py +++ b/src/ansys/mapdl/core/hpc/pyhps.py @@ -113,7 +113,7 @@ def get_value_from_json_or_default( } -class JobSubmission: +class JobSubmissionDefinition: def __init__( self, @@ -982,7 +982,7 @@ def close_client(self): self._client.session.close() -class PyMAPDLJobSubmission(JobSubmission): +class PyMAPDLJobSubmissionDefinition(JobSubmissionDefinition): pass @@ -992,10 +992,10 @@ class PyMAPDLJobSubmission(JobSubmission): format="[%(asctime)s | %(levelname)s] %(message)s", level=logging.DEBUG ) - # from ansys.mapdl.core.hpc import PyMAPDLJobSubmission + # from ansys.mapdl.core.hpc import PyMAPDLJobSubmissionDefinition # Test 1 main_file = "/Users/german.ayuso/pymapdl/src/ansys/mapdl/core/hpc/main.py" - job = PyMAPDLJobSubmission( + job = PyMAPDLJobSubmissionDefinition( url="https://10.231.106.91:3000/hps", user="repuser", password="repuser", @@ -1010,7 +1010,7 @@ class PyMAPDLJobSubmission(JobSubmission): # Test2 # main_file = "main.py" - # job1 = PyMAPDLJobSubmission( + # job1 = PyMAPDLJobSubmissionDefinition( # url="https://10.231.106.91:3000/hps", # user="repuser", # password="repuser", From a29589dba314d9ae59c210291704304bf1d7cc72 Mon Sep 17 00:00:00 2001 From: German <28149841+germa89@users.noreply.github.com> Date: Tue, 16 Jul 2024 12:18:17 +0200 Subject: [PATCH 21/29] docs: improved docstring --- src/ansys/mapdl/core/hpc/pyhps.py | 196 +++++++++++++++++++++++++++++- 1 file changed, 190 insertions(+), 6 deletions(-) diff --git a/src/ansys/mapdl/core/hpc/pyhps.py b/src/ansys/mapdl/core/hpc/pyhps.py index 0cc38f6b34..7ffab0fbc6 100644 --- a/src/ansys/mapdl/core/hpc/pyhps.py +++ b/src/ansys/mapdl/core/hpc/pyhps.py @@ -114,21 +114,205 @@ def get_value_from_json_or_default( class 
JobSubmissionDefinition:
+    """Create a job submission definition for an HPS cluster.
+
+    Create a JobSubmissionDefinition object used to submit Python, APDL or shell jobs
+    to an HPS cluster.
+
+    Parameters
+    ----------
+    main_file : str
+        Main execution file. It can be a Python, shell or APDL file.
+    url : str
+        URL of the HPS cluster. It should include the full address, for
+        example: https://123.4.5.6:3000/hps
+    user : Optional[str], optional
+        Username, by default ``None``. If no user and password are provided,
+        then a token must be used.
+    password : Optional[str], optional
+        Password, by default ``None``. If no user and password are provided,
+        then a token must be used.
+    token : Optional[str], optional
+        Authentication token, by default ``None``. It can be used instead of
+        the user-password pair.
+    mode : Optional[str], optional
+        To force execution of the main file as Python (``mode`` = ``python``),
+        shell (``shell``) or APDL (``apdl``). By default this value is ``None``
+        and the type of file is inferred from the ``main_file`` extension.
+    inputs : Optional[Union[str, list[str]]], optional
+        Input parameters as a string (for instance ``"mypar=24,mypar2='asdf'"``)
+        or as a list of strings. Inputs are only supported by Python jobs.
+        By default ``None``.
+    outputs : Optional[Union[str, list[str]]], optional
+        Output parameters as a string or a list of strings. By default ``None``.
+    requirements_file : Optional[str], optional
+        File path to the requirements file. Only used if ``mode`` is ``python``.
+        If using ``False``, then no virtual environment is generated. This is
+        useful when the job requires no libraries other than those already
+        included in the Python installation.
+        By default it is ``None``; then a requirements file is generated on-the-fly
+        with the packages installed in the activated virtual environment.
+        This might cause some issues if some of the packages are installed
+        in editable mode, because those changes are not ported to the HPS server.
+    shell_file : Optional[str], optional
+        If specified, PyMAPDL runs this file only, so you need to make sure
+        it includes the statements that perform the actions you intend, for
+        instance creating the virtual environment and/or running the Python script.
+        This argument bypasses the selected ``mode``. By default ``None``.
+    extra_files : Optional[Union[str, list[str]]], optional
+        To include extra files as part of the job. It supports relative paths, which are replicated on the server side. By default ``None``.
+    output_files : Optional[Union[str, list[str]]], optional
+        Specify which files are considered output by HPS. By default ``None``.
+    python : Optional[float], optional
+        Specify which minor version of Python to use. If using the Python ``mode``, the virtual environment is generated using this Python version. By default ``None``.
+    num_cores : Optional[int], optional
+        Number of cores used for the job, by default ``None`` which means that
+        this configuration is set by the server.
+    memory : Optional[int], optional
+        Amount of memory RAM used for the job, by default ``None`` which means
+        that this configuration is set by the server.
+    disk_space : Optional[int], optional
+        Amount of disk space reserved for the job, by default ``None`` which
+        means that this configuration is set by the server.
+    exclusive : Optional[bool], optional
+        Use the machines exclusively for this job, by default ``None`` which
+        means that this configuration is set by the server.
+    max_execution_time : Optional[int], optional
+        Set a time limit for the job to run, by default ``None`` which means
+        that this configuration is set by the server.
+    name : Optional[str], optional
+        Name of the project in HPS, by default ``None`` which means that
+        this configuration is set by the server.
+
+    Examples
+    --------
+
+    **Simplest case:** Submit a Python file to be executed and wait for it
+    to finish.
+
+    >>> from ansys.mapdl.core.hpc.pyhps import PyMAPDLJobSubmissionDefinition
+    >>> job = PyMAPDLJobSubmissionDefinition(
+            name="My Python submission",
+            url="https://myhpscluster:3000/hps",
+            user="myuser",
+            password="mypass",
+            main_file="my_python_file.py"
+        )
+    >>> job.submit()
+    >>> job.wait_for_completion()
+
+    **Specifying inputs and outputs:** Submit a Python file with inputs and outputs.
+
+    >>> job = PyMAPDLJobSubmissionDefinition(
+            name="My Python submission",
+            url="https://myhpscluster:3000/hps",
+            user="myuser",
+            password="mypass",
+            main_file="my_python_file.py",
+            inputs=["radius=0.4", "EX=100000000", "nu=0.3"],
+            outputs="stress_max,strain_max"
+        )
+    >>> job.submit()
+
+    where the ``my_python_file.py`` file looks like:
+
+    .. code:: py
+
+        def calculate(radius, elastic_modulus, nu):
+            # Calculate maximum stress and strains
+
+            ...
+            return stress_max, strain_max
+
+
+        stress_max, strain_max = calculate(radius, EX, nu)
+
+    PyMAPDL automatically reads the values of ``stress_max`` and ``strain_max``,
+    and writes them to an output file called ``output.output``, which is located
+    in the HPS cluster job working directory. Additionally, the output can be read
+    in the script using:
+
+    >>> job_results = job.output_values[0]  # result set for the first job
+    >>> output_key = job.outputs[0]  # "stress_max"
+    >>> output = float(job_results[output_key])
+
+    **Setting the number of cores and amount of memory:** Setting the number of
+    cores to 4, reserving 1Gb of RAM, and using Python 3.11.
+
+    >>> job = PyMAPDLJobSubmissionDefinition(
+            name="My Python submission",
+            url="https://myhpscluster:3000/hps",
+            user="myuser",
+            password="mypass",
+            main_file="my_python_file.py",
+            num_cores=4,
+            memory=1024,  # MB
+            python=3.11
+        )
+    >>> job.submit()
+
+    **Running a shell job:** Our shell script can be anything, and does not need
+    to call Python or MAPDL. Executing a shell script can be useful in many scenarios,
+    for instance copying files to another location after finalizing the job.
+    This file can look like:
+
+    .. code:: bash
+
+        # Setting virtual environment
+        python3.10 -m venv .venv
+        source .venv/bin/activate
+
+        # Installing PyMAPDL
+        pip install 'ansys-mapdl-core'
+
+        # Running main script
+        python my_python_script.py
+
+        # Backup
+        cp ./my_output /home/user/backup
+
+    Then you can run that shell script using:
+
+    >>> job = PyMAPDLJobSubmissionDefinition(
+            name="My shell script submission",
+            url="https://myhpscluster:3000/hps",
+            user="myuser",
+            password="mypass",
+            main_file="my_shell_script.sh",
+            extra_files=['my_python_script.py'],
+            python=3.10
+        )
+    >>> job.submit()
+
+    **Run an MAPDL job**. Run an MAPDL input deck and use a token to authenticate. The credentials must have been previously stored using the ``pymapdl login`` CLI.
+
+    >>> from ansys.mapdl.core.hpc.login import get_token_access
+    >>> token = get_token_access()
+    >>> job = PyMAPDLJobSubmissionDefinition(
+            name="My APDL input submission",
+            url="https://myhpscluster:3000/hps",
+            user="myuser",
+            password="mypass",
+            main_file="my_apdl_code.inp",
+        )
+    >>> job.submit()
+
+    """

     def __init__(
         self,
-        main_file,
-        url,
+        main_file: str,
+        url: str,
         user: Optional[str] = None,
         password: Optional[str] = None,
         token: Optional[str] = None,
         mode: Optional[str] = None,
-        inputs: Optional[Union[list[str]]] = None,
-        outputs: Optional[Union[list[str]]] = None,
+        inputs: Optional[Union[str, list[str]]] = None,
+        outputs: Optional[Union[str, list[str]]] = None,
         requirements_file: Optional[str] = None,
         shell_file: Optional[str] = None,
-        extra_files: Optional[Union[list[str]]] = None,
-        output_files: Optional[Union[list[str]]] = None,
+        extra_files: Optional[Union[str, list[str]]] = None,
+        output_files: Optional[Union[str, list[str]]] = None,
         python: Optional[float] = None,
         num_cores: Optional[int] = None,
         memory: Optional[int] = None,

From b9a6c981cdbc2eecbcc1611fad152cbd65bd5da1 Mon Sep 17 00:00:00 2001
From: German <28149841+germa89@users.noreply.github.com>
Date: Tue, 16 Jul 2024 12:23:34 +0200
Subject: [PATCH 22/29] feat: renaming to submission.

---
 src/ansys/mapdl/core/hpc/pyhps.py | 65 ++++++++++++++++---------------
 1 file changed, 33 insertions(+), 32 deletions(-)

diff --git a/src/ansys/mapdl/core/hpc/pyhps.py b/src/ansys/mapdl/core/hpc/pyhps.py
index 7ffab0fbc6..1acc8b3896 100644
--- a/src/ansys/mapdl/core/hpc/pyhps.py
+++ b/src/ansys/mapdl/core/hpc/pyhps.py
@@ -113,10 +113,10 @@ def get_value_from_json_or_default(
 }


-class JobSubmissionDefinition:
-    """Create a job submission definition for an HPS cluster.
+class SubmissionDefinition:
+    """Create a submission definition for an HPS cluster.

-    Create a JobSubmissionDefinition object used to submit Python, APDL or shell jobs
+    Create a SubmissionDefinition object used to submit Python, APDL or shell projects/jobs
     to an HPS cluster.

     Parameters
@@ -148,7 +148,7 @@
     requirements_file : Optional[str], optional
         File path to the requirements file. Only used if ``mode`` is ``python``.
         If using ``False``, then no virtual environment is generated. This is
-        useful when the job requires no libraries other than those already
+        useful when the submission requires no libraries other than those already
         included in the Python installation.
         By default it is ``None``; then a requirements file is generated on-the-fly
         with the packages installed in the activated virtual environment.
@@ -160,25 +160,25 @@
     extra_files : Optional[Union[str, list[str]]], optional
-        To include extra files as part of the job. It supports relative paths, which are replicated on the server side. By default ``None``.
+        To include extra files as part of the submission. It supports relative paths, which are replicated on the server side. By default ``None``.
     output_files : Optional[Union[str, list[str]]], optional
         Specify which files are considered output by HPS. By default ``None``.
     python : Optional[float], optional
         Specify which minor version of Python to use. If using the Python ``mode``, the virtual environment is generated using this Python version. By default ``None``.
     num_cores : Optional[int], optional
-        Number of cores used for the job, by default ``None`` which means that
+        Number of cores used for the submission, by default ``None`` which means that
         this configuration is set by the server.
     memory : Optional[int], optional
-        Amount of memory RAM used for the job, by default ``None`` which means
+        Amount of memory RAM used for the submission, by default ``None`` which means
         that this configuration is set by the server.
     disk_space : Optional[int], optional
-        Amount of disk space reserved for the job, by default ``None`` which
+        Amount of disk space reserved for the submission, by default ``None`` which
        means that this configuration is set by the server.
     exclusive : Optional[bool], optional
-        Use the machines exclusively for this job, by default ``None`` which
+        Use the machines exclusively for this submission, by default ``None`` which
         means that this configuration is set by the server.
     max_execution_time : Optional[int], optional
-        Set a time limit for the job to run, by default ``None`` which means
+        Set a time limit for the submission to run, by default ``None`` which means
         that this configuration is set by the server.
     name : Optional[str], optional
         Name of the project in HPS, by default ``None`` which means that
         this configuration is set by the server.
@@ -190,20 +190,20 @@
     **Simplest case:** Submit a Python file to be executed and wait for it
     to finish.

-    >>> from ansys.mapdl.core.hpc.pyhps import PyMAPDLJobSubmissionDefinition
-    >>> job = PyMAPDLJobSubmissionDefinition(
+    >>> from ansys.mapdl.core.hpc.pyhps import PyMAPDLSubmissionDefinition
+    >>> submission = PyMAPDLSubmissionDefinition(
             name="My Python submission",
             url="https://myhpscluster:3000/hps",
             user="myuser",
             password="mypass",
             main_file="my_python_file.py"
         )
-    >>> job.submit()
-    >>> job.wait_for_completion()
+    >>> submission.submit()
+    >>> submission.wait_for_completion()

     **Specifying inputs and outputs:** Submit a Python file with inputs and outputs.

-    >>> job = PyMAPDLJobSubmissionDefinition(
+    >>> submission = PyMAPDLSubmissionDefinition(
             name="My Python submission",
             url="https://myhpscluster:3000/hps",
             user="myuser",
             password="mypass",
             main_file="my_python_file.py",
             inputs=["radius=0.4", "EX=100000000", "nu=0.3"],
             outputs="stress_max,strain_max"
         )
-    >>> job.submit()
+    >>> submission.submit()

     where the ``my_python_file.py`` file looks like:

     .. code:: py

         def calculate(radius, elastic_modulus, nu):
             # Calculate maximum stress and strains

             ...
             return stress_max, strain_max


         stress_max, strain_max = calculate(radius, EX, nu)

     PyMAPDL automatically reads the values of ``stress_max`` and ``strain_max``,
     and writes them to an output file called ``output.output``, which is located
     in the HPS cluster job working directory. Additionally, the output can be read
     in the script using:

-    >>> job_results = job.output_values[0]  # result set for the first job
-    >>> output_key = job.outputs[0]  # "stress_max"
-    >>> output = float(job_results[output_key])
+    >>> job_results = submission.output_values[0]  # result set for the first job
+    >>> output_key = submission.outputs[0]  # "stress_max"
+    >>> output = float(job_results[output_key])
+    1.345E9

     **Setting the number of cores and amount of memory:** Setting the number of
     cores to 4, reserving 1Gb of RAM, and using Python 3.11.

-    >>> job = PyMAPDLJobSubmissionDefinition(
+    >>> submission = PyMAPDLSubmissionDefinition(
             name="My Python submission",
             url="https://myhpscluster:3000/hps",
             user="myuser",
             password="mypass",
             main_file="my_python_file.py",
             num_cores=4,
             memory=1024,  # MB
             python=3.11
         )
-    >>> job.submit()
+    >>> submission.submit()

-    **Running a shell job:** Our shell script can be anything, and does not need
+    **Running a shell submission:** Our shell script can be anything, and does not need
     to call Python or MAPDL. 
Executing a shell script can be useful in many scenarios,
-    for instance copying files to another location after finalizing the job.
+    for instance copying files to another location after finalizing the submission.
     This file can look like:

     .. code:: bash

         # Setting virtual environment
         python3.10 -m venv .venv
         source .venv/bin/activate

         # Installing PyMAPDL
         pip install 'ansys-mapdl-core'

         # Running main script
         python my_python_script.py

         # Backup
         cp ./my_output /home/user/backup

     Then you can run that shell script using:

-    >>> job = PyMAPDLJobSubmissionDefinition(
+    >>> submission = PyMAPDLSubmissionDefinition(
             name="My shell script submission",
             url="https://myhpscluster:3000/hps",
             user="myuser",
             password="mypass",
             main_file="my_shell_script.sh",
             extra_files=['my_python_script.py'],
             python=3.10
         )
-    >>> job.submit()
+    >>> submission.submit()

-    **Run an MAPDL job**. Run an MAPDL input deck and use a token to authenticate. The credentials must have been previously stored using the ``pymapdl login`` CLI.
+    **Run an MAPDL submission**. Run an MAPDL input deck and use a token to authenticate. The credentials must have been previously stored using the ``pymapdl login`` CLI.

     >>> from ansys.mapdl.core.hpc.login import get_token_access
     >>> token = get_token_access()
-    >>> job = PyMAPDLJobSubmissionDefinition(
+    >>> submission = PyMAPDLSubmissionDefinition(
             name="My APDL input submission",
             url="https://myhpscluster:3000/hps",
             user="myuser",
             password="mypass",
             main_file="my_apdl_code.inp",
         )
-    >>> job.submit()
+    >>> submission.submit()

     """

     def __init__(
         self,
@@ -1166,7 +1167,7 @@ def close_client(self):
         self._client.session.close()


-class PyMAPDLJobSubmissionDefinition(JobSubmissionDefinition):
+class PyMAPDLSubmissionDefinition(SubmissionDefinition):
     pass


@@ -1176,10 +1177,10 @@ class PyMAPDLJobSubmissionDefinition(JobSubmissionDefinition):
     format="[%(asctime)s | %(levelname)s] %(message)s", level=logging.DEBUG
 )

-    # from ansys.mapdl.core.hpc import PyMAPDLJobSubmissionDefinition
+    # from ansys.mapdl.core.hpc import PyMAPDLSubmissionDefinition
     # Test 1
     main_file = "/Users/german.ayuso/pymapdl/src/ansys/mapdl/core/hpc/main.py"
-    job = PyMAPDLJobSubmissionDefinition(
+    job = PyMAPDLSubmissionDefinition(
         url="https://10.231.106.91:3000/hps",
         user="repuser",
         password="repuser",
@@ -1194,7 +1195,7 @@

     # Test2
     # main_file = "main.py"
-    # job1 = PyMAPDLJobSubmissionDefinition(
+    # job1 = PyMAPDLSubmissionDefinition(
     #     url="https://10.231.106.91:3000/hps",
     #     user="repuser",
     #     password="repuser",

From e1b85b810556f527b65224723b5aaca6398f7dd2 Mon Sep 17 00:00:00 2001
From: German <28149841+germa89@users.noreply.github.com>
Date: Tue, 16 Jul 2024 13:04:22 +0200
Subject: [PATCH 23/29] fix: doc example

---
 src/ansys/mapdl/core/hpc/pyhps.py | 5 ++---
 1 file changed, 2 insertions(+), 3 deletions(-)

diff --git a/src/ansys/mapdl/core/hpc/pyhps.py b/src/ansys/mapdl/core/hpc/pyhps.py
index 1acc8b3896..9837b1c00a 100644
--- a/src/ansys/mapdl/core/hpc/pyhps.py
+++ b/src/ansys/mapdl/core/hpc/pyhps.py
@@ -288,12 +288,11 @@ def calculate(radius, elastic_modulus, nu):
     **Run an MAPDL submission**. Run an MAPDL input deck and use a token to authenticate. The credentials must have been previously stored using the ``pymapdl login`` CLI.
     >>> from ansys.mapdl.core.hpc.login import get_token_access
-    >>> token = get_token_access()
+    >>> token = get_token_access(url='https://cluster.example.com', user='admin', password='securepass')
     >>> submission = PyMAPDLSubmissionDefinition(
             name="My APDL input submission",
             url="https://myhpscluster:3000/hps",
-            user="myuser",
-            password="mypass",
+            token=token,
             main_file="my_apdl_code.inp",
         )
     >>> submission.submit()

From e0bd844514cc9f240dae1ca33ccf29b636ca921b Mon Sep 17 00:00:00 2001
From: German <28149841+germa89@users.noreply.github.com>
Date: Tue, 16 Jul 2024 13:12:53 +0200
Subject: [PATCH 24/29] docs: improve docstring examples

---
 src/ansys/mapdl/core/hpc/login.py | 11 +++++++++++
 1 file changed, 11 insertions(+)

diff --git a/src/ansys/mapdl/core/hpc/login.py b/src/ansys/mapdl/core/hpc/login.py
index ccee2a5ab3..76e583de82 100644
--- a/src/ansys/mapdl/core/hpc/login.py
+++ b/src/ansys/mapdl/core/hpc/login.py
@@ -274,6 +274,17 @@ def get_token_access(url: str = None, user: str = None, password: str = None):
     -----
     - If credentials are expired, they are deleted from storage.
     - The credentials can be stored using the ``pymapdl login`` CLI.
+      Alternatively, you can use ``store_credentials``:
+
+      .. code:: py
+
+          from ansys.mapdl.core.hpc.login import store_credentials
+
+          user = "myuser"
+          password = "mypass"
+          url = "https://cluster.example.com"
+          store_credentials(user, password, url)
+

     Examples
     --------

From 6cfa2eb19d12b9f7f9ae5bb13aa5eb99c4302574 Mon Sep 17 00:00:00 2001
From: German <28149841+germa89@users.noreply.github.com>
Date: Tue, 16 Jul 2024 13:20:24 +0200
Subject: [PATCH 25/29] feat: rename file to match main function 'submit'

---
 src/ansys/mapdl/core/cli/{hpc.py => submit.py} | 0
 1 file changed, 0 insertions(+), 0 deletions(-)
 rename src/ansys/mapdl/core/cli/{hpc.py => submit.py} (100%)

diff --git a/src/ansys/mapdl/core/cli/hpc.py b/src/ansys/mapdl/core/cli/submit.py
similarity index 100%
rename from src/ansys/mapdl/core/cli/hpc.py
rename to src/ansys/mapdl/core/cli/submit.py

From 3b55c893b98af9e518683aa2da4f1f239ef65dff Mon Sep 17 00:00:00 2001
From: German <28149841+germa89@users.noreply.github.com>
Date: Tue, 16 Jul 2024 13:29:12 +0200
Subject: [PATCH 26/29] feat: adding option to pass token to CLI.

---
 src/ansys/mapdl/core/cli/submit.py | 48 +++++++++++++++++++++---------
 1 file changed, 34 insertions(+), 14 deletions(-)

diff --git a/src/ansys/mapdl/core/cli/submit.py b/src/ansys/mapdl/core/cli/submit.py
index a3b765dc58..160e03042f 100644
--- a/src/ansys/mapdl/core/cli/submit.py
+++ b/src/ansys/mapdl/core/cli/submit.py
@@ -63,7 +63,7 @@
     "--user",
     default=None,
     type=str,
-    help="""Username for logging into the HPC cluster.
+    help="""Username for logging into the HPS cluster.
 If it is not provided, there is a chain of places where PyMAPDL looks for a username.
 First, it checks if the username is given in the file specified by the argument ``--config_file``.
 If that file does not have a username or does not exist, then it checks the username configured using the ``pymapdl login`` CLI command, for the given HPS cluster URL.
@@ -75,7 +75,7 @@
     "--password",
     default=None,
     type=str,
-    help="""Password for logging into the HPC cluster.
+    help="""Password for logging into the HPS cluster.
 If it is not provided, there is a chain of places where PyMAPDL looks for a password.
 First, it checks if the password is given in the file specified by the argument ``--config_file``.
 If that file does not have a password or does not exist, then it checks the password configured using the ``pymapdl login`` CLI command, for the given HPS cluster URL.
@@ -83,6 +83,14 @@
 If no password is found, an exception is raised.
 """,
 )
+@click.option(
+    "--token",
+    default=None,
+    type=str,
+    help="""Authentication token for logging into the HPS cluster.
+If used, it bypasses the ``user`` and ``password`` arguments.
+""",
+)
 @click.option(
     "--python",
     default=None,
@@ -156,9 +164,15 @@
     is_flag=False,
     flag_value=True,
     help="""
+**WARNING:** This option stores the password unencrypted in a file!
+
 Whether to write the configuration to the configuration file (specified using the
 ``config_file`` argument) after the job has been successfully submitted.
-The default is ``False``. If ``True``, and the file already exists, the configuration file is overwritten.""",
+The default is ``False``. If ``True``, and the file already exists, the configuration
+file is overwritten.
+You can always delete the fields that you do not want stored, such as the password
+and username, from the configuration file.
+""",
 )
 @click.option(
     "--num_cores",
@@ -233,6 +247,7 @@ def submit(
     url: str = None,
     user: str = None,
     password: str = None,
+    token: str = None,
     python: Optional[float] = None,
     inputs: Optional[str] = None,
     outputs: Optional[str] = None,
@@ -275,21 +290,25 @@ def submit(
         config_file = None
         logger.debug(f"Using default HPS configuration file: {config_file}")

-    # Getting cluster login configuration from CLI or file
-    url = get_value_from_json_or_default(
-        url, config_file, "url", None, raise_if_none=False
-    )
-    url = url or get_default_url()  # use the stored default URL
-    # Precedence: CLI argument first, then the config file, and finally the
-    # credentials stored by the 'pymapdl login' CLI command.
+    if not token:
+        # Getting cluster login configuration from CLI or file
+        url = get_value_from_json_or_default(
+            url, config_file, "url", None, raise_if_none=False
+        )
+        url = url or get_default_url()  # use the stored default URL
+        # Precedence: CLI argument first, then the config file, and finally the
+        # credentials stored by the 'pymapdl login' CLI command.

-    # allow retrieving user from the configuration
-    user = get_value_from_json_or_default(
-        user, config_file, "user", raise_if_none=False
-    )
+        # allow retrieving user from the configuration
+        user = get_value_from_json_or_default(
+            user, config_file, "user", raise_if_none=False
+        )

-    # Getting access token
-    token = get_token_access(url, user, password)
+        # Getting access token
+        token = get_token_access(url, user, password)
+        logger.debug(f"Token retrieved from {url}, using '{user}' user.")
+    else:
+        logger.debug("Using the token supplied on the command line.")

     # Getting other configuration from CLI or file
     python = get_value_from_json_or_default(python, config_file, "python", 3)
@@ -344,6 +363,7 @@ def submit(
     logger.debug(
         f"Saving the following configuration to the config file ({config_file}):\n{config}."
     )
+    logger.warning(f"The password is stored unencrypted in {config_file}")
     with open(config_file, "w") as fid:
         json.dump(config, fid)

From ef24249061cb0e5d2e22b50d433549c7a70460fa Mon Sep 17 00:00:00 2001
From: German <28149841+germa89@users.noreply.github.com>
Date: Wed, 17 Jul 2024 13:15:26 +0200
Subject: [PATCH 27/29] docs: adding API docs

---
 doc/source/api/hpc.rst   | 22 ++++++++++++++++++++++
 doc/source/api/index.rst |  1 +
 2 files changed, 23 insertions(+)
 create mode 100644 doc/source/api/hpc.rst

diff --git a/doc/source/api/hpc.rst b/doc/source/api/hpc.rst
new file mode 100644
index 0000000000..7185e81fa6
--- /dev/null
+++ b/doc/source/api/hpc.rst
@@ -0,0 +1,22 @@
+.. _ref_hpc_api:

+.. currentmodule:: ansys.mapdl.core.hpc

+HPC Submissions
+===============

+.. 
autosummary:: + :toctree: _autosummary + + pyhps.SubmissionDefinition + + +HPC login +========= + +.. autosummary:: + :toctree: _autosummary + + login.get_token_access + + diff --git a/doc/source/api/index.rst b/doc/source/api/index.rst index 459792ce43..b53f052865 100644 --- a/doc/source/api/index.rst +++ b/doc/source/api/index.rst @@ -23,6 +23,7 @@ PyMAPDL, see :ref:`ref_mapdl_commands`. database geometry helper + hpc inline krylov launcher From 7af929817ba4cdd02a8d726a666bf450f7b2361e Mon Sep 17 00:00:00 2001 From: German <28149841+germa89@users.noreply.github.com> Date: Wed, 17 Jul 2024 13:34:43 +0200 Subject: [PATCH 28/29] docs: using other type of reference --- doc/source/api/hpc.rst | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/doc/source/api/hpc.rst b/doc/source/api/hpc.rst index 7185e81fa6..dbf9d4808f 100644 --- a/doc/source/api/hpc.rst +++ b/doc/source/api/hpc.rst @@ -1,6 +1,6 @@ .. _ref_hpc_api: -.. currentmodule:: ansys.mapdl.core.hpc +.. currentmodule:: ansys.mapdl.core.hpc.pyhps HPC Submissions =============== @@ -8,15 +8,18 @@ HPC Submissions .. autosummary:: :toctree: _autosummary - pyhps.SubmissionDefinition + SubmissionDefinition + +.. currentmodule:: ansys.mapdl.core.hpc.login + HPC login ========= .. autosummary:: :toctree: _autosummary - login.get_token_access + get_token_access From a78aa4c8e2c985fc29c6d457b76bf0070d2b9d79 Mon Sep 17 00:00:00 2001 From: German <28149841+germa89@users.noreply.github.com> Date: Wed, 17 Jul 2024 13:35:36 +0200 Subject: [PATCH 29/29] feat: adding imports to hpc.__init__ --- src/ansys/mapdl/core/hpc/__init__.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/src/ansys/mapdl/core/hpc/__init__.py b/src/ansys/mapdl/core/hpc/__init__.py index a95b96476b..701be4f56b 100644 --- a/src/ansys/mapdl/core/hpc/__init__.py +++ b/src/ansys/mapdl/core/hpc/__init__.py @@ -19,3 +19,6 @@ # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. + +from ansys.mapdl.core.hpc.login import get_token_access +from ansys.mapdl.core.hpc.pyhps import PyMAPDLSubmissionDefinition
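
---

With patch 29 applied, the new public API is importable directly from
``ansys.mapdl.core.hpc``. The following is a minimal end-to-end sketch based on
the docstring examples above; the cluster URL and file name are placeholders,
and it assumes the ``hps`` extra ("pip install 'ansys-mapdl-core[hps]'") is
installed and that default credentials were stored with ``pymapdl login --default``:

.. code:: py

    from ansys.mapdl.core.hpc import PyMAPDLSubmissionDefinition, get_token_access

    # Get a token from the default credentials stored by 'pymapdl login --default'
    token = get_token_access()

    # Define and submit a Python job to the HPS cluster
    submission = PyMAPDLSubmissionDefinition(
        name="My Python submission",
        url="https://myhpscluster:3000/hps",
        token=token,
        main_file="my_python_file.py",
        num_cores=4,
    )
    submission.submit()
    submission.wait_for_completion()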