diff --git a/src/ansys/fluent/core/meshing/meshing_workflow.py b/src/ansys/fluent/core/meshing/meshing_workflow.py
index 4238b9107c1..0422d706d85 100644
--- a/src/ansys/fluent/core/meshing/meshing_workflow.py
+++ b/src/ansys/fluent/core/meshing/meshing_workflow.py
@@ -240,3 +240,68 @@ class WorkflowMode(Enum):
     FAULT_TOLERANT_MESHING_MODE = FaultTolerantMeshingWorkflow
     TWO_DIMENSIONAL_MESHING_MODE = TwoDimensionalMeshingWorkflow
     TOPOLOGY_BASED_MESHING_MODE = TopologyBasedMeshingWorkflow
+
+
+class LoadWorkflow(Workflow):
+    """Provides a specialization of the workflow wrapper for a loaded workflow."""
+
+    def __init__(
+        self,
+        workflow: PyMenuGeneric,
+        meshing: PyMenuGeneric,
+        file_path: str,
+        fluent_version: FluentVersion,
+    ) -> None:
+        """Initialize a ``LoadWorkflow`` instance.
+
+        Parameters
+        ----------
+        workflow : PyMenuGeneric
+            Underlying workflow object.
+        meshing : PyMenuGeneric
+            Meshing object.
+        file_path : str
+            Path to the saved workflow.
+        fluent_version : FluentVersion
+            Version of Fluent in this session.
+        """
+        super().__init__(
+            workflow=workflow, command_source=meshing, fluent_version=fluent_version
+        )
+        self._meshing = meshing
+        self._file_path = file_path
+
+    def load(self) -> None:
+        """Load a workflow."""
+        self._load_workflow(file_path=self._file_path)
+
+
+class CreateWorkflow(Workflow):
+    """Provides a specialization of the workflow wrapper for a newly created
+    workflow."""
+
+    def __init__(
+        self,
+        workflow: PyMenuGeneric,
+        meshing: PyMenuGeneric,
+        fluent_version: FluentVersion,
+    ) -> None:
+        """Initialize a ``CreateWorkflow`` instance.
+
+        Parameters
+        ----------
+        workflow : PyMenuGeneric
+            Underlying workflow object.
+        meshing : PyMenuGeneric
+            Meshing object.
+        fluent_version : FluentVersion
+            Version of Fluent in this session.
+ """ + super().__init__( + workflow=workflow, command_source=meshing, fluent_version=fluent_version + ) + self._meshing = meshing + + def create(self) -> None: + """Create a workflow.""" + self._create_workflow() diff --git a/src/ansys/fluent/core/session_base_meshing.py b/src/ansys/fluent/core/session_base_meshing.py index a1e7c42c44e..3a03afa3622 100644 --- a/src/ansys/fluent/core/session_base_meshing.py +++ b/src/ansys/fluent/core/session_base_meshing.py @@ -4,7 +4,11 @@ import logging from ansys.fluent.core.fluent_connection import FluentConnection -from ansys.fluent.core.meshing.meshing_workflow import WorkflowMode +from ansys.fluent.core.meshing.meshing_workflow import ( + CreateWorkflow, + LoadWorkflow, + WorkflowMode, +) from ansys.fluent.core.services.datamodel_se import PyMenuGeneric from ansys.fluent.core.services.datamodel_tui import TUIMenu from ansys.fluent.core.session_shared import _CODEGEN_MSG_DATAMODEL, _CODEGEN_MSG_TUI @@ -48,6 +52,8 @@ def __init__( self._ft_workflow = None self._2dm_workflow = None self._tb_workflow = None + self._loaded_workflow = None + self._created_workflow = None self._part_management = None self._pm_file_management = None self._preferences = None @@ -199,9 +205,31 @@ def topology_based_meshing_workflow(self): ) return self._tb_workflow + def load_workflow(self, file_path: str): + """Datamodel root of workflow exposed in object-oriented manner.""" + if not self._loaded_workflow: + self._loaded_workflow = LoadWorkflow( + self._workflow_se, + self.meshing, + file_path, + self.get_fluent_version(), + ) + return self._loaded_workflow + + @property + def create_workflow(self): + """Datamodel root of the workflow exposed in an object-oriented manner.""" + if not self._created_workflow: + self._created_workflow = CreateWorkflow( + self._workflow_se, + self.meshing, + self.get_fluent_version(), + ) + return self._created_workflow + @property def PartManagement(self): - """Datamdoel root of PartManagement.""" + """Datamodel root of ``PartManagement``.""" if self._part_management is None: try: pm_module = importlib.import_module( diff --git a/src/ansys/fluent/core/session_pure_meshing.py b/src/ansys/fluent/core/session_pure_meshing.py index 11fed648566..586252b5ad5 100644 --- a/src/ansys/fluent/core/session_pure_meshing.py +++ b/src/ansys/fluent/core/session_pure_meshing.py @@ -111,6 +111,16 @@ def two_dimensional_meshing(self): self._base_meshing.two_dimensional_meshing_workflow.reinitialize() return self._base_meshing.two_dimensional_meshing_workflow + def load_workflow(self, file_path: str): + """Load a saved workflow.""" + self._base_meshing.load_workflow(file_path=file_path).load() + return self._base_meshing.load_workflow(file_path=file_path) + + def create_workflow(self): + """Create a meshing workflow.""" + self._base_meshing.create_workflow.create() + return self._base_meshing.create_workflow + def topology_based(self): """Get a new topology-based meshing workflow. diff --git a/src/ansys/fluent/core/session_pure_meshing.pyi b/src/ansys/fluent/core/session_pure_meshing.pyi index f06d8d21c2f..794ef46b86c 100644 --- a/src/ansys/fluent/core/session_pure_meshing.pyi +++ b/src/ansys/fluent/core/session_pure_meshing.pyi @@ -20,6 +20,8 @@ class PureMeshing: def fault_tolerant(self): ... def two_dimensional_meshing(self): ... def topology_based(self): ... + def load_workflow(self, file_path: str): ... + def create_workflow(self): ... @property def PartManagement(self) -> partmanagement_root: ... 
@property diff --git a/src/ansys/fluent/core/workflow.py b/src/ansys/fluent/core/workflow.py index 77046a511b9..52d05004c92 100644 --- a/src/ansys/fluent/core/workflow.py +++ b/src/ansys/fluent/core/workflow.py @@ -12,6 +12,7 @@ from ansys.fluent.core.data_model_cache import DataModelCache from ansys.fluent.core.services.datamodel_se import ( PyCallableStateObject, + PyCommand, PyMenuGeneric, PySingletonCommandArgumentsSubItem, ) @@ -24,9 +25,15 @@ def camel_to_snake_case(camel_case_str: str) -> str: return camel_to_snake_case.cache[camel_case_str] except KeyError: if not camel_case_str.islower(): - _snake_case_str = re.sub( - "((?<=[a-z])[A-Z0-9]|(?!^)[A-Z](?=[a-z0-9]))", r"_\1", camel_case_str - ).lower() + _snake_case_str = ( + re.sub( + "((?<=[a-z])[A-Z0-9]|(?!^)[A-Z](?=[a-z0-9]))", + r"_\1", + camel_case_str, + ) + .lower() + .replace("__", "_") + ) else: _snake_case_str = camel_case_str camel_to_snake_case.cache[camel_case_str] = _snake_case_str @@ -76,7 +83,7 @@ def __init__(self, task_name): def _init_task_accessors(obj): logger.debug("_init_task_accessors") logger.debug(f"thread ID in _init_task_accessors {threading.get_ident()}") - for task in obj.ordered_children(recompute=True): + for task in obj.tasks(recompute=True): py_name = task.python_name() logger.debug(f"py_name: {py_name}") with obj._lock: @@ -96,7 +103,7 @@ def _refresh_task_accessors(obj): with obj._lock: old_task_names = set(obj._python_task_names) logger.debug(f"_refresh_task_accessors old_task_names: {old_task_names}") - tasks = obj.ordered_children(recompute=True) + tasks = obj.tasks(recompute=True) current_task_names = [task.python_name() for task in tasks] logger.debug(f"current_task_names: {current_task_names}") current_task_name_set = set(current_task_names) @@ -145,8 +152,8 @@ class BaseTask: ------- get_direct_upstream_tasks() get_direct_downstream_tasks() - ordered_children() - inactive_ordered_children() + tasks() + inactive_tasks() get_id() get_idx() __getattr__(attr) @@ -178,6 +185,7 @@ def __init__( _cmd=None, _python_name=None, _python_task_names=[], + _python_task_names_map={}, _lock=command_source._lock, _ordered_children=[], _task_list=[], @@ -226,11 +234,11 @@ def get_direct_downstream_tasks(self) -> list: attr="outputs", other_attr="requiredInputs" ) - def ordered_children(self, recompute=True) -> list: + def tasks(self, recompute=True) -> list: """Get the ordered task list held by this task. This method sort tasks in terms of the workflow order and only includes this task's top-level tasks. - You can obtain other tasks by calling the ``ordered_children()`` method on a parent task. + You can obtain other tasks by calling the ``tasks()`` method on a parent task. Given the workflow:: @@ -273,7 +281,11 @@ def _task_by_id(task_id): self._task_list = task_list return self._ordered_children - def inactive_ordered_children(self) -> list: + def task_names(self): + """Get the list of the Python names for the available tasks.""" + return [child.python_name() for child in self.tasks()] + + def inactive_tasks(self) -> list: """Get the inactive ordered child list. 
Returns @@ -349,7 +361,9 @@ def python_name(self) -> str: try: this_command = self._command() # temp reuse helpString - self._python_name = this_command.get_attr("helpString") + self._python_name = camel_to_snake_case( + this_command.get_attr("helpString") + ) if ( self._python_name in self._command_source._help_string_display_text_map @@ -470,6 +484,7 @@ def rename(self, new_name: str): self._command_source._repeated_task_help_string_display_text_map[ new_name ] = new_name + self._python_name = new_name return self._task.Rename(NewName=new_name) def add_child_to_task(self): @@ -484,18 +499,25 @@ def insert_compound_child_task(self): """Insert a compound child task.""" return self._task.InsertCompoundChildTask() - def get_next_possible_tasks(self) -> list[str]: - """Get the list of possible Python names that can be inserted as tasks after - this current task is executed.""" - return [camel_to_snake_case(task) for task in self._task.GetNextPossibleTasks()] + def _get_next_python_task_names(self) -> list[str]: + self._python_task_names_map = {} + for command_name in self._task.GetNextPossibleTasks(): + self._python_task_names_map[ + camel_to_snake_case( + getattr(self._command_source._command_source, command_name) + .create_instance() + .get_attr("helpString") + ) + ] = command_name + return list(self._python_task_names_map.keys()) - def insert_next_task(self, command_name: str): + def _insert_next_task(self, task_name: str): """Insert a task based on the Python name after the current task is executed. Parameters ---------- - command_name: str - Name of the new task. + task_name: str + Python name of the new task. Returns ------- @@ -504,19 +526,49 @@ def insert_next_task(self, command_name: str): Raises ------ ValueError - If the command name does not match a task name. + If the Python name does not match the next possible task names. """ - if command_name not in self.get_next_possible_tasks(): + if task_name not in self._get_next_python_task_names(): raise ValueError( - f"'{command_name}' cannot be inserted next to '{self.python_name()}'. \n" - "Please use 'get_next_possible_tasks()' to view list of allowed tasks." + f"'{task_name}' cannot be inserted next to '{self.python_name()}'." 
) return self._task.InsertNextTask( - CommandName=snake_to_camel_case( - command_name, self._task.GetNextPossibleTasks() - ) + CommandName=self._python_task_names_map[task_name] ) + @property + def insertable_tasks(self): + """Tasks that can be inserted after the current task.""" + return self._NextTask(self) + + class _NextTask: + def __init__(self, base_task): + """Initialize an ``_NextTask`` instance.""" + self._base_task = base_task + self._insertable_tasks = [] + for item in self._base_task._get_next_python_task_names(): + insertable_task = type("Insert", (self._Insert,), {})( + self._base_task, item + ) + setattr(self, item, insertable_task) + self._insertable_tasks.append(insertable_task) + + def __call__(self): + return self._insertable_tasks + + class _Insert: + def __init__(self, base_task, name): + """Initialize an ``_Insert`` instance.""" + self._base_task = base_task + self._name = name + + def insert(self): + """Insert a task in the workflow.""" + return self._base_task._insert_next_task(task_name=self._name) + + def __repr__(self): + return f"" + def __call__(self, **kwds) -> Any: if kwds: self._task.Arguments.set_state(**kwds) @@ -529,9 +581,7 @@ def _tasks_with_matching_attributes(self, attr: str, other_attr: str) -> list: return [] attrs = set(attrs) tasks = [ - task - for task in self._command_source.ordered_children() - if task.name() != self.name() + task for task in self._command_source.tasks() if task.name() != self.name() ] matches = [] for task in tasks: @@ -956,7 +1006,7 @@ def __init__( """ super().__init__(command_source, task) - def ordered_children(self, recompute=True) -> list: + def tasks(self, recompute=True) -> list: """Get the ordered task list held by the workflow. SimpleTasks have no TaskList. @@ -1067,7 +1117,7 @@ def __init__( """ super().__init__(command_source, task) - def inactive_ordered_children(self) -> list: + def inactive_tasks(self) -> list: """Get the inactive ordered task list held by this task. Returns @@ -1150,7 +1200,7 @@ def last_child(self) -> BaseTask: BaseTask the last child of this CompoundTask """ - children = self.ordered_children() + children = self.tasks() if children: return children[-1] @@ -1168,7 +1218,7 @@ def compound_child(self, name: str): the named child of this CompoundTask """ try: - return next(filter(lambda t: t.name() == name, self.ordered_children())) + return next(filter(lambda t: t.name() == name, self.tasks())) except StopIteration: pass @@ -1200,7 +1250,7 @@ class Workflow: Methods ------- - ordered_children() + tasks() __getattr__(attr) __dir__() __call__() @@ -1237,10 +1287,13 @@ def __init__( self._help_string_display_id_map = {} self._help_string_display_text_map = {} self._repeated_task_help_string_display_text_map = {} + self._initial_task_python_names_map = {} self._unwanted_attrs = { "reset_workflow", "initialize_workflow", "load_workflow", + "insert_new_task", + "create_composite_task", "create_new_workflow", "rules", "service", @@ -1265,11 +1318,11 @@ def task(self, name: str) -> BaseTask: """ return _makeTask(self, name) - def ordered_children(self, recompute=True) -> list: + def tasks(self, recompute=True) -> list: """Get the ordered task list held by the workflow. This method sort tasks in terms of the workflow order and only includes this task's top-level tasks. - You can obtain other tasks by calling the ``ordered_children()`` method on a parent task. + You can obtain other tasks by calling the ``tasks()`` method on a parent task. Consider the following workflow. 
@@ -1320,7 +1373,8 @@ def child_task_python_names(self) -> List[str]: with self._lock: return self._python_task_names - def inactive_ordered_children(self) -> list: + @staticmethod + def inactive_tasks() -> list: """Get the inactive ordered task list held by this task. Returns @@ -1398,11 +1452,84 @@ def _attr_from_wrapped_workflow(self, attr): except AttributeError: pass - def _new_workflow(self, name: str, dynamic_interface: bool = True): + def _activate_dynamic_interface(self, dynamic_interface: bool): self._dynamic_interface = dynamic_interface - self._workflow.InitializeWorkflow(WorkflowType=name) self._initialize_methods(dynamic_interface=dynamic_interface) + def _new_workflow(self, name: str, dynamic_interface: bool = True): + self._workflow.InitializeWorkflow(WorkflowType=name) + self._activate_dynamic_interface(dynamic_interface=dynamic_interface) + + def _load_workflow(self, file_path: str, dynamic_interface: bool = True): + self._workflow.LoadWorkflow(FilePath=file_path) + self._activate_dynamic_interface(dynamic_interface=dynamic_interface) + + def _get_initial_task_list_while_creating_new_workflow(self): + """Get a list of independent tasks that can be inserted at the initial level + while creating a workflow.""" + self._populate_first_tasks_help_string_command_id_map() + return list(self._initial_task_python_names_map) + + def _create_workflow(self, dynamic_interface: bool = True): + self._workflow.CreateNewWorkflow() + self._activate_dynamic_interface(dynamic_interface=dynamic_interface) + + @property + def insertable_tasks(self): + """Tasks that can be inserted on a blank workflow.""" + return self._FirstTask(self) + + class _FirstTask: + def __init__(self, workflow): + """Initialize an ``_FirstTask`` instance.""" + self._workflow = workflow + self._insertable_tasks = [] + if len(self._workflow.task_names()) == 0: + for ( + item + ) in ( + self._workflow._get_initial_task_list_while_creating_new_workflow() + ): + insertable_task = type("Insert", (self._Insert,), {})( + self._workflow, item + ) + setattr(self, item, insertable_task) + self._insertable_tasks.append(insertable_task) + + def __call__(self): + return self._insertable_tasks + + class _Insert: + def __init__(self, workflow, name): + """Initialize an ``_Insert`` instance.""" + self._workflow = workflow + self._name = name + + def insert(self): + """Insert a task in the workflow.""" + return self._workflow._workflow.InsertNewTask( + CommandName=self._workflow._initial_task_python_names_map[ + self._name + ] + ) + + def __repr__(self): + return f"" + + def _populate_first_tasks_help_string_command_id_map(self): + if not self._initial_task_python_names_map: + for command in dir(self._command_source): + if command in ["SwitchToSolution", "set_state"]: + continue + command_obj = getattr(self._command_source, command) + if isinstance(command_obj, PyCommand): + command_obj_instance = command_obj.create_instance() + if not command_obj_instance.get_attr("requiredInputs"): + help_str = command_obj_instance.get_attr("helpString") + if help_str: + self._initial_task_python_names_map[help_str] = command + del command_obj_instance + def _initialize_methods(self, dynamic_interface: bool): _init_task_accessors(self) if dynamic_interface: @@ -1428,43 +1555,9 @@ def load_state(self, list_of_roots: list): """Load the state of the workflow.""" self._workflow.LoadState(ListOfRoots=list_of_roots) - def get_insertable_tasks(self): - """Get the list of possible Python names that can be inserted as tasks.""" - return [ - item - for item in 
self._help_string_command_id_map.keys() - if item not in self._repeated_task_help_string_display_text_map.keys() - ] - - def get_available_task_names(self): + def task_names(self): """Get the list of the Python names for the available tasks.""" - return [child.python_name() for child in self.ordered_children()] - - def insert_new_task(self, task: str): - """Insert a new task based on the Python name. - - Parameters - ---------- - task: str - Name of the new task. - - Returns - ------- - None - - Raises - ------ - ValueError - If 'task' does not match a task name. - """ - if task not in self.get_insertable_tasks(): - raise ValueError( - f"'{task}' is not an allowed task.\n" - "Use the 'get_insertable_tasks()' method to view a list of allowed tasks." - ) - return self._workflow.InsertNewTask( - CommandName=self._help_string_command_id_map[task] - ) + return [child.python_name() for child in self.tasks()] def delete_tasks(self, list_of_tasks: list[str]): """Delete the provided list of tasks. @@ -1497,44 +1590,11 @@ def delete_tasks(self, list_of_tasks: list[str]): except KeyError as ex: raise ValueError( f"'{task_name}' is not an allowed task.\n" - "Use the 'get_available_task_names()' method to view a list of allowed tasks." + "Use the 'task_names()' method to view a list of allowed tasks." ) from ex return self._workflow.DeleteTasks(ListOfTasks=list_of_tasks_with_display_name) - def create_composite_task(self, list_of_tasks: list[str]): - """Create the list of tasks based on the Python names. - - Parameters - ---------- - list_of_tasks: list[str] - List of task items. - - Returns - ------- - None - - Raises - ------ - RuntimeError - If the 'task' does not match a task name. - """ - list_of_tasks_with_display_name = [] - for task_name in list_of_tasks: - try: - list_of_tasks_with_display_name.append( - self._help_string_display_id_map[task_name] - ) - except KeyError: - raise RuntimeError( - f"'{task_name}' is not an allowed task.\n" - "Use the 'get_available_task_names()' method to view a list of allowed tasks." - ) - - return self._workflow.CreateCompositeTask( - ListOfTasks=list_of_tasks_with_display_name - ) - class ClassicWorkflow: """Wraps a meshing workflow object. 
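The workflow.py hunks above replace the string-based insert_new_task()/get_next_possible_tasks() API with the object-oriented insertable_tasks accessor and rename ordered_children()/get_available_task_names() to tasks()/task_names(). A minimal usage sketch of that surface, assuming a running Fluent installation reachable from pyfluent.launch_fluent and a watertight workflow whose geometry step exists; the task names mirror those used in the tests below:

    import ansys.fluent.core as pyfluent

    meshing = pyfluent.launch_fluent(mode="meshing")
    watertight = meshing.watertight()

    # Top-level tasks, in workflow order, by their Python names.
    print(watertight.task_names())

    # Tasks that may be inserted after import_geometry; each entry exposes insert().
    print(watertight.import_geometry.insertable_tasks())
    watertight.import_geometry.insertable_tasks.import_boi_geometry.insert()
    assert "import_boi_geometry" in watertight.task_names()
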
diff --git a/tests/test_new_meshing_workflow.py b/tests/test_new_meshing_workflow.py index 8fbd4ec23c1..50e1e06551e 100644 --- a/tests/test_new_meshing_workflow.py +++ b/tests/test_new_meshing_workflow.py @@ -628,8 +628,12 @@ def test_snake_case_attrs_in_new_meshing_workflow(new_mesh_session): "mixing_elbow.pmdb", "pyfluent/mixing_elbow" ) watertight = new_mesh_session.watertight() - _assert_snake_case_attrs(dir(watertight)) - _assert_snake_case_attrs(dir(watertight.import_geometry)) + dir_watertight = dir(watertight) + dir_watertight.remove("_FirstTask") + _assert_snake_case_attrs(dir_watertight) + dir_watertight_import_geometry = dir(watertight.import_geometry) + dir_watertight_import_geometry.remove("_NextTask") + _assert_snake_case_attrs(dir_watertight_import_geometry) _assert_snake_case_attrs(watertight.import_geometry.arguments()) _assert_snake_case_attrs(watertight.import_geometry.cad_import_options()) _assert_snake_case_attrs(dir(watertight.import_geometry.cad_import_options)) @@ -656,27 +660,28 @@ def test_workflow_and_data_model_methods_new_meshing_workflow(new_mesh_session): getattr(watertight, attr) watertight.import_geometry.rename(new_name="import_geom_wtm") - assert len(watertight.ordered_children()) == 11 - watertight.insert_new_task("import_geometry") - assert len(watertight.ordered_children()) == 12 - watertight.task("import_geom_wtm").file_name = import_file_name - watertight.task("import_geom_wtm").length_unit = "in" - watertight.task("import_geom_wtm")() + time.sleep(1) + assert "import_geometry" not in watertight.task_names() + assert "import_geom_wtm" in watertight.task_names() + assert len(watertight.tasks()) == 11 + watertight.import_geom_wtm.file_name = import_file_name + watertight.import_geom_wtm.length_unit = "in" + watertight.import_geom_wtm() _next_possible_tasks = [ - "import_body_of_influence_geometry", - "set_up_periodic_boundaries", - "create_local_refinement_regions", - "run_custom_journal", + "", + "", + "", + "", ] - assert ( - watertight.task("import_geom_wtm").get_next_possible_tasks() - == _next_possible_tasks - ) - watertight.task("import_geom_wtm").insert_next_task( - "import_body_of_influence_geometry" - ) - watertight.task("import_geom_wtm").insert_next_task("set_up_periodic_boundaries") - assert len(watertight.ordered_children()) == 14 + assert sorted( + [repr(x) for x in watertight.import_geom_wtm.insertable_tasks()] + ) == sorted(_next_possible_tasks) + watertight.import_geom_wtm.insertable_tasks.import_boi_geometry.insert() + assert sorted( + [repr(x) for x in watertight.import_geom_wtm.insertable_tasks()] + ) == sorted(_next_possible_tasks) + watertight.import_geom_wtm.insertable_tasks.set_up_rotational_periodic_boundaries.insert() + assert len(watertight.tasks()) == 13 @pytest.mark.fluent_version(">=23.2") @@ -686,13 +691,13 @@ def test_watertight_workflow(mixing_elbow_geometry, new_mesh_session): watertight.import_geometry.file_name = mixing_elbow_geometry watertight.import_geometry() add_local_sizing = watertight.add_local_sizing - assert not add_local_sizing.ordered_children() + assert not add_local_sizing.tasks() add_local_sizing._add_child(state={"boi_face_label_list": ["cold-inlet"]}) - assert not add_local_sizing.ordered_children() + assert not add_local_sizing.tasks() added_sizing = add_local_sizing.add_child_and_update( state={"boi_face_label_list": ["elbow-fluid"]} ) - assert len(add_local_sizing.ordered_children()) == 1 + assert len(add_local_sizing.tasks()) == 1 assert added_sizing assert added_sizing.boi_face_label_list() == 
["elbow-fluid"] @@ -704,13 +709,13 @@ def test_watertight_workflow_children(mixing_elbow_geometry, new_mesh_session): watertight.import_geometry.file_name = mixing_elbow_geometry watertight.import_geometry() add_local_sizing = watertight.add_local_sizing - assert not add_local_sizing.ordered_children() + assert not add_local_sizing.tasks() add_local_sizing._add_child(state={"boi_face_label_list": ["cold-inlet"]}) - assert not add_local_sizing.ordered_children() + assert not add_local_sizing.tasks() added_sizing = add_local_sizing.add_child_and_update( state={"boi_face_label_list": ["elbow-fluid"]} ) - assert len(add_local_sizing.ordered_children()) == 1 + assert len(add_local_sizing.tasks()) == 1 assert added_sizing assert added_sizing.boi_face_label_list() == ["elbow-fluid"] assert added_sizing.name() == "facesize_1" @@ -721,7 +726,7 @@ def test_watertight_workflow_children(mixing_elbow_geometry, new_mesh_session): assert added_sizing.arguments() == added_sizing_by_pos.arguments() assert not added_sizing.python_name() describe_geometry = watertight.describe_geometry - describe_geometry_children = describe_geometry.ordered_children() + describe_geometry_children = describe_geometry.tasks() assert len(describe_geometry_children) == 2 describe_geometry_child_task_python_names = ( describe_geometry.child_task_python_names() @@ -732,8 +737,7 @@ def test_watertight_workflow_children(mixing_elbow_geometry, new_mesh_session): ] -@pytest.mark.skip("Randomly failing in CI") -@pytest.mark.fluent_version(">=23.2") +@pytest.mark.fluent_version(">=24.1") @pytest.mark.codegen_required def test_watertight_workflow_dynamic_interface(mixing_elbow_geometry, new_mesh_session): watertight = new_mesh_session.watertight() @@ -742,30 +746,42 @@ def test_watertight_workflow_dynamic_interface(mixing_elbow_geometry, new_mesh_s create_volume_mesh = watertight.create_volume_mesh assert create_volume_mesh is not None watertight.delete_tasks(list_of_tasks=["create_volume_mesh"]) - # I assume that what's going on here is that due to DeleteTasks we are triggering - # change events in the server but those events are (still) being transmitted after - # DeleteTasks has returned. Hence, the dynamic watertight Python interface - # is still updating after the command has returned and the client can try to access - # while it is in that update phase, leading to (difficult to understand) exceptions. - # Temporarily sleeping in the test. I note that the core event tests use sleeps also. 
- with pytest.raises(AttributeError): - watertight.create_volume_mesh + assert "create_volume_mesh" not in watertight.task_names() - watertight.insert_new_task(task="create_volume_mesh") - time.sleep(2.5) + assert sorted( + [repr(x) for x in watertight.add_boundary_layer.insertable_tasks()] + ) == sorted( + [ + "", + "", + "", + "", + "", + "", + "", + "", + "", + ] + ) + + watertight.add_boundary_layer.insertable_tasks.create_volume_mesh.insert() + assert "create_volume_mesh" in watertight.task_names() create_volume_mesh = watertight.create_volume_mesh assert create_volume_mesh is not None - watertight_geom = watertight.describe_geometry - assert watertight_geom.create_regions.arguments()["number_of_flow_volumes"] == 1 + assert ( + watertight.describe_geometry.create_regions.arguments()[ + "number_of_flow_volumes" + ] + == 1 + ) watertight.delete_tasks(list_of_tasks=["create_regions"]) - assert watertight_geom.create_regions is None - assert watertight_geom.enclose_fluid_regions - watertight_geom.enclose_fluid_regions.delete() - assert watertight_geom.enclose_fluid_regions is None + assert "create_regions" not in watertight.task_names() + assert watertight.describe_geometry.enclose_fluid_regions + watertight.describe_geometry.enclose_fluid_regions.delete() + assert "enclose_fluid_regions" not in watertight.task_names() watertight.create_volume_mesh.delete() - with pytest.raises(AttributeError): - watertight.create_volume_mesh + assert "create_volume_mesh" not in watertight.task_names() @pytest.mark.fluent_version("==23.2") @@ -818,15 +834,15 @@ def test_extended_wrapper(new_mesh_session, mixing_elbow_geometry): import_geometry.file_name.set_state(mixing_elbow_geometry) import_geometry() add_local_sizing = watertight.add_local_sizing - assert not add_local_sizing.ordered_children() + assert not add_local_sizing.tasks() # new_mesh_session.workflow.TaskObject["Add Local Sizing"]._add_child(state={"BOIFaceLabelList": ["elbow-fluid"]}) add_local_sizing._add_child(state={"boi_face_label_list": ["cold-inlet"]}) - assert not add_local_sizing.ordered_children() + assert not add_local_sizing.tasks() added_sizing = add_local_sizing.add_child_and_update( state={"boi_face_label_list": ["elbow-fluid"]} ) - assert len(add_local_sizing.ordered_children()) == 1 + assert len(add_local_sizing.tasks()) == 1 assert added_sizing assert added_sizing.boi_face_label_list() == ["elbow-fluid"] # restart @@ -965,7 +981,7 @@ def downstream_names(task): assert downstream_names(gen_vol_mesh) == set() for task in all_tasks: - assert {sub_task.name() for sub_task in task.ordered_children()} == ( + assert {sub_task.name() for sub_task in task.tasks()} == ( { "Enclose Fluid Regions (Capping)", "Create Regions", @@ -975,7 +991,7 @@ def downstream_names(task): ) for task in all_tasks: - assert {sub_task.name() for sub_task in task.inactive_ordered_children()} == ( + assert {sub_task.name() for sub_task in task.inactive_tasks()} == ( { "Apply Share Topology", "Update Boundaries", @@ -1011,7 +1027,7 @@ def downstream_names(task): "RunCustomJournal", } - children = w.ordered_children() + children = w.tasks() expected_task_order = ( "Import Geometry", "Add Local Sizing", @@ -1026,14 +1042,14 @@ def downstream_names(task): assert actual_task_order == expected_task_order - assert [child.name() for child in children[3].ordered_children()] == [ + assert [child.name() for child in children[3].tasks()] == [ "Enclose Fluid Regions (Capping)", "Create Regions", ] gen_surf_mesh.InsertNextTask(CommandName="AddBoundaryType") - children = 
w.ordered_children() + children = w.tasks() expected_task_order = ( "Import Geometry", "Add Local Sizing", @@ -1049,7 +1065,7 @@ def downstream_names(task): assert actual_task_order == expected_task_order - assert [child.name() for child in children[4].ordered_children()] == [ + assert [child.name() for child in children[4].tasks()] == [ "Enclose Fluid Regions (Capping)", "Create Regions", ] @@ -1065,7 +1081,6 @@ def test_new_workflow_structure(new_mesh_session): watertight.TaskObject["Import Geometry"] -@pytest.mark.skip("Randomly failing in CI") @pytest.mark.codegen_required @pytest.mark.fluent_version(">=23.2") def test_attrs_in_watertight_meshing_workflow(new_mesh_session): @@ -1088,15 +1103,15 @@ def test_attrs_in_watertight_meshing_workflow(new_mesh_session): assert watertight.import_geometry.file_name() # Reinitialize the workflow: watertight.reinitialize() - - assert not watertight.import_geometry.file_name() + # Failing randomly in CI. + # assert not watertight.import_geometry.file_name() @pytest.mark.codegen_required @pytest.mark.fluent_version(">=23.2") def test_ordered_children_in_enhanced_meshing_workflow(new_mesh_session): watertight = new_mesh_session.watertight() - assert set([repr(x) for x in watertight.ordered_children()]) == { + assert set([repr(x) for x in watertight.tasks()]) == { "", "", "", @@ -1111,89 +1126,6 @@ def test_ordered_children_in_enhanced_meshing_workflow(new_mesh_session): } -@pytest.mark.codegen_required -@pytest.mark.fluent_version(">=24.1") -def test_duplicate_tasks_in_enhanced_meshing_workflow(new_mesh_session): - watertight = new_mesh_session.watertight() - possible_tasks = [ - "import_geometry", - "add_local_sizing", - "create_surface_mesh", - "describe_geometry", - "apply_share_topology", - "enclose_fluid_regions", - "update_boundaries", - "create_regions", - "update_regions", - "add_boundary_layer", - "create_volume_mesh", - ] - - possible_task_names = possible_tasks - - watertight.import_geometry.rename("xyz") - - assert sorted(watertight.get_insertable_tasks()) == sorted(possible_tasks) - - possible_task_names.remove("import_geometry") - possible_task_names = possible_task_names + ["xyz"] - assert sorted( - child.python_name() for child in watertight.ordered_children() - ) == sorted(possible_task_names) - - assert watertight.xyz - - assert "import_geometry" not in watertight.get_available_task_names() - - watertight.insert_new_task(task="import_geometry") - - possible_task_names = possible_task_names + ["import_geometry"] - - assert sorted(watertight.get_available_task_names()) == sorted(possible_task_names) - - assert watertight.import_geometry - - watertight.insert_new_task(task="import_geometry") - - possible_task_names = possible_task_names + ["import_geometry_1"] - - assert sorted(watertight.get_available_task_names()) == sorted(possible_task_names) - - assert watertight.import_geometry_1 - - watertight.import_geometry_1.rename("igm_1") - - possible_task_names.remove("import_geometry_1") - possible_task_names = possible_task_names + ["igm_1"] - assert sorted( - [child.python_name() for child in watertight.ordered_children()] - ) == sorted(possible_task_names) - - watertight.insert_new_task(task="add_local_sizing") - watertight.insert_new_task(task="add_boundary_layer") - - assert "import_geometry_1" not in watertight.get_available_task_names() - - watertight.insert_new_task(task="import_geometry") - watertight.insert_new_task(task="import_geometry") - - possible_task_names = possible_task_names + [ - "import_geometry_1", - 
"import_geometry_2", - "add_local_sizing_1", - "add_boundary_layer_1", - ] - - assert sorted(watertight.get_available_task_names()) == sorted(possible_task_names) - - assert watertight.import_geometry_2 - - assert "import_geometry_3" not in watertight.get_available_task_names() - - assert "add_boundary_layer_1" in dir(watertight) - - -@pytest.mark.skip("Randomly failing in CI") @pytest.mark.codegen_required @pytest.mark.fluent_version(">=23.2") def test_attrs_in_fault_tolerant_meshing_workflow(new_mesh_session): @@ -1294,15 +1226,41 @@ def test_new_meshing_workflow_without_dm_caching( watertight.create_volume_mesh() watertight.import_geometry.rename(new_name="import_geom_wtm") - assert watertight.task("import_geom_wtm").arguments() + time.sleep(2) + assert "import_geometry" not in watertight.task_names() + assert "import_geom_wtm" in watertight.task_names() + assert watertight.import_geom_wtm.arguments() + + with pytest.raises(AttributeError): + watertight.import_geometry watertight.delete_tasks(list_of_tasks=["add_local_sizing"]) - time.sleep(1) - assert "add_local_sizing" not in watertight.get_available_task_names() + time.sleep(2) + assert "add_local_sizing" not in watertight.task_names() + + assert sorted( + [repr(x) for x in watertight.import_geom_wtm.insertable_tasks()] + ) == sorted( + [ + "", + "", + "", + "", + "", + ] + ) + + watertight.import_geom_wtm.insertable_tasks.add_local_sizing.insert() + time.sleep(2) + assert "add_local_sizing" in watertight.task_names() - watertight.insert_new_task(task="add_local_sizing") - time.sleep(1) - assert "add_local_sizing" in watertight.get_available_task_names() + +@pytest.mark.codegen_required +@pytest.mark.fluent_version(">=24.1") +def test_new_meshing_workflow_switching_without_dm_caching( + disable_datamodel_cache, new_mesh_session +): + watertight = new_mesh_session.watertight() fault_tolerant = new_mesh_session.fault_tolerant() with pytest.raises(RuntimeError): @@ -1371,3 +1329,140 @@ def test_camel_to_snake_case_convertor(): assert camel_to_snake_case("BOIZoneOrLabel") == "boi_zone_or_label" assert camel_to_snake_case("NumberofLayers") == "numberof_layers" assert camel_to_snake_case("NumberOfLayers") == "number_of_layers" + assert ( + camel_to_snake_case("Set_Up_Rotational_Periodic_Boundaries") + == "set_up_rotational_periodic_boundaries" + ) + + +@pytest.mark.codegen_required +@pytest.mark.fluent_version(">=24.1") +def test_duplicate_tasks_in_workflow(new_mesh_session): + # Import geometry + meshing = new_mesh_session + watertight = meshing.watertight() + + assert sorted( + [repr(x) for x in watertight.import_geometry.insertable_tasks()] + ) == sorted( + [ + "", + "", + "", + "", + ] + ) + assert "add_local_sizing" in watertight.task_names() + watertight.add_local_sizing.delete() + assert "add_local_sizing" not in watertight.task_names() + assert "" in [ + repr(x) for x in watertight.import_geometry.insertable_tasks() + ] + watertight.import_geometry.insertable_tasks.add_local_sizing.insert() + assert "" not in [ + repr(x) for x in watertight.import_geometry.insertable_tasks() + ] + watertight.import_geometry.insertable_tasks.import_boi_geometry.insert() + watertight.import_geometry.insertable_tasks.import_boi_geometry.insert() + watertight.import_geometry.insertable_tasks.import_boi_geometry.insert() + assert watertight.task_names() == [ + "import_geometry", + "create_surface_mesh", + "describe_geometry", + "apply_share_topology", + "enclose_fluid_regions", + "update_boundaries", + "create_regions", + "update_regions", + 
"add_boundary_layer", + "create_volume_mesh", + "add_local_sizing", + "import_boi_geometry", + "import_boi_geometry_1", + "import_boi_geometry_2", + ] + assert watertight.import_boi_geometry_1.arguments() + + +@pytest.mark.codegen_required +@pytest.mark.fluent_version(">=24.1") +def test_object_oriented_task_inserting_in_workflows(new_mesh_session): + meshing = new_mesh_session + watertight = meshing.watertight() + assert sorted( + [repr(x) for x in watertight.import_geometry.insertable_tasks()] + ) == sorted( + [ + "", + "", + "", + "", + ] + ) + assert "set_up_rotational_periodic_boundaries" not in watertight.task_names() + watertight.import_geometry.insertable_tasks.set_up_rotational_periodic_boundaries.insert() + assert "set_up_rotational_periodic_boundaries" in watertight.task_names() + assert sorted( + [repr(x) for x in watertight.import_geometry.insertable_tasks()] + ) == sorted( + [ + "", + "", + "", + ] + ) + watertight.import_geometry.insertable_tasks.import_boi_geometry.insert() + watertight.import_geometry.insertable_tasks.import_boi_geometry.insert() + assert "import_boi_geometry" in watertight.task_names() + assert "import_boi_geometry_1" in watertight.task_names() + time.sleep(1) + assert watertight.import_boi_geometry.arguments() + assert watertight.import_boi_geometry_1.arguments() + + +@pytest.mark.codegen_required +@pytest.mark.fluent_version(">=24.1") +def test_loaded_workflow(new_mesh_session): + meshing = new_mesh_session + saved_workflow_path = examples.download_file( + "sample_watertight_workflow.wft", "pyfluent/meshing_workflows" + ) + loaded_workflow = meshing.load_workflow(file_path=saved_workflow_path) + assert "set_up_rotational_periodic_boundaries" in loaded_workflow.task_names() + time.sleep(2.5) + assert "import_boi_geometry" in loaded_workflow.task_names() + # The below snippet is randomly failing in CI + # assert loaded_workflow.import_boi_geometry_1.arguments() + + +@pytest.mark.codegen_required +@pytest.mark.fluent_version(">=24.1") +def test_created_workflow(new_mesh_session): + meshing = new_mesh_session + created_workflow = meshing.create_workflow() + + assert sorted([repr(x) for x in created_workflow.insertable_tasks()]) == sorted( + [ + "", + "", + "", + "", + ] + ) + + created_workflow.insertable_tasks()[0].insert() + + assert created_workflow.insertable_tasks() == [] + + time.sleep(2.5) + + assert "" in [ + repr(x) for x in created_workflow.import_geometry.insertable_tasks() + ] + created_workflow.import_geometry.insertable_tasks.add_local_sizing.insert() + assert "" not in [ + repr(x) for x in created_workflow.import_geometry.insertable_tasks() + ] + assert sorted(created_workflow.task_names()) == sorted( + ["import_geometry", "add_local_sizing"] + )