diff --git a/.ci/compare_flobject.py b/.ci/compare_flobject.py
index 04416165549..5d68afb34ac 100644
--- a/.ci/compare_flobject.py
+++ b/.ci/compare_flobject.py
@@ -13,20 +13,20 @@ def compare_flobject():
     RuntimeError
         If flobject.py is inconsistent in Fluent and PyFluent.
     """
-    image_name = f"ghcr.io/ansys/pyfluent:v24.1.0"
+    image_name = "ghcr.io/ansys/pyfluent:v24.1.0"
     container_name = uuid.uuid4().hex
     is_linux = platform.system() == "Linux"
     subprocess.run(
         f"docker container create --name {container_name} {image_name}",
         shell=is_linux,
     )
-    xml_source = f"/ansys_inc/v241/fluent/fluent24.1.0/cortex/pylib/flapi/flobject.py"
+    xml_source = "/ansys_inc/v241/fluent/fluent24.1.0/cortex/pylib/flapi/flobject.py"
     subprocess.run(
         f"docker cp {container_name}:{xml_source} fluent_flobject.py", shell=is_linux
     )
     subprocess.run(f"docker container rm {container_name}", shell=is_linux)
     p = subprocess.run(
-        f"diff -u fluent_flobject.py src/ansys/fluent/core/solver/flobject.py",
+        "diff -u fluent_flobject.py src/ansys/fluent/core/solver/flobject.py",
         shell=is_linux,
         capture_output=True,
         text=True,
diff --git a/.flake8 b/.flake8
index bc0554384af..617d4f8e0a6 100644
--- a/.flake8
+++ b/.flake8
@@ -1,8 +1,7 @@
 [flake8]
-exclude = venv, doc/_build, src/ansys/api/fluent/v0/*, src/ansys/fluent/core/meshing/tui.py, src/ansys/fluent/core/solver/tui.py, src/ansys/fluent/core/solver/settings/*, src/ansys/fluent/core/datamodel/*
-max-line-length = 88
+exclude = src/ansys/fluent/core/generated
 count = True
-max-complexity = 10
 statistics = True
-select = W191 W291 W293 W391 E115 E117 E122 E124 E125 E225 E231 E301 E303 F401 F403 N801 N802 N803 N804 N805 N806
-extend-ignore = E203, E501
\ No newline at end of file
+max-complexity = 10
+max-line-length = 88
+extend-ignore = E203, E501, C901
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 0b10cba26a6..a3f77753977 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -25,19 +25,14 @@ repos:
   rev: 7.1.1
   hooks:
   - id: flake8
-    additional_dependencies: [
-      'flake8-annotations==3.0.1'
-    ]
     args: [
       --exclude, src/ansys/fluent/core/generated,
-      --select, W191 W291 W293 W391 E115 E117 E122 E124 E125 E225 E231 E301 E303 F401 F403 N801 N802 N803 N804 N805 N806,
-      #--select, W191 W291 W293 W391 E115 E117 E122 E124 E125 E225 E231 E301 E303 F401 F403 N801 N802 N803 N804 N805 N806 ANN001 ANN201 ANN205 ANN206,
       --count,
       --statistics,
       --max-complexity, "10",
       --max-line-length, "88",
-      --extend-ignore, E203 E501,
-      ansys, doc, examples, tests
+      --extend-ignore, E203 E501 C901,
+      src, doc, examples, tests
     ]

 - repo: https://github.com/codespell-project/codespell
diff --git a/doc/api_rstgen.py b/doc/api_rstgen.py
index 736d23b7944..d65a1928910 100644
--- a/doc/api_rstgen.py
+++ b/doc/api_rstgen.py
@@ -158,7 +158,7 @@ def _generate_api_source_rst_files(folder: str, files: list):
             rst.write(f".. _ref_{file}:\n\n")
             if folder:
                 if "root" in file:
-                    rst.write(f"solver.settings\n")
+                    rst.write("solver.settings\n")
                     rst.write(f'{"="*(len("solver.settings"))}\n\n')
                     rst.write(
                         "The :ref:`ref_root` is the top-level solver settings object. It contains all\n"
@@ -176,7 +176,7 @@ def _generate_api_source_rst_files(folder: str, files: list):
             rst.write(f"ansys.fluent.core.{file}\n")
             rst.write(f'{"="*(len(f"ansys.fluent.core.{file}"))}\n\n')
             rst.write(f".. automodule:: ansys.fluent.core.{file}\n")
-        if not "root" in file:
+        if "root" not in file:
            _write_common_rst_members(rst_file=rst)


@@ -195,7 +195,7 @@ def _generate_api_index_rst_files():
         index.write(f'{"="*(len(f"ansys.fluent.core.{folder}"))}\n\n')
         index.write(f".. automodule:: ansys.fluent.core.{folder}\n")
         _write_common_rst_members(rst_file=index)
-        index.write(f".. toctree::\n")
+        index.write(".. toctree::\n")
         index.write("   :maxdepth: 2\n")
         index.write("   :hidden:\n\n")
         for file in files:
diff --git a/doc/rstgen.py b/doc/rstgen.py
index 9b491766e4d..c0dc7178144 100644
--- a/doc/rstgen.py
+++ b/doc/rstgen.py
@@ -279,7 +279,7 @@ def _write_datamodel_index_doc(datamodels: list, mode: str):
         f.write(f".. _ref_{mode}_datamodel:\n\n")
         f.write(f"{datamodel_mode}\n")
         f.write(f"{'=' * len(datamodel_mode)}\n\n")
-        f.write(f".. automodule:: ansys.fluent.core.datamodel\n")
+        f.write(".. automodule:: ansys.fluent.core.datamodel\n")
         f.write("   :autosummary:\n\n")
         f.write("   :autosummary-members:\n\n")
         f.write(".. toctree::\n")
diff --git a/doc/settings_rstgen.py b/doc/settings_rstgen.py
index 5e8cad8eab2..4587d882a16 100644
--- a/doc/settings_rstgen.py
+++ b/doc/settings_rstgen.py
@@ -62,7 +62,7 @@ def _populate_parents_list(cls):
             child_cls_name = child_cls.__name__
             if not parents_dict.get(child_cls_name):
                 parents_dict[child_cls_name] = []
-            if not cls in parents_dict[child_cls_name]:
+            if cls not in parents_dict[child_cls_name]:
                 parents_dict[child_cls_name].append(cls)

     if hasattr(cls, "command_names"):
@@ -71,7 +71,7 @@ def _populate_parents_list(cls):
             child_cls_name = child_cls.__name__
             if not parents_dict.get(child_cls_name):
                 parents_dict[child_cls_name] = []
-            if not cls in parents_dict[child_cls_name]:
+            if cls not in parents_dict[child_cls_name]:
                 parents_dict[child_cls_name].append(cls)

     if hasattr(cls, "argument_names"):
@@ -80,7 +80,7 @@ def _populate_parents_list(cls):
             child_cls_name = child_cls.__name__
             if not parents_dict.get(child_cls_name):
                 parents_dict[child_cls_name] = []
-            if not cls in parents_dict[child_cls_name]:
+            if cls not in parents_dict[child_cls_name]:
                 parents_dict[child_cls_name].append(cls)

     if hasattr(cls, "child_object_type"):
@@ -88,7 +88,7 @@ def _populate_parents_list(cls):
         child_cls_name = child_cls.__name__
         if not parents_dict.get(child_cls_name):
             parents_dict[child_cls_name] = []
-        if not cls in parents_dict[child_cls_name]:
+        if cls not in parents_dict[child_cls_name]:
             parents_dict[child_cls_name].append(cls)

     if hasattr(cls, "child_names"):
@@ -131,7 +131,7 @@ def _populate_rst_from_settings(rst_dir, cls, version):
         r.write(f"{istr1}:show-inheritance:\n\n")

         if has_children:
-            r.write(f".. rubric:: Attributes\n\n")
+            r.write(".. rubric:: Attributes\n\n")
             data_dict = {}
             data_dict["Attribute"] = "Summary"
             for child in cls.child_names:
@@ -141,7 +141,7 @@ def _populate_rst_from_settings(rst_dir, cls, version):
             _generate_table_for_rst(r, data_dict)

         if has_commands:
-            r.write(f".. rubric:: Methods\n\n")
+            r.write(".. rubric:: Methods\n\n")
             data_dict = {}
             data_dict["Method"] = "Summary"
             for child in cls.command_names:
@@ -151,7 +151,7 @@ def _populate_rst_from_settings(rst_dir, cls, version):
             _generate_table_for_rst(r, data_dict)

         if has_arguments:
-            r.write(f".. rubric:: Arguments\n\n")
+            r.write(".. rubric:: Arguments\n\n")
             data_dict = {}
             data_dict["Argument"] = "Summary"
             for child in cls.argument_names:
@@ -165,11 +165,11 @@ def _populate_rst_from_settings(rst_dir, cls, version):
         ref_string = f":ref:`{child_cls.__name__} <{child_cls.__name__}>`"
         data_dict = {}
         data_dict[ref_string] = child_cls.__doc__.strip("\n").split("\n")[0]
-        r.write(f".. rubric:: Named object type\n\n")
+        r.write(".. rubric:: Named object type\n\n")
         r.write(f"{ref_string}\n\n\n")

         if parents_dict.get(cls_name):
-            r.write(f".. rubric:: Included in:\n\n")
+            r.write(".. rubric:: Included in:\n\n")
             data_dict = {}
             data_dict["Parent"] = "Summary"
             for parent in parents_dict.get(cls_name):
@@ -180,7 +180,7 @@
                 data_dict[ref_string] = parent.__doc__.strip("\n").split("\n")[0]
             _generate_table_for_rst(r, data_dict)

-    if not rstpath in rst_list:
+    if rstpath not in rst_list:
         rst_list.append(rstpath)
         if has_children:
             for child in cls.child_names:
diff --git a/doc/source/conf.py b/doc/source/conf.py
index deee11a5128..ec4c7b71637 100644
--- a/doc/source/conf.py
+++ b/doc/source/conf.py
@@ -163,7 +163,6 @@ def _stop_fluent_container(gallery_conf, fname):
     "backreferences_dir": None,
     # Modules for which function level galleries are created. In
     "doc_module": "ansys-fluent-core",
-    "ignore_pattern": "flycheck*",
     "thumbnail_size": (350, 350),
     "reset_modules_order": "after",
     "reset_modules": (_stop_fluent_container),
diff --git a/examples/00-fluent/DOE_ML.py b/examples/00-fluent/DOE_ML.py
index 2caf1c609c6..ce4832176f2 100644
--- a/examples/00-fluent/DOE_ML.py
+++ b/examples/00-fluent/DOE_ML.py
@@ -25,6 +25,8 @@
 # Import required libraries/modules
 # =================================

+# flake8: noqa: E402
+
 from pathlib import Path

 import matplotlib.pyplot as plt
@@ -216,10 +218,10 @@
 # * Prediction on Unseen/Test Data (scikit-learn)
 # * Parity Plot (Matplotlib and Seaborn)

-from pprint import pprint  # noqa: F401
+# from pprint import pprint

-from sklearn.ensemble import RandomForestRegressor  # noqa: F401
-from sklearn.linear_model import LinearRegression  # noqa: F401
+# from sklearn.ensemble import RandomForestRegressor
+# from sklearn.linear_model import LinearRegression
 from sklearn.metrics import r2_score
 from sklearn.model_selection import RepeatedKFold, cross_val_score
 from xgboost import XGBRegressor
diff --git a/examples/00-fluent/conjugate_heat_transfer.py b/examples/00-fluent/conjugate_heat_transfer.py
index 86c1cc584b7..2b122b03c9d 100644
--- a/examples/00-fluent/conjugate_heat_transfer.py
+++ b/examples/00-fluent/conjugate_heat_transfer.py
@@ -34,9 +34,7 @@

 import ansys.fluent.core as pyfluent
 from ansys.fluent.core import examples
-import ansys.fluent.visualization as viz  # noqa: F401
-from ansys.fluent.visualization.matplotlib import Plots  # noqa: F401
-from ansys.fluent.visualization.matplotlib import matplot_windows_manager  # noqa: F401
+from ansys.fluent.visualization.matplotlib import Plots
 from ansys.fluent.visualization.pyvista import Graphics, pyvista_windows_manager
 from ansys.fluent.visualization.pyvista.pyvista_windows_manager import PyVistaWindow

@@ -100,7 +98,7 @@
         r"CappingRequired": r"No",
         r"InvokeShareTopology": r"No",
         r"NonConformal": r"Yes",
-        r"SetupType": r"The geometry consists of both fluid and solid regions and/or voids",  # noqa: E501
+        r"SetupType": r"The geometry consists of both fluid and solid regions and/or voids",
     }
 )

@@ -331,7 +329,7 @@
 meshing.workflow.TaskObject["Run Custom Journal"].Rename(NewName=r"set-periodicity")
 meshing.workflow.TaskObject["set-periodicity"].Arguments = dict(
     {
-        r"JournalString": r"""/bo rps translational semi-auto periodic-1-high periodic-2-high periodic-3-high periodic-4-high , 0 0 -2.3  # noqa: E501
+        r"JournalString": r"""/bo rps translational semi-auto periodic-1-high periodic-2-high periodic-3-high periodic-4-high , 0 0 -2.3
 /bo rps translational semi-auto periodic-5* , 0 0 -2.3
 /bo rps translational semi-auto periodic-6-high , 0 0 -2.3
 /bo rps translational semi-auto periodic-7-high , 0 0 -2.3
@@ -371,7 +369,7 @@
             r"wall-fluid-tet-4-solid-tet-4",
             r"wall-fluid-tet-3-solid-tet-3",
             r"wall-fluid-tet-2-solid-tet-2",
-            r"wall-fluid-tet-2-solid-tet-2-wall-fluid-tet-3-solid-tet-3-fluid-tet-2-solid-tet-2",  # noqa: E501
+            r"wall-fluid-tet-2-solid-tet-2-wall-fluid-tet-3-solid-tet-3-fluid-tet-2-solid-tet-2",
             r"wall-fluid-tet-1-solid-tet-1",
             r"wall-fluid-sweep-fin-solid-sweep-fin.1",
             r"wall-fluid-sweep-fin-solid-sweep-fin",
diff --git a/examples/00-fluent/lunar_lander_thermal.py b/examples/00-fluent/lunar_lander_thermal.py
index 57c51debc9b..6952c8b0b68 100644
--- a/examples/00-fluent/lunar_lander_thermal.py
+++ b/examples/00-fluent/lunar_lander_thermal.py
@@ -61,6 +61,8 @@
 # Perform required imports, including downloading the required geometry files.
 # The mesh has been pre-made for this problem.

+# flake8: noqa: E402
+
 from itertools import chain

 import numpy as np
diff --git a/examples/00-fluent/modeling_ablation.py b/examples/00-fluent/modeling_ablation.py
index d8eaa952f65..00e954a0bb6 100644
--- a/examples/00-fluent/modeling_ablation.py
+++ b/examples/00-fluent/modeling_ablation.py
@@ -77,7 +77,7 @@
 # Fluent Solution Setup
 # ==================================================================================

-from ansys.fluent.visualization import set_config
+from ansys.fluent.visualization import set_config  # noqa: E402

 set_config(blocking=True, set_view_on_display="isometric")

diff --git a/examples/00-fluent/tyler_sofrin_modes.py b/examples/00-fluent/tyler_sofrin_modes.py
index 703a92e2c8c..617c99f69dd 100644
--- a/examples/00-fluent/tyler_sofrin_modes.py
+++ b/examples/00-fluent/tyler_sofrin_modes.py
@@ -111,7 +111,7 @@
     "axial_comp_fullWheel_DFT_23R2.cas.h5",
     "pyfluent/examples/Tyler-Sofrin-Modes-Compressor",
     save_path=save_path,
-)  # noqa: E501
+)

 examples.download_file(
     "axial_comp_fullWheel_DFT_23R2.dat.h5",
diff --git a/src/ansys/fluent/core/codegen/builtin_settingsgen.py b/src/ansys/fluent/core/codegen/builtin_settingsgen.py
index b99122fa648..42746361622 100644
--- a/src/ansys/fluent/core/codegen/builtin_settingsgen.py
+++ b/src/ansys/fluent/core/codegen/builtin_settingsgen.py
@@ -1,6 +1,7 @@
 """Generate builtin setting classes."""

 import os
+from zipimport import zipimporter

 from ansys.fluent.core import CODEGEN_OUTDIR, FluentVersion
 from ansys.fluent.core.solver.flobject import CreatableNamedObjectMixin
diff --git a/src/ansys/fluent/core/codegen/datamodelgen.py b/src/ansys/fluent/core/codegen/datamodelgen.py
index 781be261648..215ccadbd4b 100644
--- a/src/ansys/fluent/core/codegen/datamodelgen.py
+++ b/src/ansys/fluent/core/codegen/datamodelgen.py
@@ -259,7 +259,7 @@ def _populate_static_info(self):
                     print(
                         "Information: Icing settings not generated ( R23.1+ is required )\n"
                     )
-            except:
+            except Exception:
                 print(
                     "Information: Problem accessing flserver datamodel for icing settings\n"
                 )
@@ -361,7 +361,7 @@ def _write_static_info(self, name: str, info: Any, f: FileIO, level: int = 0):
             )
             file.write("from typing import Any\n")
             file.write("\n\n")
-            file.write(f"class Root(PyMenu):\n")
+            file.write("class Root(PyMenu):\n")
             for k in commands:
                 _write_command_query_stub(
                     k,
@@ -402,7 +402,7 @@ def write_static_info(self) -> None:
         """Write API classes to files."""
         api_tree = {"<meshing_session>": {}, "<solver_session>": {}}
         for name, info in self._static_info.items():
-            if info.static_info == None:
+            if info.static_info is None:
                 continue
             with open(info.file_name, "w", encoding="utf8") as f:
                 f.write("#\n")
diff --git a/src/ansys/fluent/core/codegen/settingsgen.py b/src/ansys/fluent/core/codegen/settingsgen.py
index e96d20958e2..be46fe0397f 100644
--- a/src/ansys/fluent/core/codegen/settingsgen.py
+++ b/src/ansys/fluent/core/codegen/settingsgen.py
@@ -147,7 +147,7 @@ def _write_function_stub(name, data, s_stub):
     for arg_name in data["argument_names"]:
         arg_type = _arg_type_strings[data["child_classes"][arg_name]["bases"][0]]
         s_stub.write(f", {arg_name}: {arg_type}")
-    s_stub.write(f"):\n")
+    s_stub.write("):\n")
     # TODO: add return type
     doc = data["doc"]
     doc = doc.strip().replace("\n", "\n    ")
@@ -178,25 +178,25 @@ def _write_data(cls_name: str, python_name: str, data: dict, f: IO, f_stub: IO |
     s.write(f"    fluent_name = {data['fluent_name']!r}\n")
     # _python_name preserves the original non-suffixed name of the class.
     s.write(f"    _python_name = {python_name!r}\n")
-    s_stub.write(f"    version: str\n")
-    s_stub.write(f"    fluent_name: str\n")
-    s_stub.write(f"    _python_name: str\n")
+    s_stub.write("    version: str\n")
+    s_stub.write("    fluent_name: str\n")
+    s_stub.write("    _python_name: str\n")
     child_names = data["child_names"]
     if child_names:
         s.write(f"    child_names = {child_names}\n")
-        s_stub.write(f"    child_names: list[str]\n")
+        s_stub.write("    child_names: list[str]\n")
     command_names = data["command_names"]
     if command_names:
         s.write(f"    command_names = {command_names}\n")
-        s_stub.write(f"    command_names: list[str]\n")
+        s_stub.write("    command_names: list[str]\n")
     query_names = data["query_names"]
     if query_names:
         s.write(f"    query_names = {query_names}\n")
-        s_stub.write(f"    query_names: list[str]\n")
+        s_stub.write("    query_names: list[str]\n")
     argument_names = data["argument_names"]
     if argument_names:
         s.write(f"    argument_names = {argument_names}\n")
-        s_stub.write(f"    argument_names: list[str]\n")
+        s_stub.write("    argument_names: list[str]\n")
     classes_to_write = {}  # values are (class_name, data, hash, should_write_stub)
     if data["child_classes"]:
         s.write("    _child_classes = dict(\n")
@@ -248,7 +248,7 @@ def _write_data(cls_name: str, python_name: str, data: dict, f: IO, f_stub: IO |
     return_type = data["return_type"]
     if return_type:
         s.write(f"    return_type = {return_type!r}\n")
-        s_stub.write(f"    return_type: str\n")
+        s_stub.write("    return_type: str\n")
     s.write("\n")
     for name, (python_name, data, hash_, should_write_stub) in classes_to_write.items():
         if name not in _CLASS_WRITTEN:
diff --git a/src/ansys/fluent/core/codegen/settingsgen_old.py b/src/ansys/fluent/core/codegen/settingsgen_old.py
index ac80d162749..333c4c8dc6b 100644
--- a/src/ansys/fluent/core/codegen/settingsgen_old.py
+++ b/src/ansys/fluent/core/codegen/settingsgen_old.py
@@ -490,7 +490,7 @@ def _populate_init(parent_dir, hash):
         f.write("# This is an auto-generated file. DO NOT EDIT!\n")
         f.write("#\n")
         f.write("\n")
-        f.write(f'"""A package providing Fluent\'s Settings Objects in Python."""')
+        f.write('"""A package providing Fluent\'s Settings Objects in Python."""')
         f.write("\n")
         f.write("from ansys.fluent.core.solver.flobject import *\n\n")
         f.write(f'SHASH = "{hash}"\n')
diff --git a/src/ansys/fluent/core/codegen/tuigen.py b/src/ansys/fluent/core/codegen/tuigen.py
index d4a47a2c030..b6da2abeb89 100644
--- a/src/ansys/fluent/core/codegen/tuigen.py
+++ b/src/ansys/fluent/core/codegen/tuigen.py
@@ -62,7 +62,7 @@ def _get_tui_docdir(mode: str):
             "source",
             "api",
             mode,
-            f"tui",
+            "tui",
         )
     )

diff --git a/src/ansys/fluent/core/codegen/write_settings_yaml.py b/src/ansys/fluent/core/codegen/write_settings_yaml.py
index 6d06da50c3d..587323073ab 100644
--- a/src/ansys/fluent/core/codegen/write_settings_yaml.py
+++ b/src/ansys/fluent/core/codegen/write_settings_yaml.py
@@ -7,7 +7,6 @@
 import sys

 import ansys.fluent.core as pyfluent
-from ansys.fluent.core.services import settings

 indent_factor = 2

@@ -37,9 +36,9 @@ def write_yaml(out, obj, indent=0):
         print("Usage: write_settings_yaml.py [outfile]")
     else:
         session = pyfluent.launch_fluent(mode="solver")
-        settings = session.settings_service.get_static_info()
+        static_info = session.settings_service.get_static_info()
         if len(sys.argv) == 2:
             with open(sys.argv[1], "w") as f:
-                write_yaml(f, settings)
+                write_yaml(f, static_info)
         elif len(sys.argv) == 1:
-            write_yaml(sys.stdout, settings)
+            write_yaml(sys.stdout, static_info)
diff --git a/src/ansys/fluent/core/data_model_cache.py b/src/ansys/fluent/core/data_model_cache.py
index ff9f81a467d..79980f1daf0 100644
--- a/src/ansys/fluent/core/data_model_cache.py
+++ b/src/ansys/fluent/core/data_model_cache.py
@@ -31,8 +31,8 @@ class NameKey(Enum):
     DISPLAY = "_name_"

     def __invert__(self):
-        l = list(NameKey)
-        return l[~l.index(self)]
+        lst = list(NameKey)
+        return lst[~lst.index(self)]


 class _CacheImpl:
@@ -372,7 +372,7 @@ def get_state(
             cached state
         """
         name_key_in_config = self.get_config(rules, "name_key")
-        if name_key == None:
+        if name_key is None:
             name_key = name_key_in_config
         cache = self.rules_str_to_cache[rules]
         with self._with_lock(rules):
diff --git a/src/ansys/fluent/core/filereader/casereader.py b/src/ansys/fluent/core/filereader/casereader.py
index daffa6270ad..5f0d098c009 100644
--- a/src/ansys/fluent/core/filereader/casereader.py
+++ b/src/ansys/fluent/core/filereader/casereader.py
@@ -4,9 +4,10 @@

 from ansys.fluent.core.warnings import PyFluentDeprecationWarning

+from .case_file import CaseFile as CaseReader  # noqa: F401
+
 # Compatibility aliases
 warnings.warn(
     "Use case_file.CaseFile instead of casereader.CaseReader",
     PyFluentDeprecationWarning,
 )
-from .case_file import CaseFile as CaseReader  # noqa: F401
diff --git a/src/ansys/fluent/core/filereader/lispy.py b/src/ansys/fluent/core/filereader/lispy.py
index 7bc82c1fd75..adbfb22bf3a 100644
--- a/src/ansys/fluent/core/filereader/lispy.py
+++ b/src/ansys/fluent/core/filereader/lispy.py
@@ -1,5 +1,7 @@
 """Provides a module for Scheme Interpreter in Python."""

+# flake8: noqa: E266
+
 ################ Scheme Interpreter in Python

 ## (c) Peter Norvig, 2010; See http://norvig.com/lispy2.html
diff --git a/src/ansys/fluent/core/journaling.py b/src/ansys/fluent/core/journaling.py
index 0ab19cfa13e..6d39bbb7529 100644
--- a/src/ansys/fluent/core/journaling.py
+++ b/src/ansys/fluent/core/journaling.py
@@ -14,4 +14,4 @@ def start(self, file_name: str):

     def stop(self):
         """Stop writing the Fluent Python journal."""
-        self.scheme_eval.exec([f"(api-stop-python-journal)"])
+        self.scheme_eval.exec(["(api-stop-python-journal)"])
diff --git a/src/ansys/fluent/core/launcher/launcher_utils.py b/src/ansys/fluent/core/launcher/launcher_utils.py
index db2d9800e29..d8c3568f9d6 100644
--- a/src/ansys/fluent/core/launcher/launcher_utils.py
+++ b/src/ansys/fluent/core/launcher/launcher_utils.py
@@ -40,7 +40,7 @@ def _get_subprocess_kwargs_for_fluent(env: Dict[str, Any], argvals) -> Dict[str,
         del fluent_env["PARA_MESH_NPROCS"]

     if not is_slurm:
-        if INFER_REMOTING_IP and not "REMOTING_SERVER_ADDRESS" in fluent_env:
+        if INFER_REMOTING_IP and "REMOTING_SERVER_ADDRESS" not in fluent_env:
             remoting_ip = find_remoting_ip()
             if remoting_ip:
                 fluent_env["REMOTING_SERVER_ADDRESS"] = remoting_ip
diff --git a/src/ansys/fluent/core/launcher/pyfluent_enums.py b/src/ansys/fluent/core/launcher/pyfluent_enums.py
index f9f69523b02..cfff8b63c2b 100644
--- a/src/ansys/fluent/core/launcher/pyfluent_enums.py
+++ b/src/ansys/fluent/core/launcher/pyfluent_enums.py
@@ -42,7 +42,7 @@ def _missing_(cls, value: str | int | None):

         def is_int():
             for m in cls:
-                return True if type(m.value) == int else False
+                return True if isinstance(m.value, int) else False

         msg = ", " if is_int() else "', '"
         msg = (
diff --git a/src/ansys/fluent/core/launcher/watchdog.py b/src/ansys/fluent/core/launcher/watchdog.py
index e8531f4ab7e..b584b3fd42c 100644
--- a/src/ansys/fluent/core/launcher/watchdog.py
+++ b/src/ansys/fluent/core/launcher/watchdog.py
@@ -157,7 +157,7 @@ def launch(

     subprocess.Popen(cmd_send, **kwargs)

-    logger.info(f"Waiting for Watchdog to initialize, then proceeding...")
+    logger.info("Waiting for Watchdog to initialize, then proceeding...")
     file_exists = timeout_loop(
         lambda: init_file.is_file() or watchdog_err.is_file(), 30.0
     )
diff --git a/src/ansys/fluent/core/post_objects/meta.py b/src/ansys/fluent/core/post_objects/meta.py
index 82df12ddf98..8b5abc8d99f 100644
--- a/src/ansys/fluent/core/post_objects/meta.py
+++ b/src/ansys/fluent/core/post_objects/meta.py
@@ -98,7 +98,7 @@ def _execute(_self, *args, **kwargs):
                         )

                 elif attr == "range":
-                    if type(arg_value) != int and type(arg_value) != float:
+                    if not isinstance(arg_value, (int, float)):
                         raise RuntimeError(
                             f"{arg} value {arg_value} is not number."
                         )
@@ -788,15 +788,6 @@ def __init__(self, parent, object_class, api_helper, name=""):
                 cls(self, api_helper, name),
             )

-    @classmethod
-    def get_root(self, obj=None):
-        """Returns the top-most parent object."""
-        obj = self if obj is None else obj
-        parent = obj
-        if getattr(obj, "_parent", None):
-            parent = self.get_root(obj._parent)
-        return parent
-
     def update(self, value):
         """Updates this object with the provided dictionary."""
         for name, val in value.items():
@@ -842,35 +833,6 @@ def session_handle(self):
         """Returns the session-handle object."""
         return self.get_session_handle()

-    def get_root(self, obj=None):
-        """Get root object."""
-        obj = self if obj is None else obj
-        parent = obj
-        if getattr(obj, "_parent", None):
-            parent = self.get_root(obj._parent)
-        return parent
-
-    def get_session(self, obj=None):
-        """Get session object."""
-        root = self.get_root(obj)
-        return root.session
-
-    def get_path(self):
-        """Get parent path."""
-        if getattr(self, "_parent", None):
-            return self._parent.get_path() + "/" + self._name
-        return self._name
-
-    @property
-    def path(self):
-        """Get path."""
-        return self.get_path()
-
-    @property
-    def session(self):
-        """Get session object."""
-        return self.get_session()
-
     def __iter__(self):
         return iter(self.__collection)

diff --git a/src/ansys/fluent/core/post_objects/post_helper.py b/src/ansys/fluent/core/post_objects/post_helper.py
index ce1fe24b48e..ef3c6e2609d 100644
--- a/src/ansys/fluent/core/post_objects/post_helper.py
+++ b/src/ansys/fluent/core/post_objects/post_helper.py
@@ -127,7 +127,7 @@ def remote_surface_name(self, local_surface_name):
     # Following functions will be deprecated in future.
     def get_vector_fields(self):
         """Returns vector field."""
-        scheme_eval_str = "(map car (apply append (map client-inquire-cell-vector-functions (inquire-domain-for-cell-functions))))"  # noqa: E501
+        scheme_eval_str = "(map car (apply append (map client-inquire-cell-vector-functions (inquire-domain-for-cell-functions))))"
         return self._scheme_str_to_py_list(scheme_eval_str)

     def get_field_unit(self, field):
@@ -135,7 +135,7 @@ def get_field_unit(self, field):
         quantity = self._field_unit_quantity(field)
         if quantity == "*null*":
             return ""
-        scheme_eval_str = f"(units/get-pretty-wb-units-from-dimension (units/inquire-dimension '{quantity}))"  # noqa: E501
+        scheme_eval_str = f"(units/get-pretty-wb-units-from-dimension (units/inquire-dimension '{quantity}))"
         return " ".join(self._scheme_str_to_py_list(scheme_eval_str))

     def _field_unit_quantity(self, field):
diff --git a/src/ansys/fluent/core/services/batch_ops.py b/src/ansys/fluent/core/services/batch_ops.py
index 85aacd9014c..e1717bc277c 100644
--- a/src/ansys/fluent/core/services/batch_ops.py
+++ b/src/ansys/fluent/core/services/batch_ops.py
@@ -69,7 +69,9 @@ class BatchOps:
     """

     _proto_files: list[ModuleType] | None = None
-    _instance = lambda: None
+
+    def _instance():
+        return None

     @classmethod
     def instance(cls) -> _TBatchOps | None:
diff --git a/src/ansys/fluent/core/services/datamodel_se.py b/src/ansys/fluent/core/services/datamodel_se.py
index c036310be57..6b1520ed80a 100644
--- a/src/ansys/fluent/core/services/datamodel_se.py
+++ b/src/ansys/fluent/core/services/datamodel_se.py
@@ -142,8 +142,6 @@ def __call__(self, parent, names):
         if self._filter_fn is None:
             return names

-        filtered_children = []
-
         def validate_name(name):
             obj = getattr(parent, name)
             # might need to make this more flexible (e.g., enhanced workflow types)
diff --git a/src/ansys/fluent/core/services/scheme_eval.py b/src/ansys/fluent/core/services/scheme_eval.py
index 11ed207b9df..baafddfc324 100644
--- a/src/ansys/fluent/core/services/scheme_eval.py
+++ b/src/ansys/fluent/core/services/scheme_eval.py
@@ -5,9 +5,9 @@
 >>> from ansys.fluent.core.services.scheme_eval import Symbol as S
 >>> session.scheme_eval.eval([S('+'), 2, 3])
 5
->>> session.scheme_eval.eval([S('rpgetvar'), [S('string->symbol'), "mom/relax"]])  # noqa: E501
+>>> session.scheme_eval.eval([S('rpgetvar'), [S('string->symbol'), "mom/relax"]])
 0.7
->>> session.scheme_eval.exec(('(ti-menu-load-string "/report/system/proc-stats")',))  # noqa: E501
+>>> session.scheme_eval.exec(('(ti-menu-load-string "/report/system/proc-stats")',))
 >>> # Returns TUI output string
 >>> session.scheme_eval.string_eval("(+ 2 3)")
 '5'
@@ -355,7 +355,6 @@ def is_defined(self, symbol: str) -> bool:
         bool
             True if symbol is defined, False otherwise
         """
-        S = Symbol  # noqa N806
         return not self.scheme_eval(
             f"(lexical-unreferenceable? user-initial-environment '{symbol})"
         )
diff --git a/src/ansys/fluent/core/session.py b/src/ansys/fluent/core/session.py
index 368a463f5a1..a35ed312c98 100644
--- a/src/ansys/fluent/core/session.py
+++ b/src/ansys/fluent/core/session.py
@@ -12,10 +12,6 @@
 from ansys.fluent.core.services import service_creator
 from ansys.fluent.core.services.field_data import FieldDataService
 from ansys.fluent.core.services.scheme_eval import SchemeEval
-from ansys.fluent.core.session_shared import (  # noqa: F401
-    _CODEGEN_MSG_DATAMODEL,
-    _CODEGEN_MSG_TUI,
-)
 from ansys.fluent.core.streaming_services.datamodel_event_streaming import (
     DatamodelEvents,
 )
diff --git a/src/ansys/fluent/core/solver/flobject.py b/src/ansys/fluent/core/solver/flobject.py
index 6fdf39f0ccc..451e815b6ab 100644
--- a/src/ansys/fluent/core/solver/flobject.py
+++ b/src/ansys/fluent/core/solver/flobject.py
@@ -48,10 +48,13 @@
 import weakref
 from zipimport import zipimporter

+import ansys.fluent.core as pyfluent
 from ansys.fluent.core.utils.fluent_version import FluentVersion
 from ansys.fluent.core.warnings import PyFluentDeprecationWarning, PyFluentUserWarning

+from .error_message import allowed_name_error_message, allowed_values_error
 from .flunits import UnhandledQuantity, get_si_unit_for_fluent_quantity
+from .settings_external import expand_api_file_argument, use_search


 def _ansys_units():
@@ -64,11 +67,6 @@ def _ansys_units():
         pass


-import ansys.fluent.core as pyfluent
-
-from .error_message import allowed_name_error_message, allowed_values_error
-from .settings_external import expand_api_file_argument, use_search
-
 settings_logger = logging.getLogger("pyfluent.settings_api")


@@ -468,6 +466,9 @@ def max(self):
         return None if isinstance(val, bool) else val


+QuantityT = TypeVar("QuantityT")
+
+
 class RealNumerical(Numerical):
     """A ``RealNumerical`` object representing a real value setting, including
     single real values and containers of real values, such as lists.
@@ -484,7 +485,7 @@ class RealNumerical(Numerical):
         Get the units string.
     """

-    def as_quantity(self) -> ansys.units.Quantity | None:
+    def as_quantity(self) -> QuantityT | None:
         """Get the state of the object as an ansys.units.Quantity."""
         error = None
         if not _ansys_units():
@@ -1889,12 +1890,12 @@ def __iter__(self):

     def __len__(self):
         """Number of child named objects."""
-        l = 0
+        count = 0
         for cname in self.child_names:
             cobj = getattr(self, cname)
             if isinstance(cobj, NamedObject):
-                l += len(cobj)
-        return l
+                count += len(cobj)
+        return count


 class CreatableNamedObjectMixin(collections.abc.MutableMapping, Generic[ChildTypeT]):
diff --git a/src/ansys/fluent/core/solver/flunits.py b/src/ansys/fluent/core/solver/flunits.py
index f98d78a77c5..2fce3d2bd83 100644
--- a/src/ansys/fluent/core/solver/flunits.py
+++ b/src/ansys/fluent/core/solver/flunits.py
@@ -103,6 +103,8 @@ def make_python_fl_unit_table(scheme_unit_table):

 from __future__ import annotations

+from typing import TypeVar
+
 _fl_unit_table = {
     "acceleration": "m s^-2",
     "angle": "radian",
@@ -242,13 +244,16 @@ def __init__(
         )


+QuantityT = TypeVar("QuantityT")
+
+
 class UnhandledQuantity(RuntimeError):
     """Raised on an attempt to get an unhandled Quantity."""

     def __init__(
         self,
         path: str,
-        quantity: Quantity,
+        quantity: QuantityT,
     ) -> None:
         """Initialize UnhandledQuantity."""
         super().__init__(
diff --git a/src/ansys/fluent/core/solver/function/reduction.py b/src/ansys/fluent/core/solver/function/reduction.py
index 63eff9dbc0c..12107b38f3f 100644
--- a/src/ansys/fluent/core/solver/function/reduction.py
+++ b/src/ansys/fluent/core/solver/function/reduction.py
@@ -197,7 +197,7 @@ def _extent_expression(
                 numerator += val * extent
                 denominator += extent
             except TypeError:
-                if type(val) == list:
+                if isinstance(val, list):
                     numerator += np.multiply(val, extent)
                     denominator += extent
                 else:
diff --git a/src/ansys/fluent/core/streaming_services/monitor_streaming.py b/src/ansys/fluent/core/streaming_services/monitor_streaming.py
index 19e75329ef8..8a3ce964b74 100644
--- a/src/ansys/fluent/core/streaming_services/monitor_streaming.py
+++ b/src/ansys/fluent/core/streaming_services/monitor_streaming.py
@@ -188,7 +188,6 @@ def _process_streaming(self, id, stream_begin_method, started_evt, *args, **kwar
             try:
                 data_received = {}
                 response = next(responses)
-                x_axis_type = response.xaxisdata.xaxistype
                 x_axis_index = response.xaxisdata.xaxisindex
                 data_received["xvalues"] = x_axis_index
                 for y_axis_value in response.yaxisvalues:
diff --git a/src/ansys/fluent/core/systemcoupling.py b/src/ansys/fluent/core/systemcoupling.py
index cbbfd68d26a..a288fc1950a 100644
--- a/src/ansys/fluent/core/systemcoupling.py
+++ b/src/ansys/fluent/core/systemcoupling.py
@@ -91,7 +91,7 @@ def get_scp_string() -> str:
             file_name=scp_file_name
         )

-        if self._solver._fluent_connection._remote_instance != None:
+        if self._solver._fluent_connection._remote_instance is not None:
             # download the file locally in case Fluent is remote
             # assume file transfer service is configured - download the file
             self._solver.download(scp_file_name)
diff --git a/src/ansys/fluent/core/utils/__init__.py b/src/ansys/fluent/core/utils/__init__.py
index 7c98f70737f..ee9f5549fea 100644
--- a/src/ansys/fluent/core/utils/__init__.py
+++ b/src/ansys/fluent/core/utils/__init__.py
@@ -5,10 +5,10 @@
 from pathlib import Path
 import sys

-logger = logging.getLogger("pyfluent.general")
-
 from ansys.fluent.core.search import _search  # noqa: F401

+logger = logging.getLogger("pyfluent.general")
+

 def load_module(module_name, file_path):
     """Load a module from a file path."""
diff --git a/src/ansys/fluent/core/utils/execution.py b/src/ansys/fluent/core/utils/execution.py
index d10e71447e3..77f3165bbfb 100644
--- a/src/ansys/fluent/core/utils/execution.py
+++ b/src/ansys/fluent/core/utils/execution.py
@@ -33,8 +33,8 @@ def asynchronous(f: Callable) -> Callable:
     >>> asynchronous_solve(solver_session_1, 100).result()
     >>> asynchronous(solver_session_2.tui.solve.iterate)(100).result()

-    .. _Future: https://docs.python.org/3/library/asyncio-future.html#future-object  # noqa: E501
-    .. _result(): https://docs.python.org/3/library/asyncio-future.html#asyncio.Future.result  # noqa: E501
+    .. _Future: https://docs.python.org/3/library/asyncio-future.html#future-object
+    .. _result(): https://docs.python.org/3/library/asyncio-future.html#asyncio.Future.result
     """

     @functools.wraps(f)
diff --git a/src/ansys/fluent/core/utils/file_transfer_service.py b/src/ansys/fluent/core/utils/file_transfer_service.py
index 0254f2c25f2..6bf488e4fb8 100644
--- a/src/ansys/fluent/core/utils/file_transfer_service.py
+++ b/src/ansys/fluent/core/utils/file_transfer_service.py
@@ -4,7 +4,7 @@
 import pathlib
 import random
 import shutil
-from typing import Any, Callable, List, Protocol  # noqa: F401
+from typing import Any, Protocol
 import warnings

 from ansys.fluent.core.utils import get_user_data_dir
diff --git a/src/ansys/fluent/core/utils/fluent_version.py b/src/ansys/fluent/core/utils/fluent_version.py
index 06fa35d21ca..0be9ab773a6 100644
--- a/src/ansys/fluent/core/utils/fluent_version.py
+++ b/src/ansys/fluent/core/utils/fluent_version.py
@@ -20,7 +20,7 @@ class ComparisonError(RuntimeError):
     def __init__(self):
         """Initialize ComparisonError."""
         super().__init__(
-            f"Comparison operations are only supported between two members of 'FluentVersion'."
+            "Comparison operations are only supported between two members of 'FluentVersion'."
         )

diff --git a/src/ansys/fluent/core/utils/networking.py b/src/ansys/fluent/core/utils/networking.py
index 003e1713808..deb4aec01c4 100644
--- a/src/ansys/fluent/core/utils/networking.py
+++ b/src/ansys/fluent/core/utils/networking.py
@@ -69,7 +69,7 @@ def find_remoting_ip() -> str:
         ip = addrinfo[-1][0]
         port = get_free_port()
         address = f"{ip}:{port}"
-        with _GrpcServer(address) as server:
+        with _GrpcServer(address):
             with grpc.insecure_channel(address) as channel:
                 stub = health_pb2_grpc.HealthStub(channel)
                 try:
diff --git a/src/ansys/fluent/core/workflow.py b/src/ansys/fluent/core/workflow.py
index a7703908816..dcdff54df0d 100644
--- a/src/ansys/fluent/core/workflow.py
+++ b/src/ansys/fluent/core/workflow.py
@@ -224,19 +224,6 @@ def get_direct_upstream_tasks(self) -> list:
             attr="requiredInputs", other_attr="outputs"
         )

-    def get_direct_upstream_tasks(self) -> list:
-        """Get the list of tasks upstream of this one and directly connected by a data
-        dependency.
-
-        Returns
-        -------
-        list
-            Upstream task list.
-        """
-        return self._tasks_with_matching_attributes(
-            attr="requiredInputs", other_attr="outputs"
-        )
-
     def get_direct_downstream_tasks(self) -> list:
         """Get the list of tasks downstream of this one and directly connected by a data
         dependency.
@@ -988,7 +975,7 @@ def _refreshed_command(self) -> ReadOnlyObject:

     def _cmd_sub_items_read_only(self, cmd, cmd_state):
         for key, value in cmd_state.items():
-            if type(value) == dict:
+            if isinstance(value, dict):
                 setattr(
                     cmd, key, self._cmd_sub_items_read_only(getattr(cmd, key), value)
                 )
diff --git a/tests/conftest.py b/tests/conftest.py
index 674c8e1978d..0409764ffe7 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -144,8 +144,8 @@ def exhaust_system_geometry_filename():

 def create_session(**kwargs):
     if pyfluent.USE_FILE_TRANSFER_SERVICE:
-        container_dict = {"mount_source": file_transfer_service.MOUNT_SOURCE}
         file_transfer_service = RemoteFileTransferStrategy()
+        container_dict = {"mount_source": file_transfer_service.MOUNT_SOURCE}
         return pyfluent.launch_fluent(
             container_dict=container_dict,
             file_transfer_service=file_transfer_service,
@@ -300,3 +300,8 @@ def periodic_rot_settings_session(new_solver_session):
         lightweight_setup=True,
     )
     return solver
+
+
+@pytest.fixture
+def disable_datamodel_cache(monkeypatch: pytest.MonkeyPatch):
+    monkeypatch.setattr(pyfluent, "DATAMODEL_USE_STATE_CACHE", False)
diff --git a/tests/fluent/test_meshing_workflow/test.py b/tests/fluent/test_meshing_workflow/test.py
index dec27cd492f..efa1e2b321e 100644
--- a/tests/fluent/test_meshing_workflow/test.py
+++ b/tests/fluent/test_meshing_workflow/test.py
@@ -1,7 +1,7 @@
 from ansys.fluent.core.examples import download_file

 geometry_file = download_file("mixing_elbow.pmdb", "pyfluent/mixing_elbow")
-watertight = meshing.watertight()
+watertight = meshing.watertight()  # noqa: F821
 watertight.import_geometry.file_name.set_state(geometry_file)
 assert watertight.import_geometry.length_unit() == "mm"
 watertight.import_geometry.length_unit = "in"
diff --git a/tests/fluent/test_settings_api/test.py b/tests/fluent/test_settings_api/test.py
index f43c3fb0d44..b695111bdcd 100644
--- a/tests/fluent/test_settings_api/test.py
+++ b/tests/fluent/test_settings_api/test.py
@@ -1,8 +1,8 @@
 from ansys.fluent.core.examples import download_file

 case_name = download_file("mixing_elbow.cas.h5", "pyfluent/mixing_elbow")
-solver.settings.file.read_case(file_name=case_name)
-viscous_settings = solver.settings.setup.models.viscous
+solver.settings.file.read_case(file_name=case_name)  # noqa: F821
+viscous_settings = solver.settings.setup.models.viscous  # noqa: F821
 assert viscous_settings.model() == "k-omega"
 allowed_values = viscous_settings.model.allowed_values()
 assert "k-epsilon" in allowed_values
diff --git a/tests/fluent/test_version/test.py b/tests/fluent/test_version/test.py
index c6beb5ad7fe..f2c0611dff0 100644
--- a/tests/fluent/test_version/test.py
+++ b/tests/fluent/test_version/test.py
@@ -1,2 +1,2 @@
-assert ansys.fluent.core.__version__ == "0.27.dev0"
+assert ansys.fluent.core.__version__ == "0.27.dev0"  # noqa: F821
 exit()
diff --git a/tests/integration/test_optislang/test_optislang_integration.py b/tests/integration/test_optislang/test_optislang_integration.py
index 90bcc3f7a26..90d08e9f40f 100644
--- a/tests/integration/test_optislang/test_optislang_integration.py
+++ b/tests/integration/test_optislang/test_optislang_integration.py
@@ -84,7 +84,7 @@ def test_simple_solve(mixing_elbow_param_case_data_session):
     else:
         print("Solution is converged")

-    assert convergence == True, "Solution failed to converge"
+    assert convergence, "Solution failed to converge"

     # Step 5: Read the data again from the case and data file
     solver_session.settings.file.read_case_data(file_name=case_path)
@@ -148,8 +148,8 @@ def test_generate_read_mesh(mixing_elbow_geometry_filename):
test_generate_read_mesh(mixing_elbow_geometry_filename): h5_path = str(Path(temporary_resource_path) / "default_mesh.msh.h5") meshing.tui.file.write_mesh(gz_path) meshing.tui.file.write_mesh(h5_path) - assert (Path(temporary_resource_path) / "default_mesh.msh.gz").exists() == True - assert (Path(temporary_resource_path) / "default_mesh.msh.h5").exists() == True + assert (Path(temporary_resource_path) / "default_mesh.msh.gz").exists() + assert (Path(temporary_resource_path) / "default_mesh.msh.h5").exists() # Step 3: use created mesh file - .msh.gz/.msh.h5 meshing.tui.file.read_mesh(gz_path, "ok") @@ -163,8 +163,8 @@ def test_generate_read_mesh(mixing_elbow_geometry_filename): write_case = solver.settings.file.write_case write_case(file_name=gz_path) write_case(file_name=h5_path) - assert (Path(temporary_resource_path) / "default_case.cas.gz").exists() == True - assert (Path(temporary_resource_path) / "default_case.cas.h5").exists() == True + assert (Path(temporary_resource_path) / "default_case.cas.gz").exists() + assert (Path(temporary_resource_path) / "default_case.cas.h5").exists() solver.exit() shutil.rmtree(temporary_resource_path, ignore_errors=True) diff --git a/tests/test_cad_to_post_ftm.py b/tests/test_cad_to_post_ftm.py index 5ce6dd22f8d..59974d4d80f 100644 --- a/tests/test_cad_to_post_ftm.py +++ b/tests/test_cad_to_post_ftm.py @@ -32,9 +32,7 @@ def test_exhaust_system( meshing_session = fault_tolerant_workflow_session workflow = meshing_session.workflow - assign_task_args = partial( - assign_task_arguments, workflow=workflow, check_state=True - ) + _ = partial(assign_task_arguments, workflow=workflow, check_state=True) execute_task_with_pre_and_postconditions = partial( execute_task_with_pre_and_postcondition_checks, workflow=workflow diff --git a/tests/test_cad_to_post_wtm.py b/tests/test_cad_to_post_wtm.py index 2284bf83be8..0d3db8cd048 100644 --- a/tests/test_cad_to_post_wtm.py +++ b/tests/test_cad_to_post_wtm.py @@ -17,7 +17,7 @@ from functools import partial import pytest -from util.meshing_workflow import ( # noqa: F401 +from util.meshing_workflow import ( assign_task_arguments, execute_task_with_pre_and_postcondition_checks, ) diff --git a/tests/test_codegen.py b/tests/test_codegen.py index e8ad5f8a71f..65f312b3949 100644 --- a/tests/test_codegen.py +++ b/tests/test_codegen.py @@ -813,8 +813,8 @@ def test_codegen_old_with_settings_static_info(monkeypatch): "Q1": "Query", } api_tree_expected = {} - api_tree_expected[f""] = {} - api_tree_expected[f""] = settings_tree + api_tree_expected[""] = {} + api_tree_expected[""] = settings_tree assert api_tree == api_tree_expected shutil.rmtree(str(codegen_outdir)) @@ -1078,8 +1078,8 @@ def test_codegen_with_settings_static_info(monkeypatch): "Q1": "Query", } api_tree_expected = {} - api_tree_expected[f""] = {} - api_tree_expected[f""] = settings_tree + api_tree_expected[""] = {} + api_tree_expected[""] = settings_tree assert api_tree == api_tree_expected shutil.rmtree(str(codegen_outdir)) diff --git a/tests/test_data_model_cache.py b/tests/test_data_model_cache.py index 36927e222cd..7c53b54c6c5 100644 --- a/tests/test_data_model_cache.py +++ b/tests/test_data_model_cache.py @@ -429,6 +429,6 @@ def test_cache_per_session(): ): assert m1.meshing.GlobalSettings.EnableComplexMeshing() assert m2.meshing.GlobalSettings.EnableComplexMeshing() - w1 = m1.watertight() + _ = m1.watertight() assert not m1.meshing.GlobalSettings.EnableComplexMeshing() assert m2.meshing.GlobalSettings.EnableComplexMeshing() diff --git 
a/tests/test_datamodel_service.py b/tests/test_datamodel_service.py index c10539a6b69..074bc925d24 100644 --- a/tests/test_datamodel_service.py +++ b/tests/test_datamodel_service.py @@ -124,7 +124,7 @@ def test_add_on_deleted(new_meshing_session): meshing = new_meshing_session meshing.workflow.InitializeWorkflow(WorkflowType="Watertight Geometry") data = [] - subscription = meshing.workflow.TaskObject["Import Geometry"].add_on_deleted( + _ = meshing.workflow.TaskObject["Import Geometry"].add_on_deleted( lambda obj: data.append(convert_path_to_se_path(obj.path)) ) assert data == [] @@ -166,7 +166,7 @@ def test_add_on_affected(new_meshing_session): wt = meshing.watertight() sleep(5) assert len(data) > 0 - assert data[0] == True + assert data[0] calls = [] subscription2 = meshing.workflow.add_on_affected(lambda obj: calls.append(True)) @@ -220,7 +220,7 @@ def test_add_on_affected_at_type_path(new_meshing_session): meshing.workflow.InitializeWorkflow(WorkflowType="Watertight Geometry") sleep(5) assert len(data) > 0 - assert data[0] == True + assert data[0] data.clear() subscription.unsubscribe() meshing.workflow.InitializeWorkflow(WorkflowType="Fault-tolerant Meshing") @@ -244,7 +244,7 @@ def test_add_on_command_executed(new_meshing_session): meshing.meshing.ImportGeometry(FileName=import_file_name) sleep(5) assert len(data) > 0 - assert data[0] == True + assert data[0] data.clear() subscription.unsubscribe() meshing.meshing.ImportGeometry(FileName=import_file_name) @@ -252,11 +252,6 @@ def test_add_on_command_executed(new_meshing_session): assert data == [] -@pytest.fixture -def disable_datamodel_cache(monkeypatch: pytest.MonkeyPatch): - monkeypatch.setattr(pyfluent, "DATAMODEL_USE_STATE_CACHE", False) - - @pytest.mark.skip("https://github.com/ansys/pyfluent/issues/2999") @pytest.mark.fluent_version(">=23.2") @pytest.mark.codegen_required @@ -591,7 +586,7 @@ def test_on_child_created_lifetime(new_solver_session): root = _create_datamodel_root(solver, test_rules) root.A["A1"] = {} data = [] - h = root.A["A1"].add_on_child_created("B", lambda _: data.append(1)) + _ = root.A["A1"].add_on_child_created("B", lambda _: data.append(1)) root.A["A1"].add_on_child_created("B", lambda _: data.append(2)) gc.collect() assert "/test/created/A:A1/B" in solver._se_service.subscriptions @@ -609,7 +604,7 @@ def test_on_deleted_lifetime(new_solver_session): root = _create_datamodel_root(solver, test_rules) root.A["A1"] = {} data = [] - h = root.A["A1"].add_on_deleted(lambda _: data.append(1)) + _ = root.A["A1"].add_on_deleted(lambda _: data.append(1)) root.A["A1"].add_on_deleted(lambda _: data.append(2)) gc.collect() assert "/test/deleted/A:A1" in solver._se_service.subscriptions @@ -630,7 +625,7 @@ def test_on_changed_lifetime(new_solver_session): root = _create_datamodel_root(solver, test_rules) root.A["A1"] = {} data = [] - h = root.A["A1"].X.add_on_changed(lambda _: data.append(1)) + _ = root.A["A1"].X.add_on_changed(lambda _: data.append(1)) root.A["A1"].X.add_on_changed(lambda _: data.append(2)) gc.collect() assert "/test/modified/A:A1/X" in solver._se_service.subscriptions @@ -648,7 +643,7 @@ def test_on_affected_lifetime(new_solver_session): root = _create_datamodel_root(solver, test_rules) root.A["A1"] = {} data = [] - h = root.A["A1"].add_on_affected(lambda _: data.append(1)) + _ = root.A["A1"].add_on_affected(lambda _: data.append(1)) root.A["A1"].add_on_affected(lambda _: data.append(2)) gc.collect() assert "/test/affected/A:A1" in solver._se_service.subscriptions @@ -666,7 +661,7 @@ def 
test_on_affected_at_type_path_lifetime(new_solver_session): root = _create_datamodel_root(solver, test_rules) root.A["A1"] = {} data = [] - h = root.A["A1"].add_on_affected_at_type_path("B", lambda _: data.append(1)) + _ = root.A["A1"].add_on_affected_at_type_path("B", lambda _: data.append(1)) root.A["A1"].add_on_affected_at_type_path("B", lambda _: data.append(2)) gc.collect() assert "/test/affected/A:A1/B" in solver._se_service.subscriptions @@ -684,7 +679,7 @@ def test_on_command_executed_lifetime(new_solver_session): root = _create_datamodel_root(solver, test_rules) root.A["A1"] = {} data = [] - h = root.A["A1"].add_on_command_executed("C", lambda *args: data.append(1)) + _ = root.A["A1"].add_on_command_executed("C", lambda *args: data.append(1)) root.A["A1"].add_on_command_executed("C", lambda *args: data.append(2)) gc.collect() assert "/test/command_executed/A:A1/C" in solver._se_service.subscriptions @@ -702,7 +697,7 @@ def test_on_attribute_changed_lifetime(new_solver_session): root = _create_datamodel_root(solver, test_rules) root.A["A1"] = {} data = [] - h = root.A["A1"].add_on_attribute_changed("isABC", lambda _: data.append(1)) + _ = root.A["A1"].add_on_attribute_changed("isABC", lambda _: data.append(1)) root.A["A1"].add_on_attribute_changed("isABC", lambda _: data.append(2)) gc.collect() assert "/test/attribute_changed/A:A1/isABC" in solver._se_service.subscriptions @@ -722,7 +717,7 @@ def test_on_command_attribute_changed_lifetime(new_solver_session): root = _create_datamodel_root(solver, test_rules) root.A["A1"] = {} data = [] - h = root.A["A1"].add_on_command_attribute_changed( + _ = root.A["A1"].add_on_command_attribute_changed( "C", "isABC", lambda _: data.append(1) ) root.A["A1"].add_on_command_attribute_changed( @@ -757,7 +752,7 @@ def test_on_affected_lifetime_with_delete_child_objects(new_solver_session): pyfluent.logging.enable() root.A["A1"] = {} data = [] - h = root.A["A1"].add_on_affected(lambda _: data.append(1)) + _ = root.A["A1"].add_on_affected(lambda _: data.append(1)) root.A["A1"].add_on_affected(lambda _: data.append(2)) gc.collect() assert "/test/affected/A:A1" in solver._se_service.subscriptions @@ -776,7 +771,7 @@ def test_on_affected_lifetime_with_delete_all_child_objects(new_solver_session): pyfluent.logging.enable() root.A["A1"] = {} data = [] - h = root.A["A1"].add_on_affected(lambda _: data.append(1)) + _ = root.A["A1"].add_on_affected(lambda _: data.append(1)) root.A["A1"].add_on_affected(lambda _: data.append(2)) gc.collect() assert "/test/affected/A:A1" in solver._se_service.subscriptions diff --git a/tests/test_field_data.py b/tests/test_field_data.py index 3464696d2c9..0848a9a2491 100644 --- a/tests/test_field_data.py +++ b/tests/test_field_data.py @@ -347,19 +347,19 @@ def test_field_data_errors(new_solver_session) -> None: "mixing_elbow.msh.h5", "pyfluent/mixing_elbow" ) - with pytest.raises(DisallowedValuesError) as fne: + with pytest.raises(DisallowedValuesError): solver.fields.field_data.get_scalar_field_data( field_name="y-face-area", surfaces=[0] ) - with pytest.raises(DisallowedValuesError) as fne: + with pytest.raises(DisallowedValuesError): solver.fields.field_data.get_scalar_field_data( field_name="partition-neighbors", surfaces=[0] ) solver.file.read(file_type="case", file_name=import_file_name) - with pytest.raises(FieldUnavailable) as fnu: + with pytest.raises(FieldUnavailable): solver.fields.field_data.get_scalar_field_data( field_name="density", surfaces=[0] ) @@ -378,14 +378,14 @@ def 
test_field_data_errors(new_solver_session) -> None:
     solver.solution.initialization.hybrid_initialize()

     # Get field data object
-    field_data = solver.fields.field_data
+    assert solver.fields.field_data

-    with pytest.raises(DisallowedValuesError) as sne:
+    with pytest.raises(DisallowedValuesError):
         solver.fields.field_data.get_scalar_field_data(
             field_name="density", surfaces=["bob"]
         )

-    with pytest.raises(DisallowedValuesError) as fne:
+    with pytest.raises(DisallowedValuesError):
         solver.fields.field_data.get_scalar_field_data(
             field_name="xdensity", surfaces=[0]
         )
@@ -403,19 +403,19 @@ def test_field_info_validators(new_solver_session) -> None:
     vector_field_1 = solver.fields.field_info.validate_vector_fields("velocity")
     assert vector_field_1 is None

-    with pytest.raises(DisallowedValuesError) as vector_field_error:
+    with pytest.raises(DisallowedValuesError):
         solver.fields.field_info.validate_vector_fields("relative-vel")

     scalar_field_1 = solver.fields.field_info.validate_scalar_fields("z-velocity")
     assert scalar_field_1 is None

-    with pytest.raises(DisallowedValuesError) as scalar_field_error:
+    with pytest.raises(DisallowedValuesError):
         solver.fields.field_info.validate_scalar_fields("z-vel")

     surface = solver.fields.field_info.validate_surfaces(["cold-inlet"])
     assert surface is None

-    with pytest.raises(DisallowedValuesError) as surface_error:
+    with pytest.raises(DisallowedValuesError):
         solver.fields.field_info.validate_surfaces(["out"])
diff --git a/tests/test_file_session.py b/tests/test_file_session.py
index 1f72ab3eaf8..06355793200 100644
--- a/tests/test_file_session.py
+++ b/tests/test_file_session.py
@@ -320,10 +320,10 @@ def test_error_handling_single_phase():

     transaction_1 = field_data.new_transaction()

-    with pytest.raises(NotImplementedError) as msg:
+    with pytest.raises(NotImplementedError):
         transaction_1.add_pathlines_fields_request("SV_T", surfaces=[3, 5])

-    with pytest.raises(NotImplementedError) as msg:
+    with pytest.raises(NotImplementedError):
         field_data.get_pathlines_field_data("SV_T", surfaces=[3, 5])

@@ -345,13 +345,11 @@ def test_error_handling_multi_phase():
     field_data = file_session.fields.field_data

     transaction_1 = field_data.new_transaction()
-    with pytest.raises(InvalidMultiPhaseFieldName) as msg:
+    with pytest.raises(InvalidMultiPhaseFieldName):
         transaction_1.add_scalar_fields_request("SV_WALL_YPLUS", surfaces=[29, 30])

-    with pytest.raises(InvalidMultiPhaseFieldName) as msg:
-        d_size = field_data.get_vector_field_data("velocity", surfaces=[34])[34].size
+    with pytest.raises(InvalidMultiPhaseFieldName):
+        field_data.get_vector_field_data("velocity", surfaces=[34])[34].size

-    with pytest.raises(InvalidFieldName) as msg:
-        d_size = field_data.get_vector_field_data("phase-1:temperature", surfaces=[34])[
-            34
-        ].size
+    with pytest.raises(InvalidFieldName):
+        field_data.get_vector_field_data("phase-1:temperature", surfaces=[34])[34].size
diff --git a/tests/test_flobject.py b/tests/test_flobject.py
index 55293bad038..8010c88f263 100644
--- a/tests/test_flobject.py
+++ b/tests/test_flobject.py
@@ -230,13 +230,12 @@ def __len__(self):
     def size(self):
         return len(self._objs)

-    def resize(self, l):
-        if l > len(self._objs):
-            # pylint: disable=unused-variable
-            for i in range(len(self._objs), l):
+    def resize(self, new_size):
+        if new_size > len(self._objs):
+            for _ in range(len(self._objs), new_size):
                 self._objs.append(self.child_object_type(self))
-        elif l < len(self._objs):
-            self._objs = self._objs[:l]
+        elif new_size < len(self._objs):
+            self._objs = self._objs[:new_size]

     def get_child(self, c):
         return self._objs[int(c)]
@@ -526,7 +525,7 @@ def test_attrs():
     assert r.g_1.s_4.get_attr("allowed-values") == ["foo", "bar"]
     r.g_1.b_3 = True
     assert not r.g_1.s_4.get_attr("active?")
-    with pytest.raises(InactiveObjectError) as einfo:
+    with pytest.raises(InactiveObjectError):
         r.g_1.s_4.get_attr("allowed-values") == ["foo", "bar"]

@@ -994,7 +993,7 @@ def test_strings_with_allowed_values(static_mixer_settings_session):
     solver = static_mixer_settings_session

     with pytest.raises(AttributeError) as e:
-        string_without_allowed_values = solver.file.auto_save.root_name.allowed_values()
+        solver.file.auto_save.root_name.allowed_values()
     assert e.value.args[0] == "'root_name' object has no attribute 'allowed_values'"

     string_with_allowed_values = solver.setup.general.solver.type.allowed_values()
@@ -1034,7 +1033,7 @@ def test_ansys_units_integration(mixing_elbow_settings_session):
     hydraulic_diameter = turbulence.hydraulic_diameter
     hydraulic_diameter.set_state("1 [in]")
     assert hydraulic_diameter() == "1 [in]"
-    assert hydraulic_diameter.as_quantity() == None
+    assert hydraulic_diameter.as_quantity() is None
     assert hydraulic_diameter.state_with_units() == ("1 [in]", "m")
     assert hydraulic_diameter.units() == "m"
     turbulent_intensity = turbulence.turbulent_intensity
diff --git a/tests/test_fluent_fixes.py b/tests/test_fluent_fixes.py
index e4ab6e5f8ba..be1fe0c4232 100644
--- a/tests/test_fluent_fixes.py
+++ b/tests/test_fluent_fixes.py
@@ -31,7 +31,7 @@ def test_allowed_values_on_report_definitions_1364(new_solver_session):

     assert report_def.zone_names.allowed_values() == ["fluid"]

-    assert report_def.expr_list.allowed_values() == None
+    assert report_def.expr_list.allowed_values() is None


 @pytest.mark.fluent_version(">=23.2")
@@ -42,7 +42,7 @@ def test_monitors_list_set_data_637_974_1744_2188(new_solver_session):
         file_name="exhaust_system.cas.h5", directory="pyfluent/exhaust_system"
     )

-    import_data = examples.download_file(
+    examples.download_file(
         file_name="exhaust_system.dat.h5", directory="pyfluent/exhaust_system"
     )
diff --git a/tests/test_fluent_session.py b/tests/test_fluent_session.py
index 1a2e3e596be..2539f1e0e21 100644
--- a/tests/test_fluent_session.py
+++ b/tests/test_fluent_session.py
@@ -134,7 +134,7 @@ def test_does_not_exit_fluent_by_default_when_connected_to_running_fluent(
 ) -> None:
     session1 = pyfluent.launch_fluent()

-    with pytest.raises(IpPortNotProvided) as msg:
+    with pytest.raises(IpPortNotProvided):
         session2 = pyfluent.connect_to_fluent(
             ip=session1.connection_properties.ip,
             password=session1.connection_properties.password,
@@ -265,6 +265,6 @@ def test_fluent_exit_wait():
     session3.exit(wait=True)
     assert session3._fluent_connection.wait_process_finished(wait=0)

-    with pytest.raises(WaitTypeError) as msg:
+    with pytest.raises(WaitTypeError):
         session4 = pyfluent.launch_fluent()
         session4.exit(wait="wait")
diff --git a/tests/test_launcher.py b/tests/test_launcher.py
index a4384c15000..18ac2a0862c 100644
--- a/tests/test_launcher.py
+++ b/tests/test_launcher.py
@@ -5,7 +5,7 @@
 import pytest

 import ansys.fluent.core as pyfluent
-from ansys.fluent.core import PyFluentDeprecationWarning  # noqa: F401
+from ansys.fluent.core import PyFluentDeprecationWarning
 from ansys.fluent.core.examples.downloads import download_file
 from ansys.fluent.core.exceptions import DisallowedValuesError, InvalidArgument
 from ansys.fluent.core.launcher import launcher_utils
@@ -35,7 +35,7 @@


 def test_gpu_version_error():
-    with pytest.raises(GPUSolverSupportError) as msg:
+    with pytest.raises(GPUSolverSupportError):
         pyfluent.launch_fluent(
             mode="meshing",
             dimension=2,
@@ -483,6 +483,6 @@ def test_container_warning_for_mount_source(caplog):
         "mount_source": os.getcwd(),
         "mount_target": "/mnt/pyfluent/tests",
     }
-    solver = pyfluent.launch_fluent(container_dict=container_dict)
+    _ = pyfluent.launch_fluent(container_dict=container_dict)
     assert container_dict["mount_source"] in caplog.text
     assert container_dict["mount_target"] in caplog.text
diff --git a/tests/test_launcher_remote.py b/tests/test_launcher_remote.py
index f7250ca4382..7815a741bb6 100644
--- a/tests/test_launcher_remote.py
+++ b/tests/test_launcher_remote.py
@@ -99,7 +99,7 @@ def test_launch_remote_instance(monkeypatch, new_solver_session):
     )
     server.start()

-    with pytest.raises(UnsupportedRemoteFluentInstance) as msg:
+    with pytest.raises(UnsupportedRemoteFluentInstance):
         fluent_connection = FluentConnection(
             ip=ip,
             port=port,
diff --git a/tests/test_meshing_workflow.py b/tests/test_meshing_workflow.py
index cfcfe9d3aeb..b02cbdb498f 100644
--- a/tests/test_meshing_workflow.py
+++ b/tests/test_meshing_workflow.py
@@ -1,7 +1,7 @@
 from functools import partial

 import pytest
-from util.meshing_workflow import (  # noqa: F401; model_object_throws_on_invalid_arg,
+from util.meshing_workflow import (
     assign_task_arguments,
     execute_task_with_pre_and_postcondition_checks,
 )
@@ -195,10 +195,10 @@ def test_read_only_behaviour_of_command_arguments(new_meshing_session):
     w.InitializeWorkflow(WorkflowType="Watertight Geometry")
     import_geom = w.TaskObject["Import Geometry"]

-    with pytest.raises(AttributeError) as msg:
+    with pytest.raises(AttributeError):
         import_geom.arguments.MeshUnit.set_state("in")

-    with pytest.raises(AttributeError) as msg:
+    with pytest.raises(AttributeError):
         import_geom.arguments.CadImportOptions.OneZonePer.set_state(None)

     assert "set_state" in dir(m())
@@ -212,7 +212,7 @@ def test_dummy_journal_data_model_methods(new_meshing_session):
     w.InitializeWorkflow(WorkflowType="Watertight Geometry")
     import_geom = w.TaskObject["Import Geometry"]

-    with pytest.raises(AttributeError) as msg:
+    with pytest.raises(AttributeError):
         import_geom.delete_child()

@@ -229,7 +229,7 @@ def test_iterate_meshing_workflow_task_container(new_meshing_session):
 def test_nonexistent_attrs(new_meshing_session):
     meshing = new_meshing_session
     assert not hasattr(meshing.workflow, "xyz")
-    with pytest.raises(AttributeError) as msg:
+    with pytest.raises(AttributeError):
         meshing.workflow.xyz

@@ -239,7 +239,7 @@ def test_old_workflow_structure(new_meshing_session):
     meshing = new_meshing_session
     meshing.workflow.InitializeWorkflow(WorkflowType="Watertight Geometry")
     assert meshing.workflow.TaskObject["Import Geometry"]
-    with pytest.raises(AttributeError) as msg:
+    with pytest.raises(AttributeError):
         meshing.workflow.import_geometry
diff --git a/tests/test_meshingmode/test_meshing_launch.py b/tests/test_meshingmode/test_meshing_launch.py
index f831444e1f8..af8f64e9d6f 100644
--- a/tests/test_meshingmode/test_meshing_launch.py
+++ b/tests/test_meshingmode/test_meshing_launch.py
@@ -83,7 +83,7 @@ def test_launch_pure_meshing(mixing_elbow_watertight_pure_meshing_session):
 def test_launch_meshing_and_switch(new_meshing_session):
     meshing = new_meshing_session
     assert not meshing.switched
-    solver = meshing.switch_to_solver()
+    _ = meshing.switch_to_solver()
     assert meshing.switched
     assert not meshing.tui
     assert not meshing.meshing
diff --git a/tests/test_new_meshing_workflow.py b/tests/test_new_meshing_workflow.py
index ba8264dd1e3..6ae93fc40cb 100644
--- a/tests/test_new_meshing_workflow.py
+++ b/tests/test_new_meshing_workflow.py
@@ -6,7 +6,6 @@
 from ansys.fluent.core import FluentVersion, examples
 from ansys.fluent.core.workflow import camel_to_snake_case
 from tests.conftest import new_meshing_session
-from tests.test_datamodel_service import disable_datamodel_cache  # noqa: F401


 @pytest.mark.nightly
diff --git a/tests/test_preferences.py b/tests/test_preferences.py
index 71533f1e224..65a1daddbb2 100644
--- a/tests/test_preferences.py
+++ b/tests/test_preferences.py
@@ -77,13 +77,13 @@ def test_read_only_preferences(new_solver_session):
     solver = new_solver_session
     m = solver.preferences.MeshingWorkflow
     m.SaveCheckpointFiles = True
-    assert m.SaveCheckpointFiles() == True
+    assert m.SaveCheckpointFiles() is True
     assert m.CheckpointingOption() == "Write mesh files"
-    assert m.CheckpointingOption.is_read_only() == True
+    assert m.CheckpointingOption.is_read_only() is True
     with pytest.raises(RuntimeError):
         m.CheckpointingOption = "Write into memory"
     m.SaveCheckpointFiles = False
-    assert m.SaveCheckpointFiles() == False
-    assert m.CheckpointingOption.is_read_only() == False
+    assert m.SaveCheckpointFiles() is False
+    assert m.CheckpointingOption.is_read_only() is False
     m.CheckpointingOption = "Write into memory"
     assert m.CheckpointingOption() == "Write into memory"
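The hunks above repeatedly drop unused `as msg:` / `as sne:` bindings from `pytest.raises`. The rule of thumb, as a minimal sketch (the `parse_positive` helper is a toy stand-in, not part of this codebase): use the bare context manager when only the exception type matters, and bind `as excinfo` only when the test asserts on the exception itself.

```python
import pytest


def parse_positive(value: str) -> int:
    """Toy helper used only to illustrate the two pytest.raises idioms."""
    number = int(value)  # raises ValueError for non-numeric input
    if number <= 0:
        raise ValueError(f"expected a positive integer, got {number}")
    return number


def test_rejects_non_numeric():
    # Bare context manager: the exception type is the whole assertion,
    # so binding `as msg` would only create an unused variable.
    with pytest.raises(ValueError):
        parse_positive("bob")


def test_reports_the_offending_value():
    # Keep the binding only when the exception's payload is inspected.
    with pytest.raises(ValueError) as excinfo:
        parse_positive("-3")
    assert "expected a positive integer" in str(excinfo.value)
```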
diff --git a/tests/test_reduction.py b/tests/test_reduction.py
index 24067fc9545..06924b8146d 100644
--- a/tests/test_reduction.py
+++ b/tests/test_reduction.py
@@ -1,3 +1,5 @@
+from typing import Any
+
 import pytest

 from ansys.fluent.core.services.reduction import _locn_names_and_objs
@@ -58,7 +60,7 @@ def _test_area_average(solver):
         "AreaAve(AbsolutePressure, ['inlet1'])"
     )
     expr_val = solver_named_expressions["test_expr_1"].get_value()
-    assert type(expr_val) == float and expr_val != 0.0
+    assert isinstance(expr_val, float) and expr_val != 0.0
     val = solver.fields.reduction.area_average(
         expression="AbsolutePressure",
         locations=solver.setup.boundary_conditions.velocity_inlet,
@@ -80,7 +82,7 @@ def _test_min(solver1, solver2):
     test_expr2.definition = "minimum(test_expr_2, ['outlet'])"
     # (MK) Is the expression definition valid?
     # expected_result = test_expr2.get_value()
-    result = solver1.fields.reduction.minimum(
+    solver1.fields.reduction.minimum(
         expression=test_expr1.definition(),
         locations=[
             solver1.setup.boundary_conditions["outlet"],
@@ -260,7 +262,7 @@ def _test_area_integrated_average(solver1, solver2):

 def _test_error_handling(solver):
     if int(solver._version) < 241:
-        with pytest.raises(RuntimeError) as msg:
+        with pytest.raises(RuntimeError):
            solver.fields.reduction.area_average(
                expression="AbsoluteVelocity",  # This is a wrong expression intentionally passed
                locations=solver.setup.boundary_conditions.velocity_inlet,
@@ -333,7 +335,7 @@ def _test_sum(solver):
         "Sum(AbsolutePressure, ['inlet1'], Weight=Area)"
     )
     expr_val = solver.setup.named_expressions["test_expr_1"].get_value()
-    assert type(expr_val) == float and expr_val != 0.0
+    assert isinstance(expr_val, float) and expr_val != 0.0

     val = solver.fields.reduction.sum(
         expression="AbsolutePressure",
@@ -352,7 +354,7 @@ def _test_sum_if(solver):
         "SumIf(AbsolutePressure, AbsolutePressure > 0[Pa], ['inlet1'], Weight=Area)"
     )
     expr_val = solver.setup.named_expressions["test_expr_1"].get_value()
-    assert type(expr_val) == float and expr_val != 0.0
+    assert isinstance(expr_val, float) and expr_val != 0.0

     val = solver.fields.reduction.sum_if(
         expression="AbsolutePressure",
@@ -366,13 +368,15 @@ def _test_sum_if(solver):


 @pytest.fixture
-def static_mixer_case_session2(static_mixer_case_session):
+def static_mixer_case_session2(static_mixer_case_session: Any):
     return static_mixer_case_session


 @pytest.mark.nightly
 @pytest.mark.fluent_version(">=23.1")
-def test_reductions(static_mixer_case_session, static_mixer_case_session2) -> None:
+def test_reductions(
+    static_mixer_case_session: Any, static_mixer_case_session2: Any
+) -> None:
     solver1 = static_mixer_case_session
     solver2 = static_mixer_case_session2
     _test_context(solver1)
@@ -391,7 +395,7 @@ def test_reductions(static_mixer_case_session, static_mixer_case_session2) -> No


 @pytest.mark.fluent_version(">=24.2")
-def test_reduction_does_not_modify_case(static_mixer_case_session):
+def test_reduction_does_not_modify_case(static_mixer_case_session: Any):
     solver = static_mixer_case_session
     # After reading the static-mixer case in Fluent, case-modifed? flag is somehow True
     solver.scheme_eval.scheme_eval("(%save-case-id)")
@@ -404,7 +408,7 @@ def test_reduction_does_not_modify_case(static_mixer_case_session):


 @pytest.mark.fluent_version(">=24.2")
-def test_fix_for_invalid_location_inputs(static_mixer_case_session):
+def test_fix_for_invalid_location_inputs(static_mixer_case_session: Any):
     solver = static_mixer_case_session

     solver.solution.initialization.hybrid_initialize()
diff --git a/tests/test_rp_vars.py b/tests/test_rp_vars.py
index a42d9b927e5..ae070859013 100644
--- a/tests/test_rp_vars.py
+++ b/tests/test_rp_vars.py
@@ -55,7 +55,7 @@ def test_rp_vars_allowed_values(new_solver_session) -> None:

     assert rp_vars("number-of-iterations") == 0

-    with pytest.raises(RuntimeError) as msg:
+    with pytest.raises(RuntimeError):
         rp_vars("number-of-iterat")

     assert "number-of-iterations" in rp_vars.allowed_values()
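The `type(expr_val) == float` to `isinstance(expr_val, float)` changes in `test_reduction.py` are more than style: an exact-type comparison rejects subclasses, and flake8 flags the comparison form as E721. A minimal sketch, using a hypothetical `Meters` subclass to show the difference:

```python
class Meters(float):
    """Hypothetical float subclass standing in for a unit-aware quantity."""


value = Meters(1.5)

# An exact-type check rejects the subclass, which is rarely what a test means
# (and flake8 reports the comparison style as E721).
assert not type(value) == float  # noqa: E721

# isinstance() accepts subclasses and states the intent directly.
assert isinstance(value, float)
```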
diff --git a/tests/test_session.py b/tests/test_session.py
index 485fc8228b8..5e55e72a40c 100644
--- a/tests/test_session.py
+++ b/tests/test_session.py
@@ -97,7 +97,7 @@ def SchemeEval(

 def test_download_file():
     with pytest.raises(examples.RemoteFileNotFoundError):
-        import_case = examples.download_file(
+        examples.download_file(
             "mixing_elbow.cas.h5", "pyfluent/examples/DOE-ML-Mixing-Elbow"
         )

@@ -113,7 +113,7 @@ def test_create_mock_session_by_passing_ip_port_password() -> None:
     )
     server.start()

-    with pytest.raises(PortNotProvided) as msg:
+    with pytest.raises(PortNotProvided):
         fluent_connection = FluentConnection(
             ip=ip, password="12345", cleanup_on_exit=False
         )
@@ -327,16 +327,16 @@ def test_start_transcript_file_write(new_meshing_session):
     file_name = Path(file_name)
     file_name.touch()

-    prev_stat = file_name.stat()
-    prev_mtime = prev_stat.st_mtime
-    prev_size = prev_stat.st_size
+    # prev_stat = file_name.stat()
+    # prev_mtime = prev_stat.st_mtime
+    # prev_size = prev_stat.st_size

     session = new_meshing_session
     session.transcript.start(file_name)
     session = session.switch_to_solver()
     session.transcript.stop()

-    new_stat = file_name.stat()
+    # new_stat = file_name.stat()

     # this assertion is invalid.
     # assert new_stat.st_mtime > prev_mtime or new_stat.st_size > prev_size
@@ -360,8 +360,8 @@ def test_read_case_using_lightweight_mode():
         "mixing_elbow.cas.h5", "pyfluent/mixing_elbow"
     )
     if pyfluent.USE_FILE_TRANSFER_SERVICE:
-        container_dict = {"mount_source": file_transfer_service.MOUNT_SOURCE}
         file_transfer_service = RemoteFileTransferStrategy()
+        container_dict = {"mount_source": file_transfer_service.MOUNT_SOURCE}
         solver = pyfluent.launch_fluent(
             case_file_name=import_file_name,
             lightweight_mode=True,
@@ -380,7 +380,7 @@ def test_read_case_using_lightweight_mode():
         idle_period=1,
     )
     timeout_loop(
-        solver.setup.models.energy.enabled() == False,
+        not solver.setup.models.energy.enabled(),
         timeout=60,
         idle_period=1,
     )
@@ -425,7 +425,7 @@ def mock_parse_server_info_file(file_name):
     monkeypatch.setattr(session, "_parse_server_info_file", mock_parse_server_info_file)

     with pytest.raises(LaunchFluentError) as ex:
-        solver = pyfluent.launch_fluent()
+        _ = pyfluent.launch_fluent()
     # grpc.RpcError -> RuntimeError -> LaunchFluentError
     assert ex.value.__context__.__context__.code() == grpc.StatusCode.UNAVAILABLE

@@ -562,7 +562,7 @@ def test_general_exception_behaviour_in_session(new_solver_session):
     }
     graphics.contour["contour-velocity"].display()

-    mesh_file_2d = examples.download_file(
+    examples.download_file(
         "sample_2d_mesh.msh.h5",
         "pyfluent/surface_mesh",
         return_without_path=False,
diff --git a/tests/test_settings_reader.py b/tests/test_settings_reader.py
index 941a494a4b0..815276cb9c5 100644
--- a/tests/test_settings_reader.py
+++ b/tests/test_settings_reader.py
@@ -57,7 +57,7 @@ def test_settings_reader_static_mixer_h5():

 def test_meshing_unavailable():
     reader = SettingsReader(settings_file_name=static_mixer_settings_file())
-    with pytest.raises(AttributeError) as msg:
+    with pytest.raises(AttributeError):
         reader.get_mesh()

diff --git a/tests/test_solver_monitors.py b/tests/test_solver_monitors.py
index 8d2dbe735bd..a59a0e640a4 100644
--- a/tests/test_solver_monitors.py
+++ b/tests/test_solver_monitors.py
@@ -74,7 +74,7 @@ def monitor_callback():
     # n.b. there is no checking of the callback signature at registration. Instead
     # we would get a TypeError at callback time if the signature is wrong. The correct
     # signature is undocumented.
-    register_id = solver.monitors.register_callback(monitor_callback)
+    solver.monitors.register_callback(monitor_callback)

     # trigger callback by running the solver
     assert not monitor_callback.called
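The two swapped lines in `test_read_case_using_lightweight_mode` fix a genuine use-before-assignment: the old code read `file_transfer_service.MOUNT_SOURCE` one line before `file_transfer_service` was bound. A minimal reproduction of the failure mode (the `FileTransferStrategy` class here is a hypothetical stand-in, not the PyFluent one):

```python
class FileTransferStrategy:
    """Hypothetical stand-in for the real file-transfer service."""

    MOUNT_SOURCE = "/tmp/pyfluent"


def broken():
    # UnboundLocalError: the local name is read before it is assigned.
    container_dict = {"mount_source": file_transfer_service.MOUNT_SOURCE}
    file_transfer_service = FileTransferStrategy()
    return container_dict


def fixed():
    # Construct the service first, then read its attribute.
    file_transfer_service = FileTransferStrategy()
    container_dict = {"mount_source": file_transfer_service.MOUNT_SOURCE}
    return container_dict
```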
diff --git a/tests/test_solvermode/test_calculationactivities.py b/tests/test_solvermode/test_calculationactivities.py
index 3d4066e64df..7d6f84bfd51 100644
--- a/tests/test_solvermode/test_calculationactivities.py
+++ b/tests/test_solvermode/test_calculationactivities.py
@@ -7,14 +7,14 @@ def test_solver_calculation(static_mixer_case_session):
     solver_session = static_mixer_case_session
     scheme_eval = solver_session.scheme_eval.scheme_eval

-    assert scheme_eval("(client-get-var 'residuals/plot?)") == True
+    assert scheme_eval("(client-get-var 'residuals/plot?)") is True
     # TODO: Remove the if condition after a stable version of 23.1 is available and update the commands as required.
     if solver_session.get_fluent_version() < FluentVersion.v231:
         solver_session.tui.solve.monitors.residual.plot("no")
-    assert scheme_eval("(client-get-var 'residuals/plot?)") == False
-    assert scheme_eval("(data-valid?)") == False
+    assert scheme_eval("(client-get-var 'residuals/plot?)") is False
+    assert scheme_eval("(data-valid?)") is False
     solver_session.solution.initialization.hybrid_initialize()
-    assert scheme_eval("(data-valid?)") == True
+    assert scheme_eval("(data-valid?)") is True
     # solver_session.solution.run_calculation.iterate.get_attr("arguments")
     # solver_session.solution.run_calculation.number_of_iterations = 5
     # assert solver_session.solution.run_calculation.number_of_iterations == 5
diff --git a/tests/test_solvermode/test_controls.py b/tests/test_solvermode/test_controls.py
index 90dd6fbc35e..4656f98b813 100644
--- a/tests/test_solvermode/test_controls.py
+++ b/tests/test_solvermode/test_controls.py
@@ -43,8 +43,8 @@ def test_controls(mixing_elbow_settings_session):
     )
     assert param_coarsening.max_coarse_levels() == 48
     assert param_coarsening.coarsen_by_interval() == 9
-    assert param_coarsening.conservative_coarsening() == True
-    assert param_coarsening.aggressive_coarsening() == True
+    assert param_coarsening.conservative_coarsening() is True
+    assert param_coarsening.aggressive_coarsening() is True

     param_fixed_cycle = (
         solver.solution.controls.advanced.multi_grid.amg_controls.scalar_parameters.fixed_cycle_parameters
@@ -66,7 +66,7 @@ def test_controls(mixing_elbow_settings_session):
     solver.solution.methods.p_v_coupling.flow_scheme = "Coupled"
     assert solver.solution.methods.p_v_coupling.flow_scheme() == "Coupled"
     solver.solution.methods.p_v_coupling.coupled_form = True
-    assert solver.solution.methods.p_v_coupling.coupled_form() == True
+    assert solver.solution.methods.p_v_coupling.coupled_form() is True
     solver.solution.controls.advanced.multi_grid.amg_controls.scalar_parameters.smoother_type = (
         "Gauss-Seidel"
     )
diff --git a/tests/test_solvermode/test_methods.py b/tests/test_solvermode/test_methods.py
index 578a1201e47..38b22f9e5b4 100644
--- a/tests/test_solvermode/test_methods.py
+++ b/tests/test_solvermode/test_methods.py
@@ -57,7 +57,7 @@ def test_methods(mixing_elbow_settings_session):
         "first_to_second_order_blending": 1.0,
     }
     solver.solution.methods.expert.numerics_pbns.presto_pressure_scheme = True
-    assert solver.solution.methods.expert.numerics_pbns.presto_pressure_scheme() == True
+    assert solver.solution.methods.expert.numerics_pbns.presto_pressure_scheme() is True
     solver.solution.methods.gradient_scheme = "green-gauss-node-based"
     assert solver.solution.methods.gradient_scheme() == "green-gauss-node-based"
     solver.solution.methods.warped_face_gradient_correction(
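The `== True` to `is True` rewrites across `test_solvermode/` follow flake8's E712: equality against a singleton is too loose, because `bool` subclasses `int` and any truthy-equal value passes, while identity pins down both value and type. A small sketch (the `plot_enabled` helper is illustrative, not from this codebase):

```python
def plot_enabled() -> bool:
    """Stand-in for a settings query that returns a real bool."""
    return True


one = 1
# Equality is too loose: bool subclasses int, so int 1 compares equal to True.
assert one == True  # noqa: E712
# Identity distinguishes int 1 from the True singleton.
assert one is not True

# The strict form only accepts the bool singleton itself.
assert plot_enabled() is True
```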
diff --git a/tests/test_solvermode/test_models.py b/tests/test_solvermode/test_models.py
index 29c9d0d853f..3fe0bdc3d80 100644
--- a/tests/test_solvermode/test_models.py
+++ b/tests/test_solvermode/test_models.py
@@ -82,16 +82,16 @@ def test_disk_2d_models(disk_settings_session):
     ]
     k_omega_options = models.viscous.k_omega_options
     k_omega_options.kw_low_re_correction = True
-    assert k_omega_options.kw_low_re_correction() == True
+    assert k_omega_options.kw_low_re_correction() is True

     turb_options = models.viscous.options
     turb_options.production_kato_launder_enabled = True
-    assert turb_options.production_kato_launder_enabled() == True
+    assert turb_options.production_kato_launder_enabled() is True
     turb_options.production_limiter.clip_factor = 9
     assert turb_options.production_limiter.clip_factor() == 9

     turb_expert = models.viscous.turbulence_expert
     turb_expert.turb_non_newtonian = True
-    assert turb_expert.turb_non_newtonian() == True
+    assert turb_expert.turb_non_newtonian() is True
     models.viscous.model = "laminar"
     assert models.viscous.model() == "laminar"
diff --git a/tests/test_topy.py b/tests/test_topy.py
index 505d86a0c45..6f813c9f0e3 100644
--- a/tests/test_topy.py
+++ b/tests/test_topy.py
@@ -110,7 +110,7 @@ def test_2_jou():
         + "_"
         + Path(file_name_2).stem.split(".")[0]
         + ".py"
-    )  # noqa: E501
+    )
     gen_file_name = os.path.join(os.getcwd(), gen_file_name)

     with open(gen_file_name) as file:
@@ -162,7 +162,7 @@ def test_2_scm():
         + "_"
         + Path(file_name_2).stem.split(".")[0]
         + ".py"
-    )  # noqa: E501
+    )
     gen_file_name = os.path.join(os.getcwd(), gen_file_name)

     with open(gen_file_name) as file:
diff --git a/tests/test_utils.py b/tests/test_utils.py
index 7cba8f4ba7d..f571375412e 100644
--- a/tests/test_utils.py
+++ b/tests/test_utils.py
@@ -54,7 +54,7 @@ def __call__(self):
     ret = timeout_loop(waiter, timeout=0.2, expected="truthy", idle_period=0.1)
     assert ret is False

-    with pytest.raises(InvalidArgument) as msg:
+    with pytest.raises(InvalidArgument):
         timeout_loop(waiter, timeout=0.2, expected=True, idle_period=0.1)
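A last recurring pattern: results that were assigned only to name a side-effecting call (`solver = ...`, `import_data = ...`, `register_id = ...`) are now either discarded with `_ =` or dropped entirely, so flake8's unused-variable checks stay quiet without `# noqa` suppressions. A short sketch of the two accepted forms (`switch_to_solver` here is a toy stand-in):

```python
def switch_to_solver():
    """Stand-in for a call exercised only for its side effects."""
    return "solver-session"


# A bare call is clearest when the result is irrelevant...
switch_to_solver()

# ...and `_` is the conventional throwaway name when an explicit
# discard reads better at the call site.
_ = switch_to_solver()
```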