diff --git a/.ci/code_generation.py b/.ci/code_generation.py
index e073929848..bcae92a3a3 100644
--- a/.ci/code_generation.py
+++ b/.ci/code_generation.py
@@ -2,8 +2,6 @@
 from ansys.dpf import core
 from ansys.dpf.core.operators import build
-import os
-import glob
 from pathlib import Path
 import shutil
diff --git a/.ci/run_examples.py b/.ci/run_examples.py
index 91d9d56628..b4aeb0ec4b 100644
--- a/.ci/run_examples.py
+++ b/.ci/run_examples.py
@@ -1,5 +1,4 @@
 import os
-import glob
 from pathlib import Path
 import subprocess
 import sys
diff --git a/.ci/run_non_regression_examples.py b/.ci/run_non_regression_examples.py
index fc3dc8fedc..69aa3d3dc4 100644
--- a/.ci/run_non_regression_examples.py
+++ b/.ci/run_non_regression_examples.py
@@ -1,5 +1,4 @@
 import os
-import glob
 from ansys.dpf import core
 import pathlib
 import subprocess
diff --git a/.ci/update_dpf_dependencies.py b/.ci/update_dpf_dependencies.py
index c3426d8a9d..a4e7160d43 100644
--- a/.ci/update_dpf_dependencies.py
+++ b/.ci/update_dpf_dependencies.py
@@ -14,7 +14,6 @@
 """
 import os
-import glob
 from pathlib import Path
 import platform
 import shutil
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index fe93f07e7a..ecc086d361 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -12,7 +12,7 @@ exclude: >
 repos:
 - repo: https://github.com/charliermarsh/ruff-pre-commit
-  rev: v0.7.3
+  rev: v0.8.1
   hooks:
   - id: ruff
     args: [--fix]
diff --git a/examples/09-averaging/00-compute_and_average.py b/examples/09-averaging/00-compute_and_average.py
index 45491e0d45..149dedb0c7 100644
--- a/examples/09-averaging/00-compute_and_average.py
+++ b/examples/09-averaging/00-compute_and_average.py
@@ -98,7 +98,7 @@ def compute_von_mises_then_average(analysis):
     # Create a model from the results of the simulation and retrieve its mesh
     model = dpf.Model(analysis)
-    mesh = model.metadata.meshed_region
+    model.metadata.meshed_region
 
     # Apply the stress operator to obtain the stresses in the body
     stress_op = dpf.operators.result.stress()
@@ -135,7 +135,7 @@ def average_then_compute_von_mises(analysis):
     # Creating the model from the results of the simulation
     model = dpf.Model(analysis)
-    mesh = model.metadata.meshed_region
+    model.metadata.meshed_region
 
     # Retrieving the stresses
     stress_op = dpf.operators.result.stress()
diff --git a/examples/10-mesh_operations/13-nodes_in_local_coordinate_system.py b/examples/10-mesh_operations/13-nodes_in_local_coordinate_system.py
index e6007c3eae..5d1bbe396a 100644
--- a/examples/10-mesh_operations/13-nodes_in_local_coordinate_system.py
+++ b/examples/10-mesh_operations/13-nodes_in_local_coordinate_system.py
@@ -60,7 +60,7 @@
     # Starting with DPF 2025.1.pre1
    cs = dpf.operators.result.coordinate_system()
     cs.inputs.data_sources.connect(model)
-except (KeyError, DPFServerException) as e:
+except (KeyError, DPFServerException):
     # For previous DPF versions
     cs = model.operator(r"mapdl::rst::CS")
diff --git a/pyproject.toml b/pyproject.toml
index e0673b008f..9123204fc8 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -67,7 +67,7 @@ indent-style = "space"
 select = [
 # "E", # pycodestyle, see https://beta.ruff.rs/docs/rules/#pycodestyle-e-w
     "D", # pydocstyle, see https://beta.ruff.rs/docs/rules/#pydocstyle-d
-# "F", # pyflakes, see https://beta.ruff.rs/docs/rules/#pyflakes-f
+    "F", # pyflakes, see https://beta.ruff.rs/docs/rules/#pyflakes-f
 # "I", # isort, see https://beta.ruff.rs/docs/rules/#isort-i
 # "N", # pep8-naming, see https://beta.ruff.rs/docs/rules/#pep8-naming-n
     "PTH", # flake8-use-pathlib, https://beta.ruff.rs/docs/rules/#flake8-use-pathlib-pth
diff --git a/src/ansys/dpf/core/core.py b/src/ansys/dpf/core/core.py
index 56564bfe92..1e224da55b 100644
--- a/src/ansys/dpf/core/core.py
+++ b/src/ansys/dpf/core/core.py
@@ -28,11 +28,10 @@
 import weakref
 from pathlib import Path
 
-from ansys.dpf.core import errors, misc
+from ansys.dpf.core import errors
 from ansys.dpf.core import server as server_module
 from ansys.dpf.core.check_version import version_requires, server_meet_version
 from ansys.dpf.core.runtime_config import (
-    RuntimeClientConfig,
     RuntimeCoreConfig,
 )
 from ansys.dpf.gate import (
@@ -538,9 +537,9 @@ def release_dpf(self):
         Available with server's version starting at 6.0 (Ansys 2023R2).
         """
         if self._server().has_client():
-            error = self._api.data_processing_release_on_client(self._server().client, 1)
+            self._api.data_processing_release_on_client(self._server().client, 1)
         else:
-            error = self._api.data_processing_release(1)
+            self._api.data_processing_release(1)
 
     @version_requires("4.0")
     def get_runtime_core_config(self):
@@ -674,7 +673,7 @@ def download_file(self, server_file_path, to_client_file_path):
             download service only available for server with gRPC communication protocol
             """
             raise errors.ServerTypeError(txt)
-        client_path = self._api.data_processing_download_file(
+        self._api.data_processing_download_file(
             client=self._server().client,
             server_file_path=str(server_file_path),
             to_client_file_path=str(to_client_file_path),
diff --git a/src/ansys/dpf/core/dpf_operator.py b/src/ansys/dpf/core/dpf_operator.py
index 62578d3d0d..dec2542e49 100644
--- a/src/ansys/dpf/core/dpf_operator.py
+++ b/src/ansys/dpf/core/dpf_operator.py
@@ -1010,6 +1010,6 @@ def _write_output_type_to_type(output_type):
     if isinstance(output_type, types):
         try:
             return types_enum_to_types()[output_type]
-        except KeyError as e:
+        except KeyError:
             raise TypeError(f"{output_type} is not an implemented Operator's output")
     return output_type
diff --git a/src/ansys/dpf/core/examples/__init__.py b/src/ansys/dpf/core/examples/__init__.py
index 8ac8884584..15c628d482 100644
--- a/src/ansys/dpf/core/examples/__init__.py
+++ b/src/ansys/dpf/core/examples/__init__.py
@@ -21,8 +21,52 @@
 # SOFTWARE.
"""Provide utility functions for downloading and locating DPF example files.""" -from .examples import * -from .downloads import * +from .examples import get_example_required_minimum_dpf_version, find_files, fluid_axial_model +from .downloads import ( + delete_downloads, + download_transient_result, + download_all_kinds_of_complexity, + download_all_kinds_of_complexity_modal, + download_pontoon, + download_multi_harmonic_result, + download_multi_stage_cyclic_result, + download_sub_file, + download_msup_files_to_dict, + download_average_filter_plugin, + download_distributed_files, + download_fluent_multi_species, + download_fluent_multi_phase, + download_extrapolation_3d_result, + download_extrapolation_2d_result, + download_easy_statistics, + download_gltf_plugin, + download_hemisphere, + download_example_asme_result, + download_crankshaft, + download_piston_rod, + download_d3plot_beam, + download_binout_matsum, + download_binout_glstat, + download_cycles_to_failure, + download_modal_frame, + download_harmonic_clamped_pipe, + download_modal_cyclic, + download_fluent_axial_comp, + download_fluent_mixing_elbow_steady_state, + download_fluent_mixing_elbow_transient, + download_cfx_heating_coil, + download_cfx_mixing_elbow, + find_simple_bar, + find_static_rst, + find_complex_rst, + find_multishells_rst, + find_electric_therm, + find_steady_therm, + find_transient_therm, + find_msup_transient, + find_simple_cyclic, + find_distributed_msup_folder, +) # called if module. fails @@ -68,3 +112,54 @@ def __getattr__(name): distributed_msup_folder = find_distributed_msup_folder() return distributed_msup_folder raise AttributeError(f"module {__name__!r} has no attribute {name!r}") + + +__all__ = [ + "download_all_kinds_of_complexity", + "download_all_kinds_of_complexity_modal", + "get_example_required_minimum_dpf_version", + "find_files", + "fluid_axial_model", + "download_all_kinds_of_complexity", + "download_modal_frame", + "download_transient_result", + "download_multi_stage_cyclic_result", + "download_fluent_mixing_elbow_steady_state", + "download_fluent_multi_species", + "download_harmonic_clamped_pipe", + "download_binout_glstat", + "download_fluent_axial_comp", + "download_d3plot_beam", + "download_multi_harmonic_result", + "find_simple_bar", + "find_static_rst", + "find_complex_rst", + "find_multishells_rst", + "find_electric_therm", + "find_steady_therm", + "find_transient_therm", + "find_msup_transient", + "find_simple_cyclic", + "find_distributed_msup_folder", + "download_average_filter_plugin", + "delete_downloads", + "download_cfx_mixing_elbow", + "download_cfx_heating_coil", + "download_modal_cyclic", + "download_crankshaft", + "download_example_asme_result", + "download_piston_rod", + "download_fluent_mixing_elbow_transient", + "download_easy_statistics", + "download_gltf_plugin", + "download_fluent_multi_phase", + "download_pontoon", + "download_binout_matsum", + "download_cycles_to_failure", + "download_distributed_files", + "download_hemisphere", + "download_sub_file", + "download_extrapolation_3d_result", + "download_extrapolation_2d_result", + "download_msup_files_to_dict", +] diff --git a/src/ansys/dpf/core/geometry.py b/src/ansys/dpf/core/geometry.py index 073a3a6a8f..5b87177ed7 100644 --- a/src/ansys/dpf/core/geometry.py +++ b/src/ansys/dpf/core/geometry.py @@ -92,7 +92,7 @@ def __str__(self): """Print Points information.""" txt = "DPF Points object:\n" txt += f"Number of points: {self.n_points}\n" - txt += f"Coordinates:\n" + txt += "Coordinates:\n" for point in 
             txt += f" {point}\n"
         return txt
diff --git a/src/ansys/dpf/core/help.py b/src/ansys/dpf/core/help.py
index f47bea9d3e..eba000b6dc 100644
--- a/src/ansys/dpf/core/help.py
+++ b/src/ansys/dpf/core/help.py
@@ -123,7 +123,7 @@ def _sum_oper(oper):
     if oper.physics_name:
         field._name = f"Sum of {field.physics_name}"
     else:
-        field._name = f"Sum"
+        field._name = "Sum"
     field._unit = field._unit
     return field
diff --git a/src/ansys/dpf/core/inputs.py b/src/ansys/dpf/core/inputs.py
index cde64a9a78..dbdfbb1bda 100644
--- a/src/ansys/dpf/core/inputs.py
+++ b/src/ansys/dpf/core/inputs.py
@@ -240,7 +240,6 @@ def connect(self, inpt):
         elif isinstance(inpt, Path):
             inpt = str(inpt)
 
-        input_type_name = type(inpt).__name__
         for input_pin in self._inputs:
             self._operator()._find_outputs_corresponding_pins(
                 input_pin._python_expected_types,
diff --git a/src/ansys/dpf/core/misc.py b/src/ansys/dpf/core/misc.py
index 13ccee9d62..0bf46ce74c 100644
--- a/src/ansys/dpf/core/misc.py
+++ b/src/ansys/dpf/core/misc.py
@@ -23,7 +23,6 @@
 """Miscellaneous functions for the DPF module."""
 
 import platform
-import glob
 import os
 import re
 from pathlib import Path
diff --git a/src/ansys/dpf/core/nodes.py b/src/ansys/dpf/core/nodes.py
index 241c142571..cdf2502d7d 100644
--- a/src/ansys/dpf/core/nodes.py
+++ b/src/ansys/dpf/core/nodes.py
@@ -25,7 +25,6 @@
 import numpy as np
 from ansys.dpf.core.common import nodal_properties, locations
 from ansys.dpf.core.check_version import version_requires
-from ansys.dpf.core.check_version import version_requires
 
 
 class Node:
diff --git a/src/ansys/dpf/core/path_utilities.py b/src/ansys/dpf/core/path_utilities.py
index 844afd71fe..a290a304a1 100644
--- a/src/ansys/dpf/core/path_utilities.py
+++ b/src/ansys/dpf/core/path_utilities.py
@@ -27,8 +27,6 @@ server into account to create path.
""" -import os - import ansys.dpf.core.server_types from ansys.dpf.core import server as server_module from pathlib import Path diff --git a/src/ansys/dpf/core/plotter.py b/src/ansys/dpf/core/plotter.py index 9c957c4198..d41c9e6b53 100644 --- a/src/ansys/dpf/core/plotter.py +++ b/src/ansys/dpf/core/plotter.py @@ -31,7 +31,6 @@ from __future__ import annotations import tempfile -import os import sys import numpy as np import warnings diff --git a/src/ansys/dpf/core/result_info.py b/src/ansys/dpf/core/result_info.py index 14f5a7f4b8..29082f14ed 100644 --- a/src/ansys/dpf/core/result_info.py +++ b/src/ansys/dpf/core/result_info.py @@ -599,7 +599,7 @@ def __len__(self): """ try: return self.n_results - except Exception as e: + except Exception: return 0 def __iter__(self): diff --git a/src/ansys/dpf/core/server.py b/src/ansys/dpf/core/server.py index 81888c4435..6a7c8172b6 100644 --- a/src/ansys/dpf/core/server.py +++ b/src/ansys/dpf/core/server.py @@ -40,7 +40,7 @@ from ansys import dpf -from ansys.dpf.core.misc import is_ubuntu, get_ansys_path +from ansys.dpf.core.misc import get_ansys_path from ansys.dpf.core import errors from ansys.dpf.core.server_factory import ( diff --git a/src/ansys/dpf/core/server_factory.py b/src/ansys/dpf/core/server_factory.py index 6268767bf4..84047439fd 100644 --- a/src/ansys/dpf/core/server_factory.py +++ b/src/ansys/dpf/core/server_factory.py @@ -34,7 +34,6 @@ import io from ansys.dpf.gate.load_api import ( - _get_path_in_install, _find_outdated_ansys_version, ) @@ -317,7 +316,7 @@ def __str__(self): """ text = f"Server configuration: protocol={self.protocol}" if self.legacy: - text += f" (legacy gRPC)" + text += " (legacy gRPC)" return text def __eq__(self, other: "ServerConfig"): @@ -398,9 +397,9 @@ def get_default_server_config( config = AvailableServerConfigs.LegacyGrpcServer else: raise NotImplementedError( - f"DPF_SERVER_TYPE environment variable must " - f"be set to one of the following: INPROCESS, " - f"GRPC, LEGACYGRPC." + "DPF_SERVER_TYPE environment variable must " + "be set to one of the following: INPROCESS, " + "GRPC, LEGACYGRPC." 
             )
     elif config is None and docker_config.use_docker:
         config = get_default_remote_server_config()
diff --git a/src/ansys/dpf/core/server_types.py b/src/ansys/dpf/core/server_types.py
index b032999a56..77751a7f6b 100644
--- a/src/ansys/dpf/core/server_types.py
+++ b/src/ansys/dpf/core/server_types.py
@@ -72,7 +72,7 @@ def _get_dll_path(name, ansys_path=None):
     ANSYS_INSTALL = Path(core.misc.get_ansys_path(ansys_path))
     api_path = load_api._get_path_in_install()
     if api_path is None:
-        raise ImportError(f"Could not find API path in install.")
+        raise ImportError("Could not find API path in install.")
     SUB_FOLDERS = ANSYS_INSTALL / api_path
     if ISPOSIX:
         name = "lib" + name
diff --git a/src/ansys/dpf/core/unit_system.py b/src/ansys/dpf/core/unit_system.py
index f6f318ff65..bd18757ffd 100644
--- a/src/ansys/dpf/core/unit_system.py
+++ b/src/ansys/dpf/core/unit_system.py
@@ -152,5 +152,5 @@ class unit_systems:
         solver_bft = UnitSystem("solver_bft", ID=7)
         solver_bin = UnitSystem("solver_bin", ID=8)
         undefined = UnitSystem("undefined", ID=-1)
-    except dpf_errors.DpfVersionNotSupported as e:  # pragma: no cover
+    except dpf_errors.DpfVersionNotSupported:  # pragma: no cover
         pass
diff --git a/src/ansys/dpf/core/workflow_topology/__init__.py b/src/ansys/dpf/core/workflow_topology/__init__.py
index 013884911d..1188512e33 100644
--- a/src/ansys/dpf/core/workflow_topology/__init__.py
+++ b/src/ansys/dpf/core/workflow_topology/__init__.py
@@ -25,3 +25,5 @@
 from .operator_connection import OperatorConnection
 from .data_connection import DataConnection
 from .exposed_pin import ExposedPin
+
+__all__ = ["WorkflowTopology", "OperatorConnection", "DataConnection", "ExposedPin"]
diff --git a/tests/test_animation.py b/tests/test_animation.py
index a5662ced49..993a6bbced 100644
--- a/tests/test_animation.py
+++ b/tests/test_animation.py
@@ -20,7 +20,6 @@
 # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 # SOFTWARE.
 
-import os
 from pathlib import Path
 import pytest
diff --git a/tests/test_animator.py b/tests/test_animator.py
index 4986546ab6..84cd52ccc3 100644
--- a/tests/test_animator.py
+++ b/tests/test_animator.py
@@ -20,7 +20,6 @@
 # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 # SOFTWARE.
 
-import os
 from pathlib import Path
 import pytest
diff --git a/tests/test_codegeneration.py b/tests/test_codegeneration.py
index 8c90344f8a..30001414e5 100644
--- a/tests/test_codegeneration.py
+++ b/tests/test_codegeneration.py
@@ -21,7 +21,6 @@
 # SOFTWARE.
 
 # -*- coding: utf-8 -*-
-import os
 import copy
 import tempfile
 from pathlib import Path
diff --git a/tests/test_cyclic_support.py b/tests/test_cyclic_support.py
index 31ca521870..f9844edd2a 100644
--- a/tests/test_cyclic_support.py
+++ b/tests/test_cyclic_support.py
@@ -27,7 +27,6 @@
 import conftest
 import pytest
 
-from ansys import dpf
 from ansys.dpf import core as dpf
 
@@ -105,7 +104,7 @@ def test_cyc_support_from_to_operator(cyclic_lin_rst, server_type):
         cyclic_support=cyc_support, server=server_type
     )
     exp = op.outputs.cyclic_support()
-    mesh = op.outputs.meshed_region()
+    op.outputs.meshed_region()
     assert exp.num_sectors() == 15
     assert exp.num_stages == 1
     assert np.allclose(
@@ -145,7 +144,7 @@ def test_cyc_support_from_to_workflow(cyclic_lin_rst, server_type):
     wf.set_output_name("sup", op.outputs.cyclic_support)
     wf.connect("sup", cyc_support)
     exp = wf.get_output("sup", dpf.types.cyclic_support)
-    mesh = op.outputs.meshed_region()
+    op.outputs.meshed_region()
     assert exp.num_sectors() == 15
     assert exp.num_stages == 1
     assert np.allclose(
@@ -248,14 +247,14 @@ def test_delete_auto_cyc_support(cyclic_lin_rst):
 def test_cyc_support_memory_leaks(cyclic_lin_rst):
     import gc
 
-    for i in range(2000):
+    for _ in range(2000):
         gc.collect()
         data_sources = dpf.DataSources(cyclic_lin_rst)
         model = dpf.Model(data_sources)
         result_info = model.metadata.result_info
         cyc_support = result_info.cyclic_support
-        a = cyc_support.num_stages
-        b = cyc_support.num_sectors()
-        c = cyc_support.sectors_set_for_expansion()
-        d = cyc_support.base_elements_scoping()
-        e = cyc_support.base_nodes_scoping()
+        cyc_support.num_stages
+        cyc_support.num_sectors()
+        cyc_support.sectors_set_for_expansion()
+        cyc_support.base_elements_scoping()
+        cyc_support.base_nodes_scoping()
diff --git a/tests/test_data_tree.py b/tests/test_data_tree.py
index b3226ca153..4aacef52d4 100644
--- a/tests/test_data_tree.py
+++ b/tests/test_data_tree.py
@@ -21,7 +21,6 @@
 # SOFTWARE.
 
 from ansys.dpf import core as dpf
-import os
 import pytest
 import conftest
 from pathlib import Path
@@ -94,7 +93,7 @@ def test_add_dict_data_tree(server_type):
 @conftest.raises_for_servers_version_under("4.0")
 def test_add_data_to_fill_data_tree():
     data_tree = dpf.DataTree()
-    with data_tree.to_fill() as to_fill:
+    with data_tree.to_fill() as _:
         data_tree.int = 1
         data_tree.double = 1.0
         data_tree.string = "hello"
@@ -411,7 +410,7 @@ def test_attribute_errors_data_tree(server_type):
 @conftest.raises_for_servers_version_under("4.0")
 def test_add_data_bool_data_tree():
     data_tree = dpf.DataTree()
-    with data_tree.to_fill() as to_fill:
+    with data_tree.to_fill() as _:
         data_tree.int = 1
         data_tree.bool = True
     assert data_tree.get_as("int", dpf.types.int) == 1
diff --git a/tests/test_datasources.py b/tests/test_datasources.py
index 1c06c110d5..71a9add114 100644
--- a/tests/test_datasources.py
+++ b/tests/test_datasources.py
@@ -130,10 +130,10 @@ def test_print_data_sources(allkindofcomplexity, server_type):
 
 def test_data_sources_from_data_sources(allkindofcomplexity, server_type):
     with pytest.raises(ValueError) as e:
-        data_sources_false = dpf.core.DataSources(data_sources="Wrong Input", server=server_type)
+        dpf.core.DataSources(data_sources="Wrong Input", server=server_type)
     assert "gRPC data sources" in e
     data_sources = dpf.core.DataSources(server=server_type)
-    data_sources2 = dpf.core.DataSources(data_sources=data_sources, server=server_type)
+    dpf.core.DataSources(data_sources=data_sources, server=server_type)
 
 
 @pytest.mark.skipif(
diff --git a/tests/test_field.py b/tests/test_field.py
index 9fd6d3544a..c19a5a8e3e 100644
--- a/tests/test_field.py
+++ b/tests/test_field.py
@@ -976,31 +976,6 @@ def get_simple_field(server_clayer):
     return field
 
 
-@conftest.raises_for_servers_version_under("4.0")
-def test_mutable_entity_data_contiguous_field(server_clayer):
-    simple_field = get_simple_field(server_clayer)
-    vec = simple_field.get_entity_data(0)
-    assert np.allclose(vec, np.array(range(0, 6)))
-
-    vec[0][0] = 1
-    vec[0][5] = 4
-
-    assert np.allclose(vec, np.array([1, 1, 2, 3, 4, 4]))
-
-    vec.commit()
-
-    assert np.allclose(simple_field.get_entity_data(0), np.array([1, 1, 2, 3, 4, 4]))
-
-    vec = simple_field.get_entity_data_by_id(2)
-    assert np.allclose(vec, np.array(range(6, 12)))
-
-    vec[0][0] = 1
-    vec[0][5] = 4
-    assert np.allclose(vec, np.array([1, 7, 8, 9, 10, 4]))
-    vec = None
-    assert np.allclose(simple_field.get_entity_data_by_id(2), np.array([1, 7, 8, 9, 10, 4]))
-
-
 @pytest.mark.skipif(
     not conftest.SERVERS_VERSION_GREATER_THAN_OR_EQUAL_TO_5_0,
     reason="change in memory ownership in server 5.0",
diff --git a/tests/test_generic_data_container.py b/tests/test_generic_data_container.py
index d9de81feeb..5c28814bcf 100644
--- a/tests/test_generic_data_container.py
+++ b/tests/test_generic_data_container.py
@@ -24,7 +24,6 @@
 from ansys.dpf import core as dpf
 
 from conftest import (
-    SERVERS_VERSION_GREATER_THAN_OR_EQUAL_TO_7_0,
     SERVERS_VERSION_GREATER_THAN_OR_EQUAL_TO_8_0,
     raises_for_servers_version_under,
 )
diff --git a/tests/test_local_server.py b/tests/test_local_server.py
index 2acb6741a9..73ba24b05f 100644
--- a/tests/test_local_server.py
+++ b/tests/test_local_server.py
@@ -227,113 +227,3 @@ def test_dot_operator_server_fields_container(local_server):
     out = add.outputs.fields_container()
     assert np.allclose(out[0].scoping.ids, [1, 2])
     assert np.allclose(out[0].data, -field.data)
-
-
-def test_add_operator_server_fields_container(local_server):
-    field = dpf.core.fields_factory.create_3d_vector_field(2, server=local_server)
-    field.data = [0.0, 1.0, 2.0, 3.0, 4.0, 5.0]
-    field.scoping.ids = [1, 2]
-
-    fc = dpf.core.fields_container_factory.over_time_freq_fields_container(
-        [field, field], server=local_server
-    )
-
-    # operator with field out
-    forward = ops.utility.forward_field(field, server=local_server)
-    add = fc + forward
-    assert type(add) == ops.math.add_fc
-    out = add.outputs.fields_container()
-    assert len(out) == 2
-    assert np.allclose(out[0].scoping.ids, [1, 2])
-    assert np.allclose(out[0].data, np.array(field.data) * 2.0)
-
-    # fc + list
-    add = fc + [0.0, 1.0, 2.0]
-    assert type(add) == ops.math.add_fc
-    out = add.outputs.fields_container()
-    assert len(out) == 2
-    assert np.allclose(out[0].scoping.ids, [1, 2])
-    assert np.allclose(out[0].data, field.data + np.array([[0.0, 1.0, 2.0], [0.0, 1.0, 2.0]]))
-
-    # fc + float
-    add = fc + 1.0
-    assert type(add) == ops.math.add_fc
-    out = add.outputs.fields_container()
-    assert np.allclose(out[0].scoping.ids, [1, 2])
-    assert np.allclose(out[0].data, np.array([[1.0, 2.0, 3.0], [4.0, 5.0, 6.0]]))
-
-
-def test_minus_operator_server_fields_container(local_server):
-    field = dpf.core.fields_factory.create_3d_vector_field(2, server=local_server)
-    field.data = [0.0, 1.0, 2.0, 3.0, 4.0, 5.0]
-    field.scoping.ids = [1, 2]
-
-    fc = dpf.core.fields_container_factory.over_time_freq_fields_container(
-        [field, field], server=local_server
-    )
-
-    # operator with field out
-    forward = ops.utility.forward_field(field, server=local_server)
-    add = fc - forward
-    assert type(add) == ops.math.minus_fc
-    out = add.outputs.fields_container()
-    assert len(out) == 2
-    assert np.allclose(out[0].scoping.ids, [1, 2])
-    assert np.allclose(out[0].data, np.zeros((2, 3)))
-
-    # fc - list
-    add = fc - [0.0, 1.0, 2.0]
-    assert type(add) == ops.math.minus_fc
-    out = add.outputs.fields_container()
-    assert len(out) == 2
-    assert np.allclose(out[0].scoping.ids, [1, 2])
-    assert np.allclose(out[0].data, np.array([[0.0, 0.0, 0.0], [3.0, 3.0, 3.0]]))
-
-    # fc - float
-    add = fc - 1.0
-    assert type(add) == ops.math.minus_fc
-    out = add.outputs.fields_container()
-    assert np.allclose(out[0].scoping.ids, [1, 2])
-    assert np.allclose(out[0].data, np.array([[-1.0, 0.0, 1.0], [2.0, 3.0, 4.0]]))
-
-
-def test_dot_operator_server_fields_container(local_server):
-    field = dpf.core.fields_factory.create_3d_vector_field(2, server=local_server)
-    field.data = [0.0, 1.0, 2.0, 3.0, 4.0, 5.0]
-    field.scoping.ids = [1, 2]
-
-    fc = dpf.core.fields_container_factory.over_time_freq_fields_container(
-        [field, field], server=local_server
-    )
-
-    # fc * op
-    forward = ops.utility.forward_field(field, server=local_server)
-    add = fc * forward
-    assert type(add) == ops.math.generalized_inner_product_fc
-    out = add.outputs.fields_container()
-    assert len(out) == 2
-    assert np.allclose(out[0].scoping.ids, [1, 2])
-    assert np.allclose(out[0].data, np.array([5.0, 50.0]))
-
-    # fc * field
-    add = fc * field
-    assert type(add) == ops.math.generalized_inner_product_fc
-    out = add.outputs.fields_container()
-    assert len(out) == 2
-    assert np.allclose(out[0].scoping.ids, [1, 2])
-    assert np.allclose(out[0].data, np.array([5.0, 50.0]))
-
-    # fc * list
-    add = fc * [0.0, 1.0, 2.0]
-    assert type(add) == ops.math.generalized_inner_product_fc
-    out = add.outputs.fields_container()
-    assert len(out) == 2
-    assert np.allclose(out[0].scoping.ids, [1, 2])
-    assert np.allclose(out[0].data, np.array([5.0, 14.0]))
-
-    # fc * float
-    add = fc * -1.0
-    assert type(add) == ops.math.generalized_inner_product_fc
-    out = add.outputs.fields_container()
-    assert np.allclose(out[0].scoping.ids, [1, 2])
-    assert np.allclose(out[0].data, -field.data)
diff --git a/tests/test_model.py b/tests/test_model.py
index 54567769ce..ebfc4d5ea7 100644
--- a/tests/test_model.py
+++ b/tests/test_model.py
@@ -234,7 +234,7 @@ def test_result_not_dynamic(plate_msup):
     assert len(fc) == 2
     assert np.allclose(fc.time_freq_support.time_frequencies.data, np.array([0.115, 0.125]))
     assert fc[0].unit == "Pa"
-    dis = model.results.displacement().eval()
+    model.results.displacement().eval()
     dpf.core.settings.set_dynamic_available_results_capability(True)
diff --git a/tests/test_multi_server.py b/tests/test_multi_server.py
index 53e5ee21e0..3bf67bef59 100644
--- a/tests/test_multi_server.py
+++ b/tests/test_multi_server.py
@@ -192,7 +192,7 @@ def test_model_cyc_support_multi_server(cyc_models):
 
 def test_model_displacement_multi_server(transient_models):
-    tf = transient_models[0].metadata.time_freq_support
+    transient_models[0].metadata.time_freq_support
     time_scoping = range(1, 3)
     disp = transient_models[0].results.displacement()
     disp.inputs.time_scoping(time_scoping)
@@ -226,7 +226,7 @@ def check_fc(fc, fc2):
 
 def test_model_stress_multi_server(transient_models):
-    tf = transient_models[0].metadata.time_freq_support
+    transient_models[0].metadata.time_freq_support
     time_scoping = range(1, 3)
     disp = transient_models[0].results.stress()
     disp.inputs.time_scoping(time_scoping)
diff --git a/tests/test_operator.py b/tests/test_operator.py
index 380c5edbd7..309fc4432b 100644
--- a/tests/test_operator.py
+++ b/tests/test_operator.py
@@ -23,7 +23,6 @@
 import gc
 import os
 import shutil
-import types
 import weakref
 from pathlib import Path
@@ -578,7 +577,7 @@ def test_inputs_outputs_meshes_container(allkindofcomplexity):
     opsc = dpf.core.Operator("scoping::by_property")
     opsc.inputs.mesh.connect(model.metadata.meshed_region)
-    sc = opsc.outputs.mesh_scoping()
+    opsc.outputs.mesh_scoping()
 
     stress = model.results.stress()
     stress.inputs.connect(op.outputs)
@@ -843,17 +842,6 @@ def test_connect_result2(plate_msup, server_type):
     assert len(out) == len(out2)
 
 
-@pytest.mark.skipif(
-    not conftest.SERVERS_VERSION_GREATER_THAN_OR_EQUAL_TO_3_0,
-    reason="Bug in server version lower than 3.0",
-)
-def test_connect_get_output_int_list_operator(server_type):
-    d = list(range(0, 1000000))
-    op = dpf.core.operators.utility.forward(d, server=server_type)
-    d_out = op.get_output(0, dpf.core.types.vec_int)
-    assert np.allclose(d, d_out)
-
-
 @pytest.mark.skipif(
     not conftest.SERVERS_VERSION_GREATER_THAN_OR_EQUAL_TO_3_0,
     reason="Bug in server version lower than 3.0",
@@ -1171,7 +1159,7 @@ def test_get_static_spec_operator_in_proc(server_clayer):
         spec = dpf.core.Operator.operator_specification(name, server=server_clayer)
         assert len(spec.operator_name) > 0
         l = len(spec.inputs)
-        d = spec.description
+        spec.description
 
 
 @conftest.raises_for_servers_version_under("3.0")
@@ -1206,40 +1194,6 @@ def test_with_progress_operator_in_proc(allkindofcomplexity, server_clayer):
     assert len(fc) == 2
 
 
-@conftest.raises_for_servers_version_under("3.0")
-def test_list_operators(server_type_legacy_grpc):
-    l = dpf.core.dpf_operator.available_operator_names(server=server_type_legacy_grpc)
-    assert len(l) > 400
-    assert "merge::result_info" in l
-    assert "unit_convert" in l
-    assert "stream_provider" in l
-
-
-@conftest.raises_for_servers_version_under("3.0")
-def test_get_static_spec_operator(server_type_legacy_grpc):
-    l = dpf.core.dpf_operator.available_operator_names(server=server_type_legacy_grpc)
-    for i, name in enumerate(l):
-        spec = dpf.core.Operator.operator_specification(name, server=server_type_legacy_grpc)
-        assert len(spec.operator_name) > 0
-        assert len(spec.inputs) > 0
-        assert len(spec.description) > 0
-
-
-@conftest.raises_for_servers_version_under("3.0")
-def test_with_progress_operator(allkindofcomplexity, server_type):
-    model = dpf.core.Model(allkindofcomplexity, server=server_type)
-    op = model.results.stress()
-    op.inputs.read_cyclic(3)
-    opnorm = dpf.core.operators.averaging.to_nodal_fc(op, server=server_type)
-    add = dpf.core.operators.math.add_fc(opnorm, opnorm, server=server_type)
-    add2 = dpf.core.operators.math.add_fc(add, add, server=server_type)
-    add3 = dpf.core.operators.math.add_fc(add2, server=server_type)
-    add4 = dpf.core.operators.math.add_fc(add3, add3, server=server_type)
-    add4.progress_bar = True
-    fc = add4.outputs.fields_container()
-    assert len(fc) == 2
-
-
 def test_operator_specification_simple(server_type):
     spec = Specification(operator_name="U", server=server_type)
     assert "displacement" in spec.description
@@ -1323,7 +1277,7 @@ def test_delete_operator(server_type):
 
 def test_memory_outputs_operator(allkindofcomplexity):
     model = dpf.core.Model(allkindofcomplexity)
-    mesh = model.metadata.meshed_region
+    model.metadata.meshed_region
     stress_fc = model.results.stress().eqv().eval()
     assert len(stress_fc) == 2
diff --git a/tests/test_plotter.py b/tests/test_plotter.py
index 8129a87744..587bab869a 100644
--- a/tests/test_plotter.py
+++ b/tests/test_plotter.py
@@ -127,7 +127,7 @@ def test_plotter_on_fields_container_elemental(allkindofcomplexity):
     avg_op.inputs.fields_container.connect(stress.outputs.fields_container)
     fc = avg_op.outputs.fields_container()
     pl = Plotter(model.metadata.meshed_region)
-    cpos = pl.plot_contour(fc)
+    pl.plot_contour(fc)
 
 
 @pytest.mark.skipif(not HAS_PYVISTA, reason="Please install pyvista")
@@ -139,7 +139,7 @@ def test_plotter_on_fields_container_nodal(allkindofcomplexity):
     avg_op.inputs.fields_container.connect(stress.outputs.fields_container)
     fc = avg_op.outputs.fields_container()
     pl = Plotter(model.metadata.meshed_region)
-    cpos = pl.plot_contour(fc)
+    pl.plot_contour(fc)
 
 
 @pytest.mark.skipif(not HAS_PYVISTA, reason="Please install pyvista")
@@ -169,7 +169,7 @@ def test_fields_container_plot(allkindofcomplexity):
 @pytest.mark.skipif(not HAS_PYVISTA, reason="Please install pyvista")
 def test_field_elemental_plot(allkindofcomplexity):
     model = Model(allkindofcomplexity)
-    mesh = model.metadata.meshed_region
+    model.metadata.meshed_region
     stress = model.results.stress()
     stress.inputs.requested_location.connect("Elemental")
     avg_op = Operator("to_elemental_fc")
@@ -182,7 +182,7 @@ def test_field_nodal_plot(allkindofcomplexity):
     model = Model(allkindofcomplexity)
-    mesh = model.metadata.meshed_region
+    model.metadata.meshed_region
     stress = model.results.stress()
     stress.inputs.requested_location.connect("Elemental")
     avg_op = Operator("to_nodal_fc")
@@ -200,7 +200,7 @@
 @pytest.mark.skipif(not HAS_PYVISTA, reason="Please install pyvista")
 def test_field_solid_plot(allkindofcomplexity):
     model = Model(allkindofcomplexity)
-    mesh = model.metadata.meshed_region
+    model.metadata.meshed_region
     stress = model.results.stress()
     stress.inputs.requested_location.connect("Nodal")
     fc = stress.outputs.fields_container()
@@ -211,7 +211,7 @@ def test_field_shell_plot(allkindofcomplexity):
     model = Model(allkindofcomplexity)
-    mesh = model.metadata.meshed_region
+    model.metadata.meshed_region
     stress = model.results.stress()
     stress.inputs.requested_location.connect("Nodal")
     fc = stress.outputs.fields_container()
@@ -222,7 +222,7 @@ def test_field_solid_plot_scoping_nodal(multishells):
     model = core.Model(multishells)
-    mesh = model.metadata.meshed_region
+    model.metadata.meshed_region
     stress = model.results.stress()
     stress.inputs.requested_location.connect("Nodal")
     scoping = core.Scoping()
@@ -240,7 +240,7 @@ def test_field_shell_plot_scoping_elemental(multishells):
     model = core.Model(multishells)
-    mesh = model.metadata.meshed_region
+    model.metadata.meshed_region
     stress = model.results.stress()
     scoping = core.Scoping()
     scoping.location = "Elemental"
diff --git a/tests/test_plugins.py b/tests/test_plugins.py
index 7ba5bbadec..d80edadb0d 100644
--- a/tests/test_plugins.py
+++ b/tests/test_plugins.py
@@ -125,7 +125,7 @@ def test_vtk(server_type, tmpdir):
     op.inputs.fields1.connect(u)
     op.inputs.mesh.connect(model.metadata.mesh_provider)
     op.inputs.directory.connect(str(Path(rst_file).parent))
-    out_path = op.eval()
+    # out_path = op.eval()
     # assert out_path.result_files is not []
     # try:
     #     out_path = dpf.core.download_file(
diff --git a/tests/test_python_plugins.py b/tests/test_python_plugins.py
index 25d75b39d1..aa222e0531 100644
--- a/tests/test_python_plugins.py
+++ b/tests/test_python_plugins.py
@@ -21,7 +21,6 @@
 # SOFTWARE.
 
 import pytest
-import os
 from pathlib import Path
 import platform
 import numpy as np
diff --git a/tests/test_resultinfo.py b/tests/test_resultinfo.py
index dd618b7d64..8c64bdb331 100644
--- a/tests/test_resultinfo.py
+++ b/tests/test_resultinfo.py
@@ -180,8 +180,8 @@ def test_result_info_memory_leaks(model):
         res = metadata.result_info
         # Still leaking, but maybe from the Operator.connect
         # in Metadata._load_result_info()
-        u = res.unit_system_name
-        c = res.cyclic_support
+        res.unit_system_name
+        res.cyclic_support
         # v = res.solver_version
         # date = res.solver_date
         # time = res.solver_time
diff --git a/tests/test_scoping.py b/tests/test_scoping.py
index d8378ad15f..9ce610b560 100644
--- a/tests/test_scoping.py
+++ b/tests/test_scoping.py
@@ -163,7 +163,7 @@ def test_delete_scoping(server_type):
     scop = Scoping(server=server_type)
     del scop
     with pytest.raises(Exception):
-        scop.ids
+        scop.ids  # noqa: F821
 
 
 @pytest.mark.skipif(
diff --git a/tests/test_server.py b/tests/test_server.py
index b852f583f7..67d506eaff 100644
--- a/tests/test_server.py
+++ b/tests/test_server.py
@@ -160,13 +160,13 @@ def test_available_api_types(self, server_config):
         set_server_configuration(server_config)
         server = get_or_create_server(None)
         assert has_local_server()
-        types = server.available_api_types
+        server.available_api_types
 
     def test_client(self, server_config):
         set_server_configuration(server_config)
         server = get_or_create_server(None)
         assert has_local_server()
-        client = server.client
+        server.client
 
 
 @pytest.mark.skipif(
diff --git a/tests/test_service.py b/tests/test_service.py
index c246aa2b38..ba7866c65c 100644
--- a/tests/test_service.py
+++ b/tests/test_service.py
@@ -134,7 +134,7 @@ def test_download_with_subdir(multishells, tmpdir, server_type_remote_process):
     filename = ntpath.basename(file)
     parent_path = str(Path(file).parent)
     to_server_path = parent_path + separator + "subdir" + separator + filename
-    subdir_filepath = dpf.core.upload_file(file, to_server_path, server=server_type_remote_process)
+    dpf.core.upload_file(file, to_server_path, server=server_type_remote_process)
     folder = parent_path
     _ = dpf.core.download_files_in_folder(folder, str(tmpdir), server=server_type_remote_process)
@@ -172,7 +172,7 @@ def test_downloadinfolder_uploadinfolder(multishells, tmpdir, server_type_remote
     # download it
     new_tmpdir = tmpdir / "my_tmp_dir"
     new_tmpdir.mkdir()
-    out = dpf.core.download_files_in_folder(
+    _ = dpf.core.download_files_in_folder(
         TARGET_PATH, str(new_tmpdir), server=server_type_remote_process
     )
     # check if the architecture of the download is ok
@@ -520,7 +520,7 @@ def test_server_without_context(remote_config_server_type):
 )
 @conftest.raises_for_servers_version_under("6.0")
 def test_release_dpf(server_type):
-    op = dpf.core.Operator("expansion::modal_superposition", server=server_type)
+    dpf.core.Operator("expansion::modal_superposition", server=server_type)
     server_type.release()
 
     with pytest.raises((KeyError, dpf.core.errors.DPFServerException)):
@@ -536,7 +536,7 @@ def test_license_context_manager_as_context(server_type):
     op.inputs.field(field)
     op.inputs.threshold(0.0)
     with dpf.core.LicenseContextManager(server=server_type) as lic:
-        out = op.outputs.field()
+        op.outputs.field()
         st = lic.status
     assert len(st) != 0
@@ -545,7 +545,7 @@ def test_license_context_manager_as_context(server_type):
     lic = dpf.core.LicenseContextManager(server=server_type)
     op.inputs.field(field)
     op.inputs.threshold(0.0)
-    out = op.outputs.field()
+    op.outputs.field()
     new_st = lic.status
     assert str(new_st) == str(st)
     lic = None
@@ -556,7 +556,7 @@ def test_license_context_manager_as_context(server_type):
     with dpf.core.LicenseContextManager(
         increment_name="ansys", license_timeout_in_seconds=1.0, server=server_type
     ) as lic:
-        out = op.outputs.field()
+        op.outputs.field()
         st = lic.status
     assert "ansys" in st
     st = lic.status
diff --git a/tests/test_timefreqsupport.py b/tests/test_timefreqsupport.py
index 75e2447dfd..cdf008e1da 100644
--- a/tests/test_timefreqsupport.py
+++ b/tests/test_timefreqsupport.py
@@ -333,13 +333,13 @@ def test_timefreqsupport_memory_leaks():
     frequencies = fields_factory.create_scalar_field(3, server=server)
     frequencies.data = [0.1, 0.32, 0.4]
     tfq.time_frequencies = frequencies
-    frequencies_check = tfq.time_frequencies  # Call to get
+    tfq.time_frequencies  # Call to get
     tfq.complex_frequencies = frequencies
-    frequencies_cplx_check = tfq.complex_frequencies  # Call to get
+    tfq.complex_frequencies  # Call to get
     tfq.rpms = frequencies
-    rpm_check = tfq.rpms  # Call to get
+    tfq.rpms  # Call to get
     tfq.set_harmonic_indices(frequencies)
-    harm_check = tfq.get_harmonic_indices()  # Call to get
+    tfq.get_harmonic_indices()  # Call to get
 
 
 @conftest.raises_for_servers_version_under("5.0")
diff --git a/tests/test_workflow.py b/tests/test_workflow.py
index 505965f06f..07ff18e1f0 100644
--- a/tests/test_workflow.py
+++ b/tests/test_workflow.py
@@ -360,7 +360,7 @@ def test_output_mesh_workflow(cyclic_lin_rst, cyclic_ds, server_type):
         == meshed_region.elements.connectivities_field.size
     )
 
-    fields = wf.get_output("fields", dpf.core.types.fields_container)
+    wf.get_output("fields", dpf.core.types.fields_container)
 
 
 def test_outputs_bool_workflow(server_type):
@@ -584,14 +584,14 @@ def test_transfer_owner_workflow(allkindofcomplexity, server_type):
     wf.connect("prop", "elshape")
     wf.set_output_name("scopings", op, 0)
     id = wf.record(transfer_ownership=True)
-    wf_copy = dpf.core.Workflow.get_recorded_workflow(id, server=server_type)
+    dpf.core.Workflow.get_recorded_workflow(id, server=server_type)
 
     with pytest.raises(Exception):
-        wf_copy = dpf.core.Workflow.get_recorded_workflow(id, server=server_type)
+        dpf.core.Workflow.get_recorded_workflow(id, server=server_type)
 
     id = wf.record(transfer_ownership=False)
-    wf_copy = dpf.core.Workflow.get_recorded_workflow(id, server=server_type)
-    wf_copy = dpf.core.Workflow.get_recorded_workflow(id, server=server_type)
+    dpf.core.Workflow.get_recorded_workflow(id, server=server_type)
+    dpf.core.Workflow.get_recorded_workflow(id, server=server_type)
 
 
 @conftest.raises_for_servers_version_under("3.0")
@@ -619,8 +619,8 @@ def test_connect_with_workflow(cyclic_lin_rst, cyclic_ds, server_type):
     wf2.set_output_name("u", op, 0)
     wf2.connect_with(wf)
 
-    meshed_region = wf2.get_output("mesh_expand", dpf.core.types.meshed_region)
-    fc = wf2.get_output("u", dpf.core.types.fields_container)
+    wf2.get_output("mesh_expand", dpf.core.types.meshed_region)
+    wf2.get_output("u", dpf.core.types.fields_container)
 
 
 @conftest.raises_for_servers_version_under("3.0")
@@ -648,8 +648,8 @@ def test_connect_with_2_workflow(cyclic_lin_rst, cyclic_ds, server_type):
     wf2.set_output_name("u", op, 0)
     wf2.connect_with(wf, ("support1", "support2"))
 
-    meshed_region = wf2.get_output("mesh_expand", dpf.core.types.meshed_region)
-    fc = wf2.get_output("u", dpf.core.types.fields_container)
+    wf2.get_output("mesh_expand", dpf.core.types.meshed_region)
+    wf2.get_output("u", dpf.core.types.fields_container)
 
 
 @conftest.raises_for_servers_version_under("3.0")
@@ -677,8 +677,8 @@ def test_connect_with_dict_workflow(cyclic_lin_rst, cyclic_ds, server_type):
     wf2.set_output_name("u", op, 0)
     wf2.connect_with(wf, {"support1": "support2"})
 
-    meshed_region = wf2.get_output("mesh_expand", dpf.core.types.meshed_region)
-    fc = wf2.get_output("u", dpf.core.types.fields_container)
+    wf2.get_output("mesh_expand", dpf.core.types.meshed_region)
+    wf2.get_output("u", dpf.core.types.fields_container)
 
 
 @pytest.mark.xfail(raises=dpf.core.errors.ServerTypeError)
@@ -935,18 +935,6 @@ def test_connect_get_output_big_strings(server_type, server_in_process):
     assert np.allclose(out.data, data)
 
 
-@pytest.mark.skipif(
-    not conftest.SERVERS_VERSION_GREATER_THAN_OR_EQUAL_TO_8_0, reason="Available for servers >=8.0"
-)
-def test_connect_get_output_big_strings(server_type, server_type_remote_process):
-    data = np.random.random(100000)
-    field_a = dpf.core.field_from_array(data, server=server_type)
-    assert np.allclose(field_a.data, data)
-
-    out = deep_copy_using_workflow(field_a, server_type_remote_process)
-    assert np.allclose(out.data, data)
-
-
 @conftest.raises_for_servers_version_under("8.0")
 def test_connect_get_non_ascii_string(server_type):
     str = "\N{GREEK CAPITAL LETTER DELTA}"
diff --git a/tests/testfiles/pythonPlugins/all_types/__init__.py b/tests/testfiles/pythonPlugins/all_types/__init__.py
index 13d133ac5a..b99231af71 100644
--- a/tests/testfiles/pythonPlugins/all_types/__init__.py
+++ b/tests/testfiles/pythonPlugins/all_types/__init__.py
@@ -21,3 +21,5 @@
 # SOFTWARE.
 
 from all_types.load_operators_func import load_operators
+
+__all__ = ["load_operators"]
diff --git a/tests/testfiles/pythonPlugins/syntax_error_plugin/__init__.py b/tests/testfiles/pythonPlugins/syntax_error_plugin/__init__.py
index 37c7f13bb2..0ef6410d52 100644
--- a/tests/testfiles/pythonPlugins/syntax_error_plugin/__init__.py
+++ b/tests/testfiles/pythonPlugins/syntax_error_plugin/__init__.py
@@ -21,3 +21,5 @@
 # SOFTWARE.
 
 from syntax_error_plugin.load_operators_func import load_operators
+
+__all__ = ["load_operators"]