diff --git a/pyproject.toml b/pyproject.toml index c0443473a7..d1c429c236 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -66,7 +66,7 @@ indent-style = "space" [tool.ruff.lint] select = [ # "E", # pycodestyle, see https://beta.ruff.rs/docs/rules/#pycodestyle-e-w -# "D", # pydocstyle, see https://beta.ruff.rs/docs/rules/#pydocstyle-d + "D", # pydocstyle, see https://beta.ruff.rs/docs/rules/#pydocstyle-d # "F", # pyflakes, see https://beta.ruff.rs/docs/rules/#pyflakes-f # "I", # isort, see https://beta.ruff.rs/docs/rules/#isort-i # "N", # pep8-naming, see https://beta.ruff.rs/docs/rules/#pep8-naming-n diff --git a/src/ansys/dpf/core/_version.py b/src/ansys/dpf/core/_version.py index 37236517dd..313a4c17cd 100644 --- a/src/ansys/dpf/core/_version.py +++ b/src/ansys/dpf/core/_version.py @@ -20,7 +20,7 @@ # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. -"""Version for ansys-dpf-core""" +"""Version for ansys-dpf-core.""" # Minimal DPF server version supported min_server_version = "4.0" diff --git a/src/ansys/dpf/core/animation.py b/src/ansys/dpf/core/animation.py index 26746d53aa..3c9b1e15a9 100644 --- a/src/ansys/dpf/core/animation.py +++ b/src/ansys/dpf/core/animation.py @@ -20,6 +20,8 @@ # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. +"""Module contains the function for modal animation creation.""" + import ansys.dpf.core as dpf import numpy as np @@ -34,8 +36,7 @@ def animate_mode( **kwargs, ): # other option: instead of `type` use `min_factor` and `max_factor`. - - """Creates a modal animation based on Fields contained in the FieldsContainer. + """Create a modal animation based on Fields contained in the FieldsContainer. This method creates a movie or a gif based on the time ids of a ``FieldsContainer``. For kwargs see pyvista.Plotter.open_movie/add_text/show. 
diff --git a/src/ansys/dpf/core/animator.py b/src/ansys/dpf/core/animator.py index 9bedb85433..260dfe2a44 100644 --- a/src/ansys/dpf/core/animator.py +++ b/src/ansys/dpf/core/animator.py @@ -21,7 +21,7 @@ # SOFTWARE. """ -Animator +Animator. This module contains the DPF animator class. @@ -37,8 +37,7 @@ class _InternalAnimatorFactory: - """ - Factory for _InternalAnimator based on the backend.""" + """Factory for _InternalAnimator based on the backend.""" @staticmethod def get_animator_class(): @@ -46,7 +45,7 @@ def get_animator_class(): class _PyVistaAnimator(_PyVistaPlotter): - """This _InternalAnimator class is based on PyVista""" + """An InternalAnimator class based on PyVista.""" def __init__(self, **kwargs): super().__init__(**kwargs) @@ -190,6 +189,8 @@ def animation(): class Animator: + """The DPF animator class.""" + def __init__(self, workflow=None, **kwargs): """ Create an Animator object. @@ -228,6 +229,7 @@ def __init__(self, workflow=None, **kwargs): def workflow(self) -> core.Workflow: """ Workflow used to generate a Field at each frame of the animation. + By default, the "to_render" Field output will be plotted, and the "loop_over" input defines what the animation iterates on. Optionally, the workflow can also have a "deform_by" Field output, @@ -267,7 +269,7 @@ def animate( **kwargs, ): """ - Animate the workflow of the Animator, using inputs + Animate the workflow of the Animator, using inputs. Parameters ---------- @@ -316,6 +318,16 @@ def animate( def scale_factor_to_fc(scale_factor, fc): + """Scale the fields being animated by a factor. + + Parameters + ---------- + scale_factor : int, float, list + Scale factor to apply to the animated field. + fc : FieldsContainer + FieldsContainer containing the fields being animated. 
+ """ + def int_to_field(value, shape, scoping): field = core.fields_factory.field_from_array(np.full(shape=shape, fill_value=value)) field.scoping = scoping diff --git a/src/ansys/dpf/core/any.py b/src/ansys/dpf/core/any.py index 6b0ce4f89a..d1258dadd2 100644 --- a/src/ansys/dpf/core/any.py +++ b/src/ansys/dpf/core/any.py @@ -21,10 +21,9 @@ # SOFTWARE. """ -.. _ref_any: - -Any +Any. +Module containing the wrapper class representing all supported DPF datatypes. """ import traceback @@ -216,7 +215,6 @@ def new_from(obj, server=None): any : Any Wrapped any type. """ - inner_server = server if server is not None else obj._server if not inner_server.meet_version("7.0"): @@ -295,7 +293,6 @@ def cast(self, output_type=None): type Original object instance """ - self._internal_type = output_type if output_type is not None else self._internal_type type_tuple = self._type_to_new_from_get_as_method(self._internal_type) @@ -316,6 +313,7 @@ def cast(self, output_type=None): raise TypeError(f"{output_type} is not currently supported by the Any class.") def __del__(self): + """Delete the entry.""" try: if hasattr(self, "_deleter_func"): obj = self._deleter_func[1](self) diff --git a/src/ansys/dpf/core/available_result.py b/src/ansys/dpf/core/available_result.py index ac4f832ed9..7a526bc6af 100644 --- a/src/ansys/dpf/core/available_result.py +++ b/src/ansys/dpf/core/available_result.py @@ -21,8 +21,9 @@ # SOFTWARE. """ -AvailableResult +AvailableResult. +Module contains the class representing the results that an operator can request. """ from typing import List @@ -33,6 +34,8 @@ @unique class Homogeneity(Enum): + """Enum class listing all possible homogeneity names of results.""" + acceleration = 0 angle = 1 angular_velocity = 2 @@ -147,6 +150,13 @@ def __init__(self, availableresult): self._qualifier_labels = availableresult.qualifier_labels def __str__(self): + """Construct an informal string representation of available result. 
+ + Returns + ------- + str + Informal string representation of available result. + """ txt = ( "DPF Result\n----------\n" + self.name @@ -170,6 +180,13 @@ def __str__(self): return txt def __repr__(self): + """Construct a formal string representation of available result. + + Returns + ------- + str + Formal string representation of available result. + """ return f"AvailableResult" @property @@ -257,13 +274,15 @@ def native_scoping_location(self): @property def physical_name(self) -> str: - """Name of the result with spaces""" + """Name of the result with spaces.""" return self._physics_name @property def qualifiers(self) -> list: - """Returns the list of qualifiers (equivalent to label spaces) - available for a given Result. These qualifiers can then be used to request the result + """ + Returns the list of qualifiers (equivalent to label spaces) available for a given Result. + + These qualifiers can then be used to request the result on specified locations/properties. """ return self._qualifiers @@ -329,6 +348,18 @@ def qualifier_combinations(self) -> List[dict]: def available_result_from_name(name) -> AvailableResult: + """Create an instance of AvailableResult from a specified results name. + + Parameters + ---------- + name : str + Valid property name. + + Returns + ------- + AvailableResult + Instance created from specified result name. + """ for key, item in _result_properties.items(): if item["scripting_name"] == name: from types import SimpleNamespace diff --git a/src/ansys/dpf/core/cache.py b/src/ansys/dpf/core/cache.py index 44aacc8299..7e746ff3a6 100644 --- a/src/ansys/dpf/core/cache.py +++ b/src/ansys/dpf/core/cache.py @@ -20,11 +20,14 @@ # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. +"""Provides for caching evaluated results for faster re-evaluation.""" + from typing import NamedTuple def class_handling_cache(cls): """Class decorator used to handle cache. 
+ To use it, add a ''_to_cache'' static attribute in the given class. This private dictionary should map class getters to their list of setters. At initialization, this decorator add a ''_cache'' property to the class. @@ -53,11 +56,14 @@ def get_handler(mesh): class MethodIdentifier(NamedTuple): + """Provides for identifying a method.""" + method_name: str args: list kwargs: dict def __eq__(self, other): + """Compare two methods for equality.""" if isinstance(other, str): return self.method_name == other else: @@ -68,6 +74,7 @@ def __eq__(self, other): ) def __hash__(self): + """Fetch the hash corresponding to a method.""" hash = self.method_name.__hash__() if self.args: hash += self.args.__hash__() @@ -77,7 +84,8 @@ def __hash__(self): class CacheHandler: - """ "Handle cache complexity. + """Handle cache complexity. + Is initialized by a class and a dictionary mapping the getters which support caching to their setters. When the getters of the dictionary are called, their parameters @@ -112,6 +120,7 @@ def __init__(self, cls, getters_to_setters_dict): self.cached = {} def handle(self, object, func, *args, **kwargs): + """Recover data which has already been cached.""" identifier = MethodIdentifier(func.__name__, args, kwargs) if identifier in self.cached: return self.cached[identifier] @@ -128,19 +137,19 @@ def handle(self, object, func, *args, **kwargs): return func(object, *args, **kwargs) def clear(self): + """Clear cached data.""" self.cached = {} def _handle_cache(func): - """Calls the cache handler to either recover cached data, either cache the data - or clear some cached data if the method is a setter. + """Call the cache handler to either recover cached data, either cache the data or clear some cached data if the method is a setter. .. note:: The method must be used as a decorator. 
""" def wrapper(self, *args, **kwargs): - """Call the original function""" + """Call the original function.""" if hasattr(self, "_cache"): return self._cache.handle(self, func, *args, **kwargs) else: @@ -150,15 +159,14 @@ def wrapper(self, *args, **kwargs): def _setter(func): - """Add a private attribute to the class (``self._is_set = True``) - when a method with this decorator is used. + """Add a private attribute to the class (``self._is_set = True``) when a method with this decorator is used. .. note:: The method must be used as a decorator. """ def wrapper(self, *args, **kwargs): - """Call the original function""" + """Call the original function.""" if hasattr(self, "_is_set"): self._is_set = True else: diff --git a/src/ansys/dpf/core/check_version.py b/src/ansys/dpf/core/check_version.py index b9504aae82..9d41450599 100644 --- a/src/ansys/dpf/core/check_version.py +++ b/src/ansys/dpf/core/check_version.py @@ -52,8 +52,7 @@ def server_meet_version(required_version, server): def server_meet_version_and_raise(required_version, server, msg=None): - """Check if a given server version matches with a required version and raise - an exception if it does not match. + """Check if a given server version matches with a required version and raise an exception if it does not match. Parameters ---------- @@ -76,7 +75,6 @@ def server_meet_version_and_raise(required_version, server, msg=None): bool ``True`` when successful, ``False`` when failed. 
""" - if not server_meet_version(required_version, server): if msg is not None: raise dpf_errors.DpfVersionNotSupported(required_version, msg=msg) @@ -142,8 +140,7 @@ def decorator(func): @wraps(func) def wrapper(self, *args, **kwargs): - """Call the original function""" - + """Call the original function.""" if isinstance(self._server, weakref.ref): server = self._server() else: diff --git a/src/ansys/dpf/core/collection.py b/src/ansys/dpf/core/collection.py index d81afa9ad2..be0480a0a2 100644 --- a/src/ansys/dpf/core/collection.py +++ b/src/ansys/dpf/core/collection.py @@ -21,10 +21,7 @@ # SOFTWARE. # -*- coding: utf-8 -*- -""" -Collection - -""" +"""Module containing the class representing dpf objects organized by label spaces.""" from __future__ import annotations from ansys.dpf.core.any import Any @@ -72,6 +69,7 @@ def __init__(self, collection=None, server=None, entries_type: type = None): self._internal_obj = self._api.collection_of_any_new() def create_subtype(self, obj_by_copy): + """Create dpf instance of any type, which has been cast to its original type.""" return create_dpf_instance(Any, obj_by_copy, self._server).cast(self.entries_type) def get_entries(self, label_space): @@ -123,7 +121,7 @@ def add_entry(self, label_space, entry): def CollectionFactory(subtype, BaseClass=Collection): - """Creates classes deriving from Collection at runtime for a given subtype.""" + """Create classes deriving from Collection at runtime for a given subtype.""" def __init__(self, **kwargs): BaseClass.__init__(self, **kwargs) diff --git a/src/ansys/dpf/core/collection_base.py b/src/ansys/dpf/core/collection_base.py index 3d6e18de29..4d33e930bb 100644 --- a/src/ansys/dpf/core/collection_base.py +++ b/src/ansys/dpf/core/collection_base.py @@ -20,12 +20,7 @@ # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. -""" -CollectionBase - -Contains classes associated with the DPF collection. 
- -""" +"""Contains classes associated with the DPF collection.""" from __future__ import annotations import abc @@ -58,7 +53,6 @@ class CollectionBase(Generic[TYPE]): - entries_type: Optional[type[TYPE]] # type of the entries in the collection. """Represents a collection of entries ordered by labels and IDs. Parameters @@ -72,6 +66,8 @@ class CollectionBase(Generic[TYPE]): """ + entries_type: Optional[type[TYPE]] # type of the entries in the collection. + def __init__(self, collection=None, server: BaseServer = None): # step 1: get server self._server = server_module.get_or_create_server( @@ -137,11 +133,12 @@ def name(self, name: str): @abc.abstractmethod def create_subtype(self, obj_by_copy): + """Must be implemented by subclasses.""" pass @staticmethod def integral_collection(inpt, server: BaseServer = None): - """Creates a collection of integral type with a list. + """Create a collection of integral type with a list. The collection of integral is the equivalent of an array of data sent server side. It can be used to efficiently stream @@ -237,6 +234,13 @@ def _get_labels(self) -> list: @property def labels(self) -> List[str]: + """Provides for getting scoping labels as a property. + + Returns + ------- + List[str] + List of labels scoping the collection. + """ return self._get_labels() @labels.setter @@ -410,7 +414,7 @@ def get_label_scoping(self, label="time"): return scoping def __getitem__(self, index): - """Retrieves the entry at a requested index value. + """Retrieve the entry at a requested index value. Parameters ---------- @@ -447,7 +451,7 @@ def _data_processing_core_api(self): def _add_entry(self, label_space, entry): """Update or add an entry at a requested label space. - parameters + Parameters ---------- label_space : list[str,int] Label space of the requested fields. For example, ``{"time":1, "complex":0}``. 
@@ -562,6 +566,7 @@ def _get_ownership(self): return self._internal_obj def __iter__(self): + """Provide for looping through entry items.""" for i in range(len(self)): yield self[i] @@ -589,6 +594,7 @@ def __init__(self, server=None, collection=None): @abc.abstractmethod def create_subtype(self, obj_by_copy): + """Must be implemented by subclasses.""" pass @abc.abstractmethod @@ -596,11 +602,11 @@ def _set_integral_entries(self, input): pass def get_integral_entries(self): + """Must be implemented by subclasses.""" pass class IntCollection(CollectionBase[int]): - entries_type = int """Creates a collection of integers with a list. The collection of integral is the equivalent of an array of @@ -618,6 +624,8 @@ class IntCollection(CollectionBase[int]): list is connected or returned. """ + entries_type = int + def __init__(self, list=None, server=None, collection=None): super().__init__(server=server, collection=collection) if self._internal_obj is None: @@ -629,6 +637,7 @@ def __init__(self, list=None, server=None, collection=None): self._set_integral_entries(list) def create_subtype(self, obj_by_copy): + """Create a sub type.""" return int(obj_by_copy) def _set_integral_entries(self, input): @@ -643,6 +652,7 @@ def _set_integral_entries(self, input): self._api.collection_set_data_as_int(self, input, input.size) def get_integral_entries(self): + """Get integral entries.""" try: vec = dpf_vector.DPFVectorInt(client=self._server.client) self._api.collection_get_data_as_int_for_dpf_vector( @@ -654,7 +664,6 @@ def get_integral_entries(self): class FloatCollection(CollectionBase[float]): - entries_type = float """Creates a collection of floats (double64) with a list. The collection of integral is the equivalent of an array of @@ -672,6 +681,8 @@ class FloatCollection(CollectionBase[float]): list is connected or returned. 
""" + entries_type = float + def __init__(self, list=None, server=None, collection=None): super().__init__(server=server, collection=collection) self._sub_type = float @@ -686,6 +697,7 @@ def __init__(self, list=None, server=None, collection=None): self._set_integral_entries(list) def create_subtype(self, obj_by_copy): + """Create a sub type.""" return float(obj_by_copy) def _set_integral_entries(self, input): @@ -700,6 +712,7 @@ def _set_integral_entries(self, input): self._api.collection_set_data_as_double(self, input, input.size) def get_integral_entries(self): + """Get integral entries.""" try: vec = dpf_vector.DPFVectorDouble(client=self._server.client) self._api.collection_get_data_as_double_for_dpf_vector( @@ -711,7 +724,6 @@ def get_integral_entries(self): class StringCollection(CollectionBase[str]): - entries_type = str """Creates a collection of strings with a list. The collection of integral is the equivalent of an array of @@ -729,6 +741,8 @@ class StringCollection(CollectionBase[str]): list is connected or returned. 
""" + entries_type = str + def __init__(self, list=None, server=None, collection=None, local: bool = False): super().__init__(server=server, collection=collection) self._sub_type = str @@ -748,6 +762,7 @@ def __init__(self, list=None, server=None, collection=None, local: bool = False) self._set_integral_entries(list) def create_subtype(self, obj_by_copy): + """Create a sub type.""" return str(obj_by_copy) def _set_integral_entries(self, input): @@ -755,6 +770,7 @@ def _set_integral_entries(self, input): self._api.collection_add_string_entry(self, s) def get_integral_entries(self): + """Get integral entries.""" num = self._api.collection_get_size(self) out = [] for i in range(num): diff --git a/src/ansys/dpf/core/common.py b/src/ansys/dpf/core/common.py index ea4323ef44..7dcc1ff096 100644 --- a/src/ansys/dpf/core/common.py +++ b/src/ansys/dpf/core/common.py @@ -20,14 +20,7 @@ # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. -""" -Common - - -.. autoclass:: locations - :members: - -""" +"""Common.""" import re import sys @@ -83,13 +76,7 @@ def __missing__(self, key): class types(Enum): - """ - The ``'types'`` enum contains the available types passed through operators - and workflows to DPF. - - - - """ + """The ``'types'`` enum contains the available types passed through operators and workflows to DPF.""" # Types from grpc proto, do not modify string = 0 @@ -127,6 +114,13 @@ class types(Enum): def types_enum_to_types(): + """Return a mapping of enums and corresponding python or dpf types. + + Returns + ------- + dict + Mapping of enum to the corresponding type. + """ from ansys.dpf.core import ( cyclic_support, data_sources, @@ -186,6 +180,7 @@ def types_enum_to_types(): class natures(Enum): """The ``'natures'`` enum contains the dimensionality types. + It can be used to create a field of a given dimensionality. 
""" @@ -196,7 +191,9 @@ class natures(Enum): class shell_layers(Enum): - """The ``'shell_layers'`` enum contains the available order of + """Contains data identifying shell layers. + + The ``'shell_layers'`` enum contains the available order of shell layers (or lack of shell layers) that defines how the field's data is ordered. """ @@ -304,6 +301,8 @@ class DefinitionLabels: class TqdmProgressBar(ProgressBarBase): + """Custom progress bar implementation based on tqdm.""" + def __init__(self, text, unit, tot_size=None): import tqdm @@ -323,6 +322,7 @@ def __init__(self, text, unit, tot_size=None): ) def update(self, current_value): + """Modify how the current value of the progress bar is updated.""" if self.tot_size is None: self.bar.total = current_value * 2 self.bar.update(current_value - self.current) @@ -330,6 +330,13 @@ def update(self, current_value): @staticmethod def progress_available(): + """Check if the tdqm module exists. + + Returns + ------- + bool + True if module exists, else False. + """ return module_exists("tqdm") @@ -346,9 +353,10 @@ def _common_percentage_progress_bar(text): class SubClassSmartDict(dict): + """Return the superclass name for a key if not found initially.""" + def __getitem__(self, item): - """If found returns the item of key == ìtem`, else returns item with key matching `issubclass(item, - key)`.""" + """If found returns the item of key == ìtem`, else returns item with key matching `issubclass(item, key)`.""" if item in self: return super().__getitem__(item) else: @@ -362,6 +370,13 @@ def __getitem__(self, item): def type_to_internal_object_keyword(): + """Return dpf types mapped to internal object keywords. + + Returns + ------- + SubClassSmartDict + Custom dictionary that returns superclass name for a key if not found initially. 
+ """ global _type_to_internal_object_keyword if _type_to_internal_object_keyword is None: from ansys.dpf.core import ( @@ -418,6 +433,7 @@ def type_to_internal_object_keyword(): def type_to_special_dpf_constructors(): + """Return dpf type mapped to special dpf constructors.""" global _type_to_special_dpf_constructors if _type_to_special_dpf_constructors is None: from ansys.dpf.gate.dpf_vector import DPFVectorInt @@ -436,7 +452,7 @@ def type_to_special_dpf_constructors(): def derived_class_name_to_type() -> Dict[str, type]: """ - Returns a mapping of derived class names to their corresponding Python classes. + Return a mapping of derived class names to their corresponding Python classes. Returns ------- @@ -454,7 +470,7 @@ def derived_class_name_to_type() -> Dict[str, type]: def record_derived_class(class_name: str, py_class: type, overwrite: bool = False): """ - Records a new derived class in the mapping of class names to their corresponding Python classes. + Record a new derived class in the mapping of class names to their corresponding Python classes. This function updates the global dictionary that maps derived class names (str) to their corresponding Python class objects (type). If the provided class name already exists in the dictionary, it will either @@ -477,6 +493,7 @@ def record_derived_class(class_name: str, py_class: type, overwrite: bool = Fals def create_dpf_instance(type, internal_obj, server): + """Create a server instance of a given type.""" spe_constructors = type_to_special_dpf_constructors() if type in spe_constructors: return spe_constructors[type](internal_obj, server) diff --git a/src/ansys/dpf/core/config.py b/src/ansys/dpf/core/config.py index 837b79098d..74eceb0438 100644 --- a/src/ansys/dpf/core/config.py +++ b/src/ansys/dpf/core/config.py @@ -20,10 +20,7 @@ # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. 
-""" -Operator Configuration - -""" +"""Operator Configuration.""" import functools import warnings @@ -211,7 +208,7 @@ def config_option_value(self, config_name): Name of the configuration option. Returns - ---------- + ------- str Value for the configuration option. """ @@ -222,6 +219,18 @@ def config_option_value(self, config_name): raise KeyError(f"{config_name} option doesn't exist.") def __try_get_option__(self, config_name): + """Return option associated with a given config name. + + Parameters + ---------- + config_name : _type_ + Name of the configuration. + + Returns + ------- + ConfigSpecification + Available configuration options supported by the Operator + """ if self._config_help: if config_name in self._config_help: return self._config_help[config_name] @@ -236,7 +245,7 @@ def config_option_documentation(self, config_name): Name of the configuration option. Returns - ---------- + ------- str Documentation for the configuration option. """ @@ -254,7 +263,7 @@ def config_option_accepted_types(self, config_name): Name of the configuration option. Returns - ---------- + ------- list, str One or more accepted types for the configuration option. """ @@ -265,13 +274,8 @@ def config_option_accepted_types(self, config_name): def config_option_default_value(self, config_name): """Retrieve the default value for a configuration option. - Parameters - ---------- - config_name : str - Name of the configuration option. - Returns - ---------- + ------- str Default value for the configuration option. """ @@ -285,7 +289,7 @@ def available_config_options(self): """Available configuration options for the operator. Returns - ---------- + ------- list, str One or more available configuration options for the operator. 
""" @@ -307,6 +311,7 @@ def __str__(self): return _description(self._internal_obj, self._server) def __del__(self): + """Delete this instance of config.""" try: self._deleter_func[0](self._deleter_func[1](self)) except: diff --git a/src/ansys/dpf/core/core.py b/src/ansys/dpf/core/core.py index 39beb3678e..01867c338e 100644 --- a/src/ansys/dpf/core/core.py +++ b/src/ansys/dpf/core/core.py @@ -20,10 +20,7 @@ # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. -""" -Core - -""" +"""Core.""" import os import logging @@ -67,6 +64,7 @@ def load_library(filename, name="", symbol="LoadOperators", server=None, generate_operators=False): """Dynamically load an operators library for dpf.core. + Code containing this library's operators is generated in ansys.dpf.core.operators @@ -100,8 +98,7 @@ def load_library(filename, name="", symbol="LoadOperators", server=None, generat def upload_file_in_tmp_folder(file_path, new_file_name=None, server=None): - """Upload a file from the client to the server in a temporary folder - deleted when the server is shutdown + """Upload a file from the client to a temporary server folder deleted on server shutdown. Parameters ---------- @@ -133,8 +130,7 @@ def upload_file_in_tmp_folder(file_path, new_file_name=None, server=None): def upload_files_in_folder( to_server_folder_path, client_folder_path, specific_extension=None, server=None ): - """Upload all the files from a folder of the client - to the target server folder path. + """Upload all the files from a folder of the client to the target server folder path. Parameters ---------- @@ -164,7 +160,7 @@ def upload_files_in_folder( def download_file(server_file_path, to_client_file_path, server=None): - """Download a file from the server to the target client file path + """Download a file from the server to the target client file path. 
Parameters ---------- @@ -190,8 +186,7 @@ def download_file(server_file_path, to_client_file_path, server=None): def download_files_in_folder( server_folder_path, to_client_folder_path, specific_extension=None, server=None ): - """Download all the files from a folder of the server - to the target client folder path + """Download all the files from a folder of the server to the target client folder path. Parameters ---------- @@ -221,7 +216,7 @@ def download_files_in_folder( def upload_file(file_path, to_server_file_path, server=None): - """Upload a file from the client to the target server file path + """Upload a file from the client to the target server file path. Parameters ---------- @@ -245,8 +240,8 @@ def upload_file(file_path, to_server_file_path, server=None): def make_tmp_dir_server(server=None): - """Create a temporary folder server side. Only one temporary folder can be created - by server instance. + """Create a temporary folder server side. Only one temporary folder can be created by server instance. + The folder will be deleted when the server is stopped. Parameters @@ -265,7 +260,7 @@ def make_tmp_dir_server(server=None): def _description(dpf_entity_message, server=None): - """Ask the server to describe the entity in input + """Ask the server to describe the entity in input. Parameters ---------- @@ -288,7 +283,7 @@ def _description(dpf_entity_message, server=None): def _deep_copy(dpf_entity, server=None): - """Returns a copy of the entity in the requested server + """Return a copy of the entity in the requested server. Parameters ---------- @@ -327,6 +322,7 @@ def _deep_copy(dpf_entity, server=None): class BaseService: """The Base Service class allows to make generic requests to dpf's server. + For example, information about the server can be requested, uploading/downloading file from and to the server can be done, new operators plugins can be loaded... 
@@ -356,7 +352,7 @@ class BaseService: """ def __init__(self, server=None, load_operators=True, timeout=5): - """Initialize base service""" + """Initialize base service.""" # step 1: get server if server is None: server = server_module.get_or_create_server(server) @@ -376,8 +372,8 @@ def __init__(self, server=None, load_operators=True, timeout=5): self._api.init_data_processing_environment(self) # creates stub when gRPC def make_tmp_dir_server(self): - """Create a temporary folder server side. Only one temporary folder can be created - by server instance. + """Create a temporary folder server side. Only one temporary folder can be created by server instance. + The folder will be deleted when the server is stopped. Returns @@ -392,6 +388,7 @@ def make_tmp_dir_server(self): def load_library(self, file_path, name="", symbol="LoadOperators", generate_operators=False): """Dynamically load an operators library for dpf.core. + Code containing this library's operators is generated in ansys.dpf.core.operators @@ -468,7 +465,8 @@ def __generate_code(TARGET_PATH, filename, name, symbol): @version_requires("6.0") def apply_context(self, context): - """Defines the settings that will be used to load DPF's plugins. + """Define the settings that will be used to load DPF's plugins. + A DPF xml file can be used to list the plugins and set up variables. Parameters @@ -494,7 +492,8 @@ def apply_context(self, context): ) def initialize_with_context(self, context): - """Defines the settings that will be used to initialize DPF. + """Define the settings that will be used to initialize DPF. + A DPF xml file can be used to list the plugins and set up variables. Parameters @@ -531,7 +530,7 @@ def initialize(self): @version_requires("6.0") def release_dpf(self): - """Clears the available Operators and Releases licenses when necessary. + """Clear the available Operators and release licenses when necessary. 
Notes ----- @@ -544,6 +543,13 @@ def release_dpf(self): @version_requires("4.0") def get_runtime_core_config(self): + """Determine runtime configuration. + + Returns + ------- + RuntimeCoreConfig + Runtime configuration options in DataProcessingCore + """ if self._server().has_client(): data_tree_tmp = self._api.data_processing_get_global_config_as_data_tree_on_client( self._server().client @@ -555,8 +561,7 @@ def get_runtime_core_config(self): @property def server_info(self): - """Send the request for server information and keep - the info into a dictionary + """Send the request for server information and keep the info into a dictionary. Returns ------- @@ -611,7 +616,7 @@ def _get_server_info(self): return out def _description(self, dpf_entity_message): - """Ask the server to describe the entity in input + """Ask the server to describe the entity in input. Parameters ---------- @@ -653,7 +658,7 @@ def _get_separator(self, path): return separator def download_file(self, server_file_path, to_client_file_path): - """Download a file from the server to the target client file path + """Download a file from the server to the target client file path. Parameters ---------- @@ -686,8 +691,7 @@ def _set_collection_api(self): def download_files_in_folder( self, server_folder_path, to_client_folder_path, specific_extension=None ): - """Download all the files from a folder of the server - to the target client folder path + """Download all the files from a folder of the server to the target client folder path. Parameters ---------- @@ -740,8 +744,7 @@ def download_files_in_folder( def upload_files_in_folder( self, to_server_folder_path, client_folder_path, specific_extension=None ): - """Upload all the files from a folder of the client - to the target server folder path. + """Upload all the files from a folder of the client to the target server folder path. 
Parameters ---------- @@ -822,7 +825,7 @@ def _upload_and_get_server_path( return server_paths def upload_file(self, file_path, to_server_file_path): - """Upload a file from the client to the target server file path + """Upload a file from the client to the target server file path. Parameters ---------- @@ -853,8 +856,7 @@ def upload_file(self, file_path, to_server_file_path): ) def upload_file_in_tmp_folder(self, file_path, new_file_name=None): - """Upload a file from the client to the server in a temporary folder - deleted when the server is shutdown + """Upload a file from the client to a temporary server folder deleted on shutdown. Parameters ---------- @@ -896,7 +898,7 @@ def _prepare_shutdown(self): # @version_requires("4.0") def _release_server(self): """ - Release the reference taken by this client on the server + Release the reference taken by this client on the server. Notes ----- diff --git a/src/ansys/dpf/core/custom_container_base.py b/src/ansys/dpf/core/custom_container_base.py index 1c2201f105..745c3b4130 100644 --- a/src/ansys/dpf/core/custom_container_base.py +++ b/src/ansys/dpf/core/custom_container_base.py @@ -21,7 +21,7 @@ # SOFTWARE. """ -CustomContainerBase +CustomContainerBase. This module contains the `CustomContainerBase` class, which serves as a base for creating wrappers around `GenericDataContainer` objects. diff --git a/src/ansys/dpf/core/custom_fields_container.py b/src/ansys/dpf/core/custom_fields_container.py index f979cfc26e..72be15a3de 100644 --- a/src/ansys/dpf/core/custom_fields_container.py +++ b/src/ansys/dpf/core/custom_fields_container.py @@ -21,7 +21,7 @@ # SOFTWARE. """ -Custom Fields Containers +Custom Fields Containers. Contains the inherited classes from the :class:`FieldsContainer ` class. 
@@ -88,7 +88,6 @@ def solid_fields(self, timeid=None, complexid=None): Examples -------- - >>> from ansys.dpf import core as dpf >>> from ansys.dpf.core import examples >>> model = dpf.Model(examples.download_all_kinds_of_complexity_modal()) @@ -308,6 +307,7 @@ def __init__(self, fields_container=None, server=None): def get_fields_by_mat_id(self, matid, timeid=None, complexid=None): """Retrieve a list of all fields for a material ID. + You can filter the list of fields for a material ID based on a given time, complex type, or both. @@ -345,6 +345,7 @@ def get_fields_by_mat_id(self, matid, timeid=None, complexid=None): def get_field_by_mat_id(self, matid, timeid=None, complexid=None): """Retrieve a field with a given material ID. + You can filter the field based on a given time, complex type, or both. Parameters @@ -377,7 +378,7 @@ def get_field_by_mat_id(self, matid, timeid=None, complexid=None): return self.get_field(label_space) def get_mat_scoping(self): - """Retrieves the material or body scoping containing material IDs. + """Retrieve the material or body scoping containing material IDs. Returns ------- diff --git a/src/ansys/dpf/core/custom_operator.py b/src/ansys/dpf/core/custom_operator.py index 990f86b6c6..217efdbbed 100644 --- a/src/ansys/dpf/core/custom_operator.py +++ b/src/ansys/dpf/core/custom_operator.py @@ -21,9 +21,7 @@ # SOFTWARE. """ -.. _ref_custom_operator: - -Custom Operator Base +Custom Operator Base. Contains utilities allowing you to implement and record custom Python operators. """ @@ -63,7 +61,7 @@ def update_virtual_environment_for_custom_operators( restore_original: bool = False, ): - """Updates the dpf-site.zip file used to start a venv for Python custom operators to run in. + """Update the dpf-site.zip file used to start a venv for Python custom operators to run in. It updates the site-packages in dpf-site.zip with the site-packages of the current venv. It stores the original dpf-site.zip for future restoration. 
@@ -221,8 +219,8 @@ def record_operator(operator_type, *args) -> None: class CustomOperatorBase: """ - Base class interfacing CPython Custom Operators which can be used as regular - DPF Operators in any API. + Base class interfacing CPython Custom Operators which can be used as regular DPF Operators in any API. + A CustomOperator is defined by its name, its specification and its run method. These three abstract methods should be implemented to create a CustomOperator. @@ -270,6 +268,7 @@ class CustomOperatorBase: def set_output(self, index: int, data) -> None: """ Add an output to this Operator at the given index. + To use in the ``run`` method. Parameters @@ -293,7 +292,8 @@ def set_output(self, index: int, data) -> None: def get_input(self, index, type: type): """ - Method used to get an input of a requested type at a given index in the ``run`` method. + Get an input of a requested type at a given index in the ``run`` method. + The correct input type must be connected to this Operator beforehand. Parameters @@ -326,6 +326,7 @@ def get_input(self, index, type: type): def set_failed(self) -> None: """ Set the Operator's status to "failed". + To use in the ``run`` method if an error occurred. This "failed" status is automatically set when an exception is raised in the ``run`` method. """ @@ -334,6 +335,7 @@ def set_failed(self) -> None: def set_succeeded(self) -> None: """ Set the Operator's status to "succeeded". + To use at the end of the ``run`` method. """ external_operator_api.external_operator_put_status(self._operator_data, 0) @@ -361,7 +363,8 @@ def _internal_specification(self): @abc.abstractmethod def run(self) -> None: """ - Callback of the Operator to implement. + Implement the Operator's callback in inheriting subclasses. + The implementation should first request the inputs with the method ``get_input``, compute the output data, then add the outputs with the method ``set_output`` and finally call ``set_succeeded``. 
@@ -372,8 +375,10 @@ def run(self) -> None: @abc.abstractmethod def specification(self): """ - Documents the operator. The following are mandatory to have a full support - (documentation, code generation and usage) of the new operator: + Documents the operator. + + The following are mandatory to have a full support (documentation, code generation and usage) + of the new operator: * Description * Supported inputs (a name, a document, a list of accepted types (optional) and/or ellipses) * Supported outputs (a name, a document, a type, and can be ellipsis) @@ -391,6 +396,7 @@ def specification(self): def name(self) -> str: """ Returns the identifier or name of the operator. + This name can then be used to instantiate the Operator. """ pass diff --git a/src/ansys/dpf/core/custom_type_field.py b/src/ansys/dpf/core/custom_type_field.py index 26e0324826..b926401fdd 100644 --- a/src/ansys/dpf/core/custom_type_field.py +++ b/src/ansys/dpf/core/custom_type_field.py @@ -20,12 +20,7 @@ # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. -""" -.. _ref_custom_type_field: - -CustomTypeField - -""" +"""CustomTypeField.""" import warnings @@ -46,7 +41,10 @@ class dict_with_missing_numpy_type(dict): + """Custom dictionary that returns the name attribute of a missing key.""" + def __missing__(self, key): + """Return the name attribute of a missing key.""" return key.name @@ -63,6 +61,7 @@ def __missing__(self, key): class CustomTypeField(_FieldBase): """Represents a simulation data container with each unitary data being of a custom type. + When initializing the ``CustomTypeField`` class, provide a unitary data type. The ``CustomTypeField`` class gives you the ability to choose the most optimized unitary data type for a given usage, and hence, allows you to optimize memory usage. @@ -113,9 +112,7 @@ def __init__( field=None, server=None, ): - """Initialize the field either with an optional field message or - by connecting to a stub. 
- """ + """Initialize the field either with an optional field message or by connecting to a stub.""" self._server = server_module.get_or_create_server( field._server if isinstance(field, CustomTypeField) else server ) @@ -211,7 +208,7 @@ def location(self, location): """Change the field location. Parameters - ------- + ---------- location : str or locations Location string, which can be ``"Nodal"``, ``"Elemental"``, ``"ElementalNodal"``... See :class:`ansys.dpf.core.common.locations`. @@ -232,7 +229,7 @@ def location(self, location): self.field_definition = fielddef def is_of_type(self, type_to_compare: np.dtype) -> bool: - """Checks whether the Field's unitary type is the same as the input type. + """Check whether the Field's unitary type is the same as the input type. Parameters ---------- @@ -262,6 +259,7 @@ def is_of_type(self, type_to_compare: np.dtype) -> bool: @property def type(self): """Type of unitary data in the Field's data vector. + Should be properly set at the Field construction to have properly allocated data. Returns @@ -282,14 +280,17 @@ def type(self): @property def component_count(self): + """Number of components.""" return self._api.cscustom_type_field_get_number_of_components(self) @property def elementary_data_count(self): + """Number of elementary data.""" return self._api.cscustom_type_field_get_number_elementary_data(self) @property def size(self): + """Size of data.""" return self._api.cscustom_type_field_get_data_size(self) def _set_scoping(self, scoping): @@ -301,7 +302,7 @@ def _get_scoping(self): return scoping.Scoping(scoping=obj, server=self._server) def get_entity_data(self, index): - """Returns the array corresponding to the data of a given entity index. + """Return the array corresponding to the data of a given entity index. Parameters ---------- @@ -341,7 +342,7 @@ def get_entity_data(self, index): return data def get_entity_data_by_id(self, id): - """Returns the array corresponding to the data of a given entity id. 
+ """Return the array corresponding to the data of a given entity id. Parameters ---------- @@ -384,6 +385,7 @@ def get_entity_data_by_id(self, id): return data def append(self, data, scopingid): + """Append data to the api instance.""" if isinstance(data, list): data = np.array(data, dtype=self._type) self._api.cscustom_type_field_push_back(self, scopingid, _get_size_of_list(data), data) @@ -449,7 +451,7 @@ def unit(self): """Units for the field. Returns - ---------- + ------- str Units for the field. @@ -469,7 +471,7 @@ def unit(self): @unit.setter def unit(self, value): - """Change the unit for the field + """Change the unit for the field. Parameters ---------- @@ -527,8 +529,7 @@ def _set_field_definition(self, field_definition): @property def field_definition(self): - """CustomTypeField information, including its location, unit, dimensionality - and shell layers. + """CustomTypeField information, including its location, unit, dimensionality and shell layers. Returns ------- @@ -546,6 +547,7 @@ def _set_support(self, support): @property def support(self): + """Return the support associated with the custom field.""" obj = self._api.cscustom_type_field_get_support(self) if obj is not None: return Support(support=obj, server=self._server) diff --git a/src/ansys/dpf/core/cyclic_support.py b/src/ansys/dpf/core/cyclic_support.py index 5ef78c7316..577c67d214 100644 --- a/src/ansys/dpf/core/cyclic_support.py +++ b/src/ansys/dpf/core/cyclic_support.py @@ -20,10 +20,7 @@ # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. -""" -Cyclic Support - -""" +"""Cyclic Support.""" import traceback import warnings @@ -98,7 +95,7 @@ def __str__(self): @property def num_stages(self) -> int: - """Number of cyclic stages in the model + """Number of cyclic stages in the model. 
Examples -------- @@ -117,7 +114,7 @@ def num_stages(self) -> int: return self._api.cyclic_support_get_num_stages(self) def num_sectors(self, stage_num=0) -> int: - """Number of sectors to expand on 360 degrees. + """Determine number of sectors to expand on 360 degrees. Parameters ---------- @@ -144,8 +141,7 @@ def num_sectors(self, stage_num=0) -> int: return self._api.cyclic_support_get_num_sectors(self, stage_num) def base_nodes_scoping(self, stage_num=0) -> Scoping: - """Retrieve a nodal scoping containing node IDs in the - base sector of the given stage. + """Retrieve a nodal scoping containing node IDs in the base sector of the given stage. Parameters ---------- @@ -170,8 +166,7 @@ def base_nodes_scoping(self, stage_num=0) -> Scoping: return Scoping(scoping=base_node_scoping, server=self._server) def base_elements_scoping(self, stage_num=0) -> Scoping: - """Retrieve an elemental scoping containing elements IDs in the - base sector of the given stage. + """Retrieve an elemental scoping containing elements IDs in the base sector of the given stage. Parameters ---------- @@ -196,8 +191,7 @@ def base_elements_scoping(self, stage_num=0) -> Scoping: return Scoping(scoping=base_element_scoping, server=self._server) def sectors_set_for_expansion(self, stage_num=0) -> Scoping: - """Retrieve a sector's scoping of the already expanded results - and mesh or the list of sectors that will be expanded by default. + """Retrieve a sector's scoping from expanded results and mesh, or list of sectors for default expansion. A sector's scoping starts from 0, with the maximum equal to num_sectors-1. @@ -226,8 +220,7 @@ def sectors_set_for_expansion(self, stage_num=0) -> Scoping: return Scoping(scoping=sectors_for_expansion, server=self._server) def expand_node_id(self, node_id, sectors=None, stage_num=0): - """Retrieve the node IDs corresponding to the base sector node ID given in the input - after expansion. 
+ """Retrieve the node IDs corresponding to the base sector node ID given in the input after expansion. Parameters ---------- @@ -266,8 +259,7 @@ def expand_node_id(self, node_id, sectors=None, stage_num=0): return Scoping(scoping=expanded_ids, server=self._server) def expand_element_id(self, element_id, sectors=None, stage_num=0): - """Retrieves the element IDs corresponding to the base sector element ID given in the input - after expansion. + """Retrieve the element IDs corresponding to the base sector element ID given in the input after expansion. Parameters ---------- @@ -317,13 +309,11 @@ def cs(self) -> field.Field: >>> cs = cyc_support.cs() """ - cs = self._api.cyclic_support_get_cs(self) return field.Field(field=cs, server=self._server) def low_high_map(self, stage_num: int = 0) -> property_field.PropertyField: - """Retrieve a property field containing node map from low to high - base sector of the given stage. + """Retrieve a property field containing node map from low to high base sector of the given stage. Parameters ---------- @@ -348,8 +338,7 @@ def low_high_map(self, stage_num: int = 0) -> property_field.PropertyField: return property_field.PropertyField(property_field=low_high_map, server=self._server) def high_low_map(self, stage_num: int = 0) -> property_field.PropertyField: - """Retrieve a property field containing node map from high to low - base sector of the given stage. + """Retrieve a property field containing node map from high to low base sector of the given stage. 
Parameters ---------- @@ -374,6 +363,7 @@ def high_low_map(self, stage_num: int = 0) -> property_field.PropertyField: return property_field.PropertyField(property_field=high_low_map, server=self._server) def __del__(self): + """Delete this instance.""" try: self._deleter_func[0](self._deleter_func[1](self)) except: diff --git a/src/ansys/dpf/core/data_sources.py b/src/ansys/dpf/core/data_sources.py index 6375a7c616..5ab753d760 100644 --- a/src/ansys/dpf/core/data_sources.py +++ b/src/ansys/dpf/core/data_sources.py @@ -20,12 +20,7 @@ # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. -""" -.. _ref_data_sources: - -Data Sources - -""" +"""Data Sources.""" import os from pathlib import Path @@ -173,7 +168,6 @@ def guess_result_key(filepath: str) -> str: @staticmethod def guess_second_key(filepath: str) -> str: """For files with an h5 or cff extension, look for another extension.""" - # These files usually end with .cas.h5 or .dat.h5 accepted = ["cas", "dat"] new_split = Path(filepath).suffixes @@ -233,6 +227,7 @@ def add_file_path(self, filepath, key="", is_domain: bool = False, domain_id=0): domain_id: int, optional Domain ID for the distributed files. The default is ``0``. For this parameter to be taken into account, ``domain_path=True`` must be set. + Examples -------- >>> from ansys.dpf import core as dpf @@ -275,6 +270,7 @@ def add_domain_file_path(self, filepath, key, domain_id): plugin when a result is requested by an operator. domain_id: Domain ID for the distributed files. + Examples -------- >>> from ansys.dpf import core as dpf @@ -378,7 +374,7 @@ def result_files(self): """List of result files contained in the data sources. Returns - ---------- + ------- list List of result files. """ @@ -399,7 +395,8 @@ def result_files(self): @version_requires("7.0") def register_namespace(self, result_key: str, namespace: str): - """Adds a link from this ``result_key`` to this ``namespace`` in the DataSources. 
+ """Add a link from this ``result_key`` to this ``namespace`` in the DataSources. + This ``result_key`` to ``namespace`` mapping is used by source operators to find internal operators to call. @@ -422,6 +419,7 @@ def __str__(self): return _description(self._internal_obj, self._server) def __del__(self): + """Delete this instance.""" try: self._deleter_func[0](self._deleter_func[1](self)) except: diff --git a/src/ansys/dpf/core/data_tree.py b/src/ansys/dpf/core/data_tree.py index 6a142afa52..75e115c094 100644 --- a/src/ansys/dpf/core/data_tree.py +++ b/src/ansys/dpf/core/data_tree.py @@ -20,12 +20,7 @@ # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. -""" -.. _ref_data_tree: - -DataTree - -""" +"""DataTree.""" import enum import traceback @@ -226,7 +221,8 @@ def _core_api(self): return core_api def to_fill(self): - """ + """Use with a with statement to modify local data_tree and sync with the server in one action. + This method allows to access and modify the local copy of the data_tree without sending a request to the server. It should be used in a ``with`` statement so that the local data tree is released and the data is sent to @@ -276,7 +272,7 @@ def _serialize(self, path, operator): def write_to_txt(self, path=None): """ - Writes the data tree either as a file or as returned string in a text format. + Write the data tree either as a file or as returned string in a text format. Parameters ---------- @@ -307,7 +303,7 @@ def write_to_txt(self, path=None): def write_to_json(self, path=None): """ - Writes the data tree either as a file or as returned string in a json format. + Write the data tree either as a file or as returned string in a json format. Parameters ---------- @@ -354,7 +350,7 @@ def _deserialize(path, txt, server, operator): @staticmethod def read_from_json(path=None, txt=None, server=None): """ - Convert a json string or file to DataTree + Convert a json string or file to DataTree. 
Parameters ---------- @@ -389,7 +385,7 @@ def read_from_json(path=None, txt=None, server=None): @staticmethod def read_from_txt(path=None, txt=None, server=None): """ - Convert a text string or file to DataTree + Convert a text string or file to DataTree. Parameters ---------- @@ -423,7 +419,7 @@ def read_from_txt(path=None, txt=None, server=None): def has(self, entry): """ - Return True if the entry exists + Return True if the entry exists. Parameters ---------- @@ -448,7 +444,7 @@ def has(self, entry): def get_as(self, name, type_to_return=types.string): """ - Returns an attribute value by its name in the required type. + Return an attribute value by its name in the required type. Parameters ---------- @@ -587,7 +583,7 @@ def __to_dict(self, dic): def to_dict(self): """ - Returns a read-only dictionary representation of the DataTree. + Return a read-only dictionary representation of the DataTree. Returns ------- @@ -609,6 +605,19 @@ def to_dict(self): return dic def __setattr__(self, key, value): + """Set an attribute for the DataTree object. + + Parameters + ---------- + key : str + The name of the attribute to set. If the attribute is a reserved key + (e.g., internal attributes starting with "_common_keys" or attributes + defined in the class), it is set using the parent class's `__setattr__` method. + Otherwise, it adds the attribute and its value to the data tree. + + value : object + The value of the attribute to set. 
+ """ if key == "_common_keys" or key in self._common_keys or key in dir(self): return super.__setattr__(self, key, value) self.add({key: value}) @@ -626,6 +635,7 @@ def __str__(self): return _description(self._internal_obj, self._server) def __del__(self): + """Delete this instance.""" try: # needs a proper deleter only when real datatree and not dict if hasattr(self, "_deleter_func"): diff --git a/src/ansys/dpf/core/dimensionality.py b/src/ansys/dpf/core/dimensionality.py index 54d476f499..cc2dda758c 100644 --- a/src/ansys/dpf/core/dimensionality.py +++ b/src/ansys/dpf/core/dimensionality.py @@ -20,10 +20,7 @@ # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. -""" -Dimensionality - -""" +"""Dimensionality.""" from ansys.dpf.core.common import natures @@ -72,16 +69,26 @@ def __init__(self, dim_vec=None, nature: natures = natures.vector): self.dim = [1] def is_1d_dim(self): + """Check if dimensionality is 1.""" return len(self.dim) == 1 def is_2d_dim(self): + """Check if dimensionality is 2.""" return len(self.dim) == 2 def __str__(self): + """Customize the string representation of the dimensionality.""" return str(self.dim) + " " + self.nature.name @property def component_count(self): + """Retrieve the number of components based on the dimensionality. + + Returns + ------- + float + Number of components. + """ count = 1 for comp in self.dim: count *= comp @@ -137,7 +144,37 @@ def tensor_dim(): return Dimensionality([3, 3], natures.symmatrix) def __eq__(self, other): + """Check if this Dimensionality object is equal to another. + + Two Dimensionality objects are considered equal if their dimensionality + vectors (`dim`) and natures (`nature`) are the same. + + Parameters + ---------- + other : Dimensionality + The other Dimensionality object to compare with. + + Returns + ------- + bool + `True` if the two objects are equal, `False` otherwise. 
+ """ return self.dim == other.dim and self.nature == other.nature def __ne__(self, other): + """Check if this Dimensionality object is not equal to another. + + Two Dimensionality objects are considered not equal if their dimensionality + vectors (`dim`) or natures (`nature`) differ. + + Parameters + ---------- + other : Dimensionality + The other Dimensionality object to compare with. + + Returns + ------- + bool + `True` if the two objects are not equal, `False` otherwise. + """ return not self.__eq__(other) diff --git a/src/ansys/dpf/core/dpf_array.py b/src/ansys/dpf/core/dpf_array.py index 90f74da456..10300e96ab 100644 --- a/src/ansys/dpf/core/dpf_array.py +++ b/src/ansys/dpf/core/dpf_array.py @@ -20,14 +20,6 @@ # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. -""" - -DPFArray - - -.. autoclass:: DPFArray - :members: - -""" +"""DPFArray.""" from ansys.dpf.gate.dpf_array import DPFArray # noqa: F401 diff --git a/src/ansys/dpf/core/dpf_operator.py b/src/ansys/dpf/core/dpf_operator.py index 723efecf67..5a0595894f 100644 --- a/src/ansys/dpf/core/dpf_operator.py +++ b/src/ansys/dpf/core/dpf_operator.py @@ -20,12 +20,7 @@ # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. -""" -.. _ref_operator: - -Operator - -""" +"""Operator.""" import logging import os @@ -209,7 +204,6 @@ def _add_sub_res_operators(self, sub_results): >>> disp_z = model.results.displacement().Z() """ - for result_type in sub_results: try: setattr( @@ -234,8 +228,11 @@ def _outputs(self, value): @property @version_requires("3.0") def progress_bar(self) -> bool: - """With this property, the user can choose to print a progress bar when - the operator's output is requested, default is False""" + """Enable or disable progress bar display when requesting the operator's output. 
+ + With this property, the user can choose to print a progress bar when + the operator's output is requested, default is False + """ return self._progress_bar @progress_bar.setter @@ -315,7 +312,8 @@ def connect(self, pin, inpt, pin_out=0): @version_requires("6.2") def connect_operator_as_input(self, pin, op): - """Connects an operator as an input on a pin. + """Connect an operator as an input on a pin. + Parameters ---------- pin : int @@ -630,7 +628,7 @@ def config(self): For information on an operator's options, see the documentation for that operator. Returns - ---------- + ------- :class:`ansys.dpf.core.config.Config` Copy of the operator's current configuration. @@ -691,7 +689,7 @@ def inputs(self): """Inputs connected to the operator. Returns - -------- + ------- :class:`ansys.dpf.core.inputs` Inputs connected to the operator. @@ -706,7 +704,6 @@ def inputs(self): >>> disp_op.inputs.data_sources(data_src) """ - return self._inputs @property @@ -714,7 +711,7 @@ def outputs(self): """Outputs from the operator's evaluation. Returns - -------- + ------- :class:`ansys.dpf.core.outputs` Outputs from the operator's evaluation. 
@@ -760,6 +757,7 @@ def default_config(name, server=None): return Config(operator_name=name, server=server) def __del__(self): + """Delete this instance.""" try: if hasattr(self, "_deleter_func"): obj = self._deleter_func[1](self) @@ -811,7 +809,6 @@ def eval(self, pin=None): >>> normfc = math.norm_fc(disp_op).eval() """ - if not pin: if self.outputs != None and len(self.outputs._outputs) > 0: return self.outputs._outputs[0]() @@ -887,6 +884,7 @@ def __sub__(self, fields_b): return op def __pow__(self, value): + """Raise each element of a field or a fields container to power 2.""" if value != 2: raise ValueError('Only the value "2" is supported.') from ansys.dpf.core import dpf_operator, operators @@ -918,13 +916,12 @@ def __mul__(self, value): @staticmethod def operator_specification(op_name, server=None): - """Documents an Operator with its description (what the Operator does), - its inputs and outputs and some properties""" + """Documents an Operator with its description (what the Operator does), its inputs and outputs and some properties.""" return Specification(operator_name=op_name, server=server) @property def specification(self): - """Returns the Specification (or documentation) of this Operator + """Returns the Specification (or documentation) of this Operator. Returns ------- @@ -936,6 +933,12 @@ def specification(self): return Specification(operator_name=self.name, server=self._server) def __truediv__(self, inpt): + """ + Perform division with another operator or a scalar. + + This method allows the use of the division operator (`/`) between an + `Operator` instance and either another `Operator` or a scalar value (float). + """ if isinstance(inpt, Operator): op = Operator("div") op.connect(0, self, 0) @@ -948,7 +951,7 @@ def __truediv__(self, inpt): def available_operator_names(server=None): - """Returns the list of operator names available in the server. + """Return the list of operator names available in the server. 
Parameters ---------- diff --git a/src/ansys/dpf/core/element_descriptor.py b/src/ansys/dpf/core/element_descriptor.py index 169108dbee..13c248d2f6 100644 --- a/src/ansys/dpf/core/element_descriptor.py +++ b/src/ansys/dpf/core/element_descriptor.py @@ -20,10 +20,7 @@ # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. -""" -Element Descriptor - -""" +"""Element Descriptor.""" class ElementDescriptor: @@ -77,7 +74,7 @@ def __init__( is_beam=None, is_quadratic=None, ): - """Constructor of ElementDescriptor.""" + """ElementDescriptor's Constructor.""" self.enum_id = enum_id self.description = description self.name = name @@ -93,6 +90,7 @@ def __init__( self.is_quadratic = is_quadratic def __str__(self): + """Provide more details in the string representation.""" lines = [] lines.append("Element descriptor") lines.append("-" * 18) diff --git a/src/ansys/dpf/core/elements.py b/src/ansys/dpf/core/elements.py index 7e6c923325..9c18092dd8 100644 --- a/src/ansys/dpf/core/elements.py +++ b/src/ansys/dpf/core/elements.py @@ -20,12 +20,7 @@ # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. -""" -.. _ref_elements_apis: - -Elements - -""" +"""Elements.""" from __future__ import annotations from enum import Enum @@ -85,7 +80,7 @@ def node_ids(self): IDs of all nodes in the element. Returns - -------- + ------- list List of IDs for all nodes in the element. @@ -132,8 +127,9 @@ def index(self) -> int: def nodes(self): """ All nodes in the element. + Returns - -------- + ------- list List of all nodes in the element. @@ -163,6 +159,7 @@ def n_nodes(self) -> int: return len(self._nodes) def __str__(self): + """Provide more details in the string representation.""" txt = f"DPF Element {self.id:d}\n" txt += f"\tIndex:{self.index:>13}\n" txt += f"\tNodes:{self.n_nodes:>13}\n" @@ -238,7 +235,7 @@ def connectivity(self): Ordered list of node indices of the element. 
Returns - -------- + ------- list Ordered list of node indices. @@ -275,16 +272,19 @@ def __init__(self, mesh): self._mapping_id_to_index = None def __str__(self): + """Provide a custom string representation.""" return "DPF Elements object with %d elements" % len(self) def __getitem__(self, index): - """Retrieves element based on an index.""" + """Retrieve element based on an index.""" return self.element_by_index(index) def __len__(self): + """Retrieve the number of elements.""" return self.n_elements def __iter__(self): + """Provide for looping through the elements in loops.""" for i in range(len(self)): yield self[i] @@ -609,7 +609,7 @@ def connectivities_field(self): @version_requires("3.0") def connectivities_field(self, property_field): """ - Connectivity field setter. + Set connectivity field. Parameters ---------- @@ -624,7 +624,7 @@ def _get_connectivities_field(self): @property def n_elements(self) -> int: - """Number of elements""" + """Number of elements.""" return self._mesh._api.meshed_region_get_num_elements(self._mesh) def _build_mapping_id_to_index(self): @@ -653,8 +653,7 @@ def mapping_id_to_index(self) -> dict: def map_scoping(self, external_scope): """ - Retrieve the indices to map the scoping of these elements to - the scoping of a field. + Retrieve the indices to map the scoping of these elements to the scoping of a field. Parameters ---------- @@ -789,7 +788,6 @@ def is_solid(self) -> bool: ------- bool """ - return self._shape_info["solid"] @is_solid.setter @@ -807,7 +805,6 @@ def is_shell(self) -> bool: ------- bool """ - return self._shape_info["shell"] @is_shell.setter @@ -825,7 +822,6 @@ def is_beam(self) -> bool: ------- bool """ - return self._shape_info["beam"] @is_beam.setter @@ -843,7 +839,6 @@ def is_point(self) -> bool: ------- bool """ - return self._shape_info["point"] @is_point.setter @@ -858,7 +853,7 @@ def shape(self) -> str: Shape of the element. Returns - -------- + ------- str Shape of the element. 
Options are ``"solid"``, ``"shell"``, ``"beam"`` and ``"unknown_shape"``. @@ -879,7 +874,7 @@ def shape(self, value): Set the shape of the element. Parameters - -------- + ---------- value : str Shape of the element. Options are ``"solid"``, ``"shell"``, ``"beam"`` and ``"unknown_shape"``. diff --git a/src/ansys/dpf/core/errors.py b/src/ansys/dpf/core/errors.py index cbf7c725a7..f98fc2863c 100644 --- a/src/ansys/dpf/core/errors.py +++ b/src/ansys/dpf/core/errors.py @@ -20,18 +20,7 @@ # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. -""" -.. _ref_errors: - -Errors - - -.. autoexception:: DpfVersionNotSupported - :members: - -.. autoexception:: DPFServerNullObject - :members: -""" +"""Errors.""" from functools import wraps from ansys.dpf.gate.errors import ( # noqa: F401 @@ -84,8 +73,7 @@ def __init__(self, msg=_COMPLEX_PLOTTING_ERROR_MSG): class FieldContainerPlottingError(ValueError): - """Error raised when attempting to plot a fields_container containing - multiple fields.""" + """Error raised when attempting to plot a fields_container containing multiple fields.""" def __init__(self, msg=_FIELD_CONTAINER_PLOTTING_MSG): ValueError.__init__(self, msg) @@ -106,15 +94,13 @@ def __init__(self, msg=""): class ServerTypeError(NotImplementedError): - """Error raised when using a functionality unavailable for this server type""" + """Error raised when using a functionality unavailable for this server type.""" pass def protect_source_op_not_found(func): - """Capture DPF's Server exceptions when a source operator is not found - and return a more succinct error message. 
- """ + """Capture DPF's Server exceptions when a source operator is not found and return a more succinct error message.""" @wraps(func) def wrapper(*args, **kwargs): diff --git a/src/ansys/dpf/core/examples/__init__.py b/src/ansys/dpf/core/examples/__init__.py index 22caad3b60..4ca0a05f07 100644 --- a/src/ansys/dpf/core/examples/__init__.py +++ b/src/ansys/dpf/core/examples/__init__.py @@ -19,6 +19,7 @@ # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. +"""Provide utility functions for downloading and locating DPF example files.""" from .examples import * from .downloads import * diff --git a/src/ansys/dpf/core/examples/downloads.py b/src/ansys/dpf/core/examples/downloads.py index 8228d7b99f..06e0f73bfe 100644 --- a/src/ansys/dpf/core/examples/downloads.py +++ b/src/ansys/dpf/core/examples/downloads.py @@ -21,9 +21,10 @@ # SOFTWARE. """ -Downloads +Downloads. -Download example datasets from https://github.com/ansys/example-data""" +Download example datasets from https://github.com/ansys/example-data +""" import os from pathlib import Path @@ -42,7 +43,7 @@ def delete_downloads(verbose=True): - """Delete all downloaded examples to free space or update the files""" + """Delete all downloaded examples to free space or update the files.""" from ansys.dpf.core import LOCAL_DOWNLOADED_EXAMPLES_PATH, examples not_to_remove = [ @@ -87,7 +88,7 @@ def _get_file_url(directory, filename): def _retrieve_file(url, filename, directory): - """Download a file from a url""" + """Download a file from a url.""" from ansys.dpf.core import LOCAL_DOWNLOADED_EXAMPLES_PATH # First check if file has already been downloaded @@ -122,8 +123,8 @@ def _download_file(directory, filename, should_upload: bool, server, return_loca def download_transient_result( should_upload: bool = True, server=None, return_local_path=False ) -> str: - """Download an example transient result 
file and return the download path - available server side. + """Download an example transient result file and return the download path available server side. + If the server is remote (or doesn't share memory), the file is uploaded or made available on the server side. @@ -164,8 +165,8 @@ def download_transient_result( def download_all_kinds_of_complexity( should_upload: bool = True, server=None, return_local_path=False ) -> str: - """Download an example static result and return the download path - available server side. + """Download an example static result and return the download path available server side. + If the server is remote (or doesn't share memory), the file is uploaded or made available on the server side. @@ -206,8 +207,8 @@ def download_all_kinds_of_complexity( def download_all_kinds_of_complexity_modal( should_upload: bool = True, server=None, return_local_path=False ) -> str: - """Download an example result file from a static modal analysis and - return the download path available server side. + """Download an example result file from a static modal analysis and return the download path available server side. + If the server is remote (or doesn't share memory), the file is uploaded or made available on the server side. @@ -250,8 +251,8 @@ def download_all_kinds_of_complexity_modal( def download_pontoon(should_upload: bool = True, server=None, return_local_path=False) -> str: - """Download an example result file from a static modal analsys and - return the download path available server side. + """Download an example result file from a static modal analysis and return the download path available server side. + If the server is remote (or doesn't share memory), the file is uploaded or made available on the server side. 
@@ -292,8 +293,8 @@ def download_pontoon(should_upload: bool = True, server=None, return_local_path= def download_multi_harmonic_result( should_upload: bool = True, server=None, return_local_path=False ) -> str: - """Download an example multi-harmonic result file and return the - download path available server side. + """Download an example multi-harmonic result file and return the download path available server side. + If the server is remote (or doesn't share memory), the file is uploaded or made available on the server side. @@ -333,8 +334,8 @@ def download_multi_harmonic_result( def download_multi_stage_cyclic_result( should_upload: bool = True, server=None, return_local_path=False ) -> str: - """Download an example multi-stage result file and return the - download path available server side. + """Download an example multi-stage result file and return the download path available server side. + If the server is remote (or doesn't share memory), the file is uploaded or made available on the server side. @@ -373,8 +374,8 @@ def download_multi_stage_cyclic_result( def download_sub_file(should_upload: bool = True, server=None, return_local_path=False) -> str: - """Download an example .sub result file containing matrices and return the - download path available server side. + r"""Download an example .sub result file containing matrices and return the download path available server side. + If the server is remote (or doesn't share memory), the file is uploaded or made available on the server side. @@ -413,8 +414,8 @@ def download_sub_file(should_upload: bool = True, server=None, return_local_path def download_msup_files_to_dict( should_upload: bool = True, server=None, return_local_path=False ) -> dict: - """Download all the files necessary for a msup expansion and return the - download paths available server side into a dictionary extension->path. 
+ r"""Download necessary files for an msup expansion and return a dictionary mapping each file extension to its server-side download path. + If the server is remote (or doesn't share memory), the file is uploaded or made available on the server side. @@ -465,8 +466,8 @@ def download_msup_files_to_dict( def download_distributed_files( should_upload: bool = True, server=None, return_local_path=False ) -> dict: - """Download distributed rst files and return the - download paths into a dictionary domain id->path. + r"""Download distributed rst files and return the download paths into a dictionary domain id->path. + If the server is remote (or doesn't share memory), the file is uploaded or made available on the server side. @@ -513,8 +514,9 @@ def download_distributed_files( def download_fluent_multi_species( should_upload: bool = True, server=None, return_local_path=False ) -> dict: - """Download the cas and dat file of a fluent analysis with multiple species - and return the download paths into a dictionary extension->path. + r""" + Download the cas and dat files from a multiple species Fluent analysis and return a dictionary of file extensions to download paths. + If the server is remote (or doesn't share memory), the file is uploaded or made available on the server side. @@ -569,8 +571,9 @@ def download_fluent_multi_species( def download_fluent_multi_phase( should_upload: bool = True, server=None, return_local_path=False ) -> dict: - """Download the cas and dat file of a fluent analysis with multiple phases - and return the download paths into a dictionary extension->path. + r""" + Download the cas and dat files from a multiple phases Fluent analysis and return a dictionary of file extensions to download paths. + If the server is remote (or doesn't share memory), the file is uploaded or made available on the server side. 
@@ -625,7 +628,9 @@ def download_fluent_multi_phase( def download_extrapolation_3d_result( should_upload: bool = True, server=None, return_local_path=False ) -> dict: - """Download example static results of reference and integrated points + """Download example static results for extrapolation and return a dictionary of two download paths. + + Download example static results of reference and integrated points for extrapolation of 3d-element and return the dictionary of 2 download paths. If the server is remote (or doesn't share memory), the file is uploaded or made available on the server side. @@ -678,7 +683,9 @@ def download_extrapolation_3d_result( def download_extrapolation_2d_result( should_upload: bool = True, server=None, return_local_path=False ) -> dict: - """Download example static results of reference and integrated points + """Download 2D extrapolation results and return two server-side paths. + + Download example static results of reference and integrated points for extrapolation of 2d-element and return the dictionary of 2 download paths. If the server is remote (or doesn't share memory), the file is uploaded or made available on the server side. @@ -737,8 +744,8 @@ def download_extrapolation_2d_result( def download_hemisphere(should_upload: bool = True, server=None, return_local_path=False) -> str: - """Download an example result file from a static analysis and - return the download path available server side. + """Download an example result file from a static analysis and return the download path available server side. + If the server is remote (or doesn't share memory), the file is uploaded or made available on the server side. @@ -779,8 +786,8 @@ def download_hemisphere(should_upload: bool = True, server=None, return_local_pa def download_example_asme_result( should_upload: bool = True, server=None, return_local_path=False ) -> str: - """Download an example result file from a static analysis and - return the download path available server side. 
+ """Download an example result file from a static analysis and return the download path available server side. + If the server is remote (or doesn't share memory), the file is uploaded or made available on the server side. @@ -817,8 +824,8 @@ def download_example_asme_result( def download_crankshaft(should_upload: bool = True, server=None, return_local_path=False) -> str: - """Download the result file of an example of a crankshaft - under load and return the download path available server side. + """Download the result file of an example of a crankshaft under load and return the download path available server side. + If the server is remote (or doesn't share memory), the file is uploaded or made available on the server side. @@ -857,8 +864,8 @@ def download_crankshaft(should_upload: bool = True, server=None, return_local_pa def download_piston_rod(should_upload: bool = True, server=None, return_local_path=False) -> str: - """Download the result file of an example of a piston rod - under load and return the download path available server side. + """Download the result file of an example of a piston rod under load and return the download path available server side. + If the server is remote (or doesn't share memory), the file is uploaded or made available on the server side. @@ -897,8 +904,8 @@ def download_piston_rod(should_upload: bool = True, server=None, return_local_pa def download_d3plot_beam(should_upload: bool = True, server=None, return_local_path=False) -> list: - """Download the result file of an example of a d3plot file with beam elements and return the - download paths available on the server side. + """Download the result file of an example of a d3plot file with beam elements and return the download paths available on the server side. + If the server is remote (or doesn't share the memory), the file is uploaded or made available on the server side. 
@@ -951,8 +958,8 @@ def download_d3plot_beam(should_upload: bool = True, server=None, return_local_p def download_binout_matsum(should_upload: bool = True, server=None, return_local_path=False) -> str: - """Download the result file of an example of a binout file with matsum branch and return the - download path available on the server side. + """Download the result file of an example of a binout file with matsum branch and return the download path available on the server side. + If the server is remote (or doesn't share the memory), the file is uploaded or made available on the server side. @@ -991,8 +998,8 @@ def download_binout_matsum(should_upload: bool = True, server=None, return_local def download_binout_glstat(should_upload: bool = True, server=None, return_local_path=False) -> str: - """Download the result file of an example of a binout file with glstat branch and return the - download path available on the server side. + """Download the result file of an example of a binout file with glstat branch and return the download path available on the server side. + If the server is remote (or doesn't share the memory), the file is uploaded or made available on the server side. @@ -1033,8 +1040,8 @@ def download_binout_glstat(should_upload: bool = True, server=None, return_local def download_cycles_to_failure( should_upload: bool = True, server=None, return_local_path=False ) -> str: - """Download an example result file from a cyclic analysis and - return the download path. + """Download an example result file from a cyclic analysis and return the download path. + If the server is remote (or doesn't share memory), the file is uploaded or made available on the server side. @@ -1073,8 +1080,8 @@ def download_cycles_to_failure( def download_modal_frame(should_upload: bool = True, server=None, return_local_path=False) -> str: - """Download an example result file from a modal analysis on a frame and - return the download path. 
+ """Download an example result file from a modal analysis on a frame and return the download path. + If the server is remote (or doesn't share memory), the file is uploaded or made available on the server side. @@ -1113,8 +1120,8 @@ def download_modal_frame(should_upload: bool = True, server=None, return_local_p def download_harmonic_clamped_pipe( should_upload: bool = True, server=None, return_local_path=False ) -> str: - """Download an example result file from a harmonic analysis on a clamped pipe and - return the download path. + """Download an example result file from a harmonic analysis on a clamped pipe and return the download path. + If the server is remote (or doesn't share memory), the file is uploaded or made available on the server side. @@ -1151,8 +1158,8 @@ def download_harmonic_clamped_pipe( def download_modal_cyclic(should_upload: bool = True, server=None, return_local_path=False) -> str: - """Download an example result file from a cyclic modal analysis and - return the download path. + """Download an example result file from a cyclic modal analysis and return the download path. + If the server is remote (or doesn't share memory), the file is uploaded or made available on the server side. @@ -1191,8 +1198,8 @@ def download_modal_cyclic(should_upload: bool = True, server=None, return_local_ def download_fluent_axial_comp( should_upload: bool = True, server=None, return_local_path=False ) -> dict: - """Download the flprj, cas and dat files of a fluent analysis of an axial compressor sector - and return the download paths into a dictionary extension->path. + r"""Download flprj, cas, and dat files of an axial compressor sector analysis and return a dictionary of file extensions to paths. + If the server is remote (or doesn't share memory), the file is uploaded or made available on the server side. 
@@ -1232,7 +1239,7 @@ def download_fluent_axial_comp( 'C:\\Users\\user\\AppData\\Local\\ansys-dpf-core\\ansys-dpf-core\\examples\\fluent-axial_comp\\axial_comp-1-01438.dat.h5', 'C:\\Users\\user\\AppData\\Local\\ansys-dpf-core\\ansys-dpf-core\\examples\\fluent-axial_comp\\axial_comp-1-01439.dat.h5', 'C:\\Users\\user\\AppData\\Local\\ansys-dpf-core\\ansys-dpf-core\\examples\\fluent-axial_comp\\axial_comp-1-01440.dat.h5', - ]} # noqa: E501 + ]} """ return { @@ -1295,8 +1302,8 @@ def download_fluent_axial_comp( def download_fluent_mixing_elbow_steady_state( should_upload: bool = True, server=None, return_local_path=False ) -> dict: - """Download the flprj, cas and dat files of a steady-state fluent analysis of a mixing elbow - and return the download paths into a dictionary extension->path. + r"""Download the flprj, cas, and dat files of a steady-state mixing elbow analysis and return a dictionary mapping extensions to paths. + If the server is remote (or doesn't share memory), the file is uploaded or made available on the server side. @@ -1439,8 +1446,8 @@ def download_fluent_mixing_elbow_steady_state( def download_fluent_mixing_elbow_transient( should_upload: bool = True, server=None, return_local_path=False ) -> dict: - """Download the flprj, cas and dat files of a transient fluent analysis of a mixing elbow - and return the download paths into a dictionary extension->path. + r"""Download the flprj, cas, and dat files of a transient mixing elbow analysis and return a dictionary mapping extensions to paths. + If the server is remote (or doesn't share memory), the file is uploaded or made available on the server side. 
@@ -1480,7 +1487,7 @@ def download_fluent_mixing_elbow_transient( 'C:\\Users\\user\\AppData\\Local\\ansys-dpf-core\\ansys-dpf-core\\examples\\fluent-mixing_elbow_transient\\elbow-2-00003.dat.h5', 'C:\\Users\\user\\AppData\\Local\\ansys-dpf-core\\ansys-dpf-core\\examples\\fluent-mixing_elbow_transient\\elbow-2-00004.dat.h5', 'C:\\Users\\user\\AppData\\Local\\ansys-dpf-core\\ansys-dpf-core\\examples\\fluent-mixing_elbow_transient\\elbow-2-00005.dat.h5', - ]} # noqa: E501 + ]} """ return { @@ -1543,8 +1550,8 @@ def download_fluent_mixing_elbow_transient( def download_cfx_heating_coil( should_upload: bool = True, server=None, return_local_path=False ) -> dict: - """Download the flprj, cas and dat files of a CFX analysis of a heating coil - and return the download paths into a dictionary extension->path. + r"""Download the flprj, cas, and dat files of a CFX heating coil analysis and return a dictionary mapping extensions to paths. + If the server is remote (or doesn't share memory), the file is uploaded or made available on the server side. @@ -1575,7 +1582,7 @@ def download_cfx_heating_coil( >>> paths = examples.download_cfx_heating_coil() >>> paths {'cas': 'C:\\Users\\user\\AppData\\Local\\ansys-dpf-core\\ansys-dpf-core\\examples\\cfx-heating_coil\\def.cas.cff', - 'dat': 'C:\\Users\\user\\AppData\\Local\\ansys-dpf-core\\ansys-dpf-core\\examples\\cfx-heating_coil\\def.dat.cff'} # noqa: E501 + 'dat': 'C:\\Users\\user\\AppData\\Local\\ansys-dpf-core\\ansys-dpf-core\\examples\\cfx-heating_coil\\def.dat.cff'} """ return { @@ -1599,8 +1606,8 @@ def download_cfx_heating_coil( def download_cfx_mixing_elbow( should_upload: bool = True, server=None, return_local_path=False ) -> str: - """Download the res file of a CFX analysis of a mixing elbow - and return the download path. + r"""Download the res file of a CFX analysis of a mixing elbow and return the download path. + If the server is remote (or doesn't share memory), the file is uploaded or made available on the server side. 
@@ -1643,8 +1650,7 @@ def download_cfx_mixing_elbow( def find_simple_bar(should_upload: bool = True, server=None, return_local_path=False) -> str: - """Make the result file available server side, if the server is remote the file is uploaded - server side. Returns the path on the file. + """Make the result file available server-side; if the server is remote, upload the file and return the file path. Parameters ---------- @@ -1664,7 +1670,6 @@ def find_simple_bar(should_upload: bool = True, server=None, return_local_path=F Examples -------- - >>> from ansys.dpf.core import examples >>> path = examples.find_simple_bar() >>> path @@ -1677,8 +1682,7 @@ def find_simple_bar(should_upload: bool = True, server=None, return_local_path=F def find_static_rst(should_upload: bool = True, server=None, return_local_path=False) -> str: - """Make the result file available server side, if the server is remote the file is uploaded - server side. Returns the path on the file. + """Make the result file available server-side; if the server is remote, upload the file and return the file path. Parameters ---------- @@ -1698,7 +1702,6 @@ def find_static_rst(should_upload: bool = True, server=None, return_local_path=F Examples -------- - >>> from ansys.dpf.core import examples >>> path = examples.find_static_rst() >>> path @@ -1709,8 +1712,7 @@ def find_static_rst(should_upload: bool = True, server=None, return_local_path=F def find_complex_rst(should_upload: bool = True, server=None, return_local_path=False) -> str: - """Make the result file available server side, if the server is remote the file is uploaded - server side. Returns the path on the file. + """Make the result file available server-side. If the server is remote, upload the file and return the file path. 
Parameters ---------- @@ -1730,7 +1732,6 @@ def find_complex_rst(should_upload: bool = True, server=None, return_local_path= Examples -------- - >>> from ansys.dpf.core import examples >>> path = examples.find_complex_rst() >>> path @@ -1741,8 +1742,7 @@ def find_complex_rst(should_upload: bool = True, server=None, return_local_path= def find_multishells_rst(should_upload: bool = True, server=None, return_local_path=False) -> str: - """Make the result file available server side, if the server is remote the file is uploaded - server side. Returns the path on the file. + """Make the result file available server-side. If the server is remote, upload the file and return its path. Parameters ---------- @@ -1762,7 +1762,6 @@ def find_multishells_rst(should_upload: bool = True, server=None, return_local_p Examples -------- - >>> from ansys.dpf.core import examples >>> path = examples.find_multishells_rst() >>> path @@ -1775,8 +1774,7 @@ def find_multishells_rst(should_upload: bool = True, server=None, return_local_p def find_electric_therm(should_upload: bool = True, server=None, return_local_path=False) -> str: - """Make the result file available server side, if the server is remote the file is uploaded - server side. Returns the path on the file. + """Make the result file available server-side. If the server is remote, upload the file and return its path. Parameters ---------- @@ -1796,7 +1794,6 @@ def find_electric_therm(should_upload: bool = True, server=None, return_local_pa Examples -------- - >>> from ansys.dpf.core import examples >>> path = examples.find_electric_therm() >>> path @@ -1809,8 +1806,7 @@ def find_electric_therm(should_upload: bool = True, server=None, return_local_pa def find_steady_therm(should_upload: bool = True, server=None, return_local_path=False) -> str: - """Make the result file available server side, if the server is remote the file is uploaded - server side. Returns the path on the file. + """Make the result file available server-side. 
If the server is remote, upload the file and return its path. Parameters ---------- @@ -1830,7 +1826,6 @@ def find_steady_therm(should_upload: bool = True, server=None, return_local_path Examples -------- - >>> from ansys.dpf.core import examples >>> path = examples.find_steady_therm() >>> path @@ -1843,8 +1838,7 @@ def find_steady_therm(should_upload: bool = True, server=None, return_local_path def find_transient_therm(should_upload: bool = True, server=None, return_local_path=False) -> str: - """Make the result file available server side, if the server is remote the file is uploaded - server side. Returns the path on the file. + """Make the result file available server-side. If the server is remote, upload the file and return its path. Parameters ---------- @@ -1864,7 +1858,6 @@ def find_transient_therm(should_upload: bool = True, server=None, return_local_p Examples -------- - >>> from ansys.dpf.core import examples >>> path = examples.find_transient_therm() >>> path @@ -1877,8 +1870,7 @@ def find_transient_therm(should_upload: bool = True, server=None, return_local_p def find_msup_transient(should_upload: bool = True, server=None, return_local_path=False) -> str: - """Make the result file available server side, if the server is remote the file is uploaded - server side. Returns the path on the file. + """Make the result file available server-side. If the server is remote, upload the file and return its path. Parameters ---------- @@ -1898,7 +1890,6 @@ def find_msup_transient(should_upload: bool = True, server=None, return_local_pa Examples -------- - >>> from ansys.dpf.core import examples >>> path = examples.find_msup_transient() >>> path @@ -1911,8 +1902,7 @@ def find_msup_transient(should_upload: bool = True, server=None, return_local_pa def find_simple_cyclic(should_upload: bool = True, server=None, return_local_path=False) -> str: - """Make the result file available server side, if the server is remote the file is uploaded - server side. 
Returns the path on the file. + """Make the result file available server-side. If the server is remote, upload the file and return its path. Parameters ---------- @@ -1932,7 +1922,6 @@ def find_simple_cyclic(should_upload: bool = True, server=None, return_local_pat Examples -------- - >>> from ansys.dpf.core import examples >>> path = examples.find_simple_cyclic() >>> path @@ -1947,8 +1936,7 @@ def find_simple_cyclic(should_upload: bool = True, server=None, return_local_pat def find_distributed_msup_folder( should_upload: bool = True, server=None, return_local_path=False ) -> str: - """Make the result file available server side, if the server is remote the file is uploaded - server side. Returns the path on the file. + """Make the result file available server-side. If the server is remote, upload the file and return its path. Parameters ---------- @@ -1968,7 +1956,6 @@ def find_distributed_msup_folder( Examples -------- - >>> from ansys.dpf.core import examples >>> path = examples.find_distributed_msup_folder() >>> path @@ -2008,8 +1995,7 @@ def find_distributed_msup_folder( def download_average_filter_plugin( should_upload: bool = True, server=None, return_local_path=False ) -> Union[str, None]: - """Make the plugin available server side, if the server is remote the plugin is uploaded - server side. Returns the path of the plugin folder. + """Make the plugin available server-side. If the server is remote, upload the plugin and return the path of the plugin folder. Parameters ---------- @@ -2029,7 +2015,6 @@ def download_average_filter_plugin( Examples -------- - >>> from ansys.dpf.core import examples >>> path = examples.download_average_filter_plugin() @@ -2051,8 +2036,7 @@ def download_average_filter_plugin( def download_gltf_plugin( should_upload: bool = True, server=None, return_local_path=False ) -> Union[str, None]: - """Make the plugin available server side, if the server is remote the plugin is uploaded - server side. Returns the path of the plugin folder. 
+ """Make the plugin available server-side. If the server is remote, upload the plugin and return the path of the plugin folder. Parameters ---------- @@ -2072,7 +2056,6 @@ def download_gltf_plugin( Examples -------- - >>> from ansys.dpf.core import examples >>> path = examples.download_gltf_plugin() @@ -2097,8 +2080,7 @@ def download_gltf_plugin( def download_easy_statistics( should_upload: bool = True, server=None, return_local_path=False ) -> Union[str, None]: - """Make the plugin available server side, if the server is remote the plugin is uploaded - server side. Returns the path of the plugin folder. + """Make the plugin available server-side. If the server is remote, upload the plugin and return the path of the plugin folder. Parameters ---------- @@ -2118,7 +2100,6 @@ def download_easy_statistics( Examples -------- - >>> from ansys.dpf.core import examples >>> path = examples.download_easy_statistics() diff --git a/src/ansys/dpf/core/examples/examples.py b/src/ansys/dpf/core/examples/examples.py index b6285df9bf..53402bcd6d 100644 --- a/src/ansys/dpf/core/examples/examples.py +++ b/src/ansys/dpf/core/examples/examples.py @@ -20,13 +20,7 @@ # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. -""" -.. _ref_examples: - -Result Files Examples -===================== -Examples result files. -""" +"""Examples result files.""" import os from pathlib import Path @@ -38,7 +32,7 @@ def get_example_required_minimum_dpf_version(file: os.PathLike) -> str: - """Returns the minimal DPF server version required to run the example, as declared in a note. + """Return the minimal DPF server version required to run the example, as declared in a note. Parameters ---------- @@ -76,8 +70,7 @@ def get_example_required_minimum_dpf_version(file: os.PathLike) -> str: def find_files(local_path, should_upload=True, server=None, return_local_path=False): - """Make the result file available server side, if the server is remote the file is uploaded - server side. 
Returns the path on the file. + """Make the result file available server-side. If the server is remote, upload the file and return its path. Parameters ---------- @@ -117,7 +110,6 @@ def fluid_axial_model() -> DataSources: Examples -------- - >>> from ansys.dpf.core import examples >>> ds = examples.fluid_axial_model() """ diff --git a/src/ansys/dpf/core/faces.py b/src/ansys/dpf/core/faces.py index 77a75b11fe..7df31b83ba 100644 --- a/src/ansys/dpf/core/faces.py +++ b/src/ansys/dpf/core/faces.py @@ -20,12 +20,7 @@ # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. -""" -.. _ref_faces_apis: - -Faces - -""" +"""Faces.""" import numpy as np from ansys.dpf.core import scoping @@ -86,7 +81,7 @@ def node_ids(self): IDs of all nodes in the face. Returns - -------- + ------- list List of IDs for all nodes in the face. @@ -133,8 +128,9 @@ def index(self) -> int: def nodes(self): """ All nodes in the face. + Returns - -------- + ------- list List of all nodes in the face. @@ -164,6 +160,7 @@ def n_nodes(self) -> int: return len(self._nodes) def __str__(self): + """Provide more information in string representation.""" txt = "DPF Face %d\n" % self.id txt += "\tIndex: %7d\n" % self.index txt += "\tNodes: %7d\n" % self.n_nodes @@ -206,7 +203,7 @@ def connectivity(self): Ordered list of node indices of the face. Returns - -------- + ------- list Ordered list of node indices. 
@@ -247,16 +244,19 @@ def __init__(self, mesh): self._mapping_id_to_index = None def __str__(self): + """Provide custom string representation.""" return "DPF Faces object with %d faces" % len(self) def __getitem__(self, index): - """Retrieves face based on an index.""" + """Retrieve face based on an index.""" return self.face_by_index(index) def __len__(self): + """Retrieve the number of faces.""" return self.n_faces def __iter__(self): + """Provide for iterating in loops.""" for i in range(len(self)): yield self[i] @@ -399,7 +399,7 @@ def faces_nodes_connectivity_field(self): @property def n_faces(self) -> int: - """Number of faces""" + """Number of faces.""" return self._mesh._api.meshed_region_get_num_faces(self._mesh) def _build_mapping_id_to_index(self): @@ -428,8 +428,7 @@ def mapping_id_to_index(self) -> dict: def map_scoping(self, external_scope): """ - Retrieve the indices to map the scoping of these faces to - the scoping of a field. + Retrieve the indices to map the scoping of these faces to the scoping of a field. Parameters ---------- diff --git a/src/ansys/dpf/core/field.py b/src/ansys/dpf/core/field.py index 1bcd9a9234..c9746bfc17 100644 --- a/src/ansys/dpf/core/field.py +++ b/src/ansys/dpf/core/field.py @@ -20,12 +20,7 @@ # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. -""" -.. _ref_field: - -Field - -""" +"""Field.""" import numpy as np from ansys import dpf @@ -81,6 +76,7 @@ class Field(_FieldBase): Server with the channel connected to the remote or local instance. The default is ``None``, in which case an attempt is made to use the global server. + Examples -------- Create a field from scratch. @@ -163,9 +159,7 @@ def __init__( field=None, server=None, ): - """Initialize the field either with an optional field message or - by connecting to a stub. 
- """ + """Initialize the field either with an optional field message or by connecting to a stub.""" super().__init__( nentities=nentities, nature=nature, @@ -298,7 +292,7 @@ def location(self, value): """Change the field location. Parameters - ------- + ---------- location : str or locations Location string, Options are in :class:`locations `. @@ -324,14 +318,17 @@ def location(self, value): @property def component_count(self): + """Number of components.""" return self._api.csfield_get_number_of_components(self) @property def elementary_data_count(self): + """Number of elementary data.""" return self._api.csfield_get_number_elementary_data(self) @property def size(self): + """Size of data.""" return self._api.csfield_get_data_size(self) def _set_scoping(self, scoping): @@ -361,6 +358,7 @@ def shell_layers(self, value): self.field_definition = fielddef def get_entity_data(self, index: int) -> dpf_array.DPFArray: + """Retrieve entity data by index.""" try: vec = dpf_vector.DPFVectorDouble(client=self._server.client) self._api.csfield_get_entity_data_for_dpf_vector( @@ -376,6 +374,7 @@ def get_entity_data(self, index: int) -> dpf_array.DPFArray: return data def get_entity_data_by_id(self, id: int) -> dpf_array.DPFArray: + """Retrieve entity data by id.""" try: vec = dpf_vector.DPFVectorDouble(client=self._server.client) self._api.csfield_get_entity_data_by_id_for_dpf_vector( @@ -394,6 +393,7 @@ def get_entity_data_by_id(self, id: int) -> dpf_array.DPFArray: return data def append(self, data, scopingid): + """Append data to the Field.""" if isinstance(data, list): if isinstance(data[0], list): data = np.array(data) @@ -539,7 +539,7 @@ def unit(self): """Units for the field. Returns - ---------- + ------- str Units for the field. @@ -559,7 +559,7 @@ def unit(self): @unit.setter def unit(self, value): - """Change the unit for the field + """Change the unit for the field. 
Parameters ---------- @@ -613,7 +613,7 @@ def name(self): @name.setter def name(self, value): - """Change the name of the field + """Change the name of the field. Parameters ---------- @@ -736,6 +736,7 @@ def __add__(self, field_b): return op def __pow__(self, value): + """Compute element-wise field[i]^2.""" if value != 2: raise ValueError('Only the value "2" is supported.') from ansys.dpf.core import dpf_operator, operators @@ -839,7 +840,6 @@ def deep_copy(self, server=None): >>> deep_copy = field.deep_copy(server=other_server) """ - f = Field( nentities=len(self.scoping), location=self.location, diff --git a/src/ansys/dpf/core/field_base.py b/src/ansys/dpf/core/field_base.py index 8a35397596..2948167429 100644 --- a/src/ansys/dpf/core/field_base.py +++ b/src/ansys/dpf/core/field_base.py @@ -19,6 +19,7 @@ # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. +"""Provide base APIs for DPF's field concept and means of caching field data.""" import traceback import warnings @@ -107,7 +108,8 @@ def _field_create_internal_obj( ncomp_m=0, with_type=None, ): - """Returns a gRPC field message or C object instance of a new field. + """Return a gRPC field message or C object instance of a new field. + This new field is created with this functions parameter attributes Parameters @@ -227,7 +229,7 @@ def ndim(self): return self.component_count def __str__(self): - """Describes the entity. + """Describe the entity. Returns ------- @@ -297,7 +299,6 @@ def scoping(self): >>> #The fourth elementary data of the field corresponds to >>> #the element id number 586 in the mesh """ - return self._get_scoping() @scoping.setter @@ -306,10 +307,10 @@ def scoping(self, scoping): @abstractmethod def get_entity_data(self, index): - """Retrieves the elementary data of the scoping's index in an array. + """Retrieve the elementary data of the scoping's index in an array. 
Returns - -------- + ------- numpy.ndarray Examples @@ -763,7 +764,6 @@ def data(self): [ 1.03542516e-02 -3.53018374e-03 -3.98914380e-05]] """ - if self._ncomp > 1: return np.array(self._data_copy).reshape( len(self._data_copy) // self._ncomp, self._ncomp @@ -815,7 +815,6 @@ def component_count(self): int Number of components in each elementary data of the field. """ - return self._ncomp @property diff --git a/src/ansys/dpf/core/field_definition.py b/src/ansys/dpf/core/field_definition.py index c9484cff2e..31c7eb05eb 100644 --- a/src/ansys/dpf/core/field_definition.py +++ b/src/ansys/dpf/core/field_definition.py @@ -20,10 +20,7 @@ # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. -""" -FieldDefinition - -""" +"""FieldDefinition.""" import traceback import warnings @@ -141,7 +138,7 @@ def shell_layers(self): @property def dimensionality(self): - """Dimensionality + """Dimensionality. Returns ------- @@ -181,7 +178,8 @@ def dimensionality(self, value): ) def deep_copy(self, server=None): - """Creates a deep copy of the field_definition's data on a given server. + """Create a deep copy of the field_definition's data on a given server. + This can be useful to pass data from one server instance to another. Parameters @@ -200,6 +198,7 @@ def deep_copy(self, server=None): return out def __del__(self): + """Delete the current instance.""" try: self._deleter_func[0](self._deleter_func[1](self)) except: diff --git a/src/ansys/dpf/core/fields_container.py b/src/ansys/dpf/core/fields_container.py index f939d12eef..39307c00d1 100644 --- a/src/ansys/dpf/core/fields_container.py +++ b/src/ansys/dpf/core/fields_container.py @@ -21,9 +21,7 @@ # SOFTWARE. """ -.. _ref_fields_container: - -FieldsContainer +FieldsContainer. Contains classes associated with the DPF FieldsContainer. 
""" @@ -35,7 +33,6 @@ class FieldsContainer(CollectionBase[field.Field]): - entries_type = field.Field """Represents a fields container, which contains fields belonging to a common result. A fields container is a set of fields ordered by labels and IDs. Each field @@ -91,6 +88,8 @@ class FieldsContainer(CollectionBase[field.Field]): """ + entries_type = field.Field + def __init__(self, fields_container=None, server=None): super().__init__(collection=fields_container, server=server) if self._internal_obj is None: @@ -105,6 +104,7 @@ def __init__(self, fields_container=None, server=None): self._component_info = None # for norm/max/min def create_subtype(self, obj_by_copy): + """Create a field subtype.""" return field.Field(field=obj_by_copy, server=self._server) def get_fields_by_time_complex_ids(self, timeid=None, complexid=None): @@ -181,6 +181,20 @@ def get_field_by_time_complex_ids(self, timeid=None, complexid=None): return super()._get_entry(label_space) def __time_complex_label_space__(self, timeid=None, complexid=None): + """Return a label space dictionary mapping scoping to given id. + + Parameters + ---------- + timeid : int, optional + time based id, by default None + complexid : int, optional + complex id, by default None + + Returns + ------- + dict[str,int] + mapping of space type to given id. + """ label_space = {} if timeid is not None: label_space["time"] = timeid @@ -222,11 +236,10 @@ def get_fields(self, label_space): 2 """ - return super()._get_entries(label_space) def get_field(self, label_space_or_index): - """Retrieves the field at a requested index or label space. + """Retrieve the field at a requested index or label space. An exception is raised if the number of fields matching the request is greater than one. @@ -254,7 +267,7 @@ def get_field(self, label_space_or_index): return super()._get_entry(label_space_or_index) def get_field_by_time_id(self, timeid=None): - """Retrieves the complex field at a requested time. 
+ """Retrieve the complex field at a requested time. Parameters ---------- @@ -494,7 +507,7 @@ def deep_copy(self, server=None): return fc def get_time_scoping(self): - """Retrieves the time scoping containing the time sets. + """Retrieve the time scoping containing the time sets. Returns ------- @@ -504,7 +517,8 @@ def get_time_scoping(self): return self.get_label_scoping("time") def plot(self, label_space: dict = None, **kwargs): - """Plots the fields in the FieldsContainer for the given LabelSpace. + """Plot the fields in the FieldsContainer for the given LabelSpace. + Check the labels available for the FieldsContainer with :func:`~fields_container.FieldsContainer.labels`. @@ -533,7 +547,7 @@ def plot(self, label_space: dict = None, **kwargs): plt.show_figure(**kwargs) def animate(self, save_as=None, deform_by=None, scale_factor=1.0, **kwargs): - """Creates an animation based on the Fields contained in the FieldsContainer. + """Create an animation based on the Fields contained in the FieldsContainer. This method creates a movie or a gif based on the time ids of a FieldsContainer. For kwargs see pyvista.Plotter.open_movie/add_text/show. @@ -675,6 +689,7 @@ def __sub__(self, fields_b): return op def __pow__(self, value): + """Compute element-wise field[i]^2.""" if value != 2: raise ValueError('DPF only the value is "2" supported') from ansys.dpf.core import dpf_operator diff --git a/src/ansys/dpf/core/fields_container_factory.py b/src/ansys/dpf/core/fields_container_factory.py index 079175f3b3..34c428a366 100644 --- a/src/ansys/dpf/core/fields_container_factory.py +++ b/src/ansys/dpf/core/fields_container_factory.py @@ -21,8 +21,7 @@ # SOFTWARE. """ -fields_container_factory - +fields_container_factory. Contains functions to simplify creating a fields container. 
""" diff --git a/src/ansys/dpf/core/fields_factory.py b/src/ansys/dpf/core/fields_factory.py index eac1d5c396..1e44c95909 100644 --- a/src/ansys/dpf/core/fields_factory.py +++ b/src/ansys/dpf/core/fields_factory.py @@ -21,8 +21,7 @@ # SOFTWARE. """ -fields_factory - +fields_factory. Contains functions to simplify creating fields. """ @@ -279,8 +278,7 @@ def create_vector_field(num_entities, num_comp, location=locations.nodal, server def create_overall_field( value, nature, num_entities, num_comp, location=locations.overall, server=None ): - """Create a specific `:class:`ansys.dpf.core.Field` with entities that have an - overall location. + """Create a specific `:class:`ansys.dpf.core.Field` with entities that have an overall location. Regarding the nature of the entity contained in the field, we set the same value for all elements. diff --git a/src/ansys/dpf/core/generic_data_container.py b/src/ansys/dpf/core/generic_data_container.py index b5e3a281fc..20b348d6d2 100644 --- a/src/ansys/dpf/core/generic_data_container.py +++ b/src/ansys/dpf/core/generic_data_container.py @@ -20,12 +20,7 @@ # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. -""" -.. _ref_generic_data_container: - -GenericDataContainer - -""" +"""GenericDataContainer.""" from __future__ import annotations import traceback @@ -137,7 +132,6 @@ def set_property( prop: Property object. """ - self._prop_description_instance = None if not isinstance(prop, (int, float, str, bytes, list, np.ndarray)) and server_meet_version( "8.1", self._server @@ -185,7 +179,7 @@ def get_property(self, property_name, output_type: Union[None, type, types] = No return any_dpf.cast(class_) def get_property_description(self): - """Get a dictionary description of properties by name and data type + """Get a dictionary description of properties by name and data type. 
Returns ------- @@ -217,6 +211,7 @@ def get_property_description(self): return self._prop_description_instance def __del__(self): + """Delete the current instance.""" if self._internal_obj is not None: try: self._deleter_func[0](self._deleter_func[1](self)) diff --git a/src/ansys/dpf/core/generic_support.py b/src/ansys/dpf/core/generic_support.py index 80943f49b3..e4a18e8cd0 100644 --- a/src/ansys/dpf/core/generic_support.py +++ b/src/ansys/dpf/core/generic_support.py @@ -20,12 +20,7 @@ # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. -""" -.. _ref_genericsupport: - -GenericSupport - -""" +"""GenericSupport.""" from ansys.dpf.gate import generic_support_capi, generic_support_grpcapi from ansys.dpf.core.support import Support diff --git a/src/ansys/dpf/core/geometry.py b/src/ansys/dpf/core/geometry.py index 3ea8d1ca39..9e592dc449 100644 --- a/src/ansys/dpf/core/geometry.py +++ b/src/ansys/dpf/core/geometry.py @@ -21,9 +21,7 @@ # SOFTWARE. """ -.. _ref_geometry: - -Geometry +Geometry. Module containing the different geometry objects. 
@@ -83,9 +81,11 @@ def __init__(self, coordinates, server=None): self._server = server def __getitem__(self, value): + """Retrieve coordinates data corresponding to a given value.""" return self.coordinates.data[value] def __len__(self): + """Retrieve the number of points.""" return self.n_points def __str__(self): @@ -387,7 +387,6 @@ def n_cells_y(self): def _discretize(self): """Discretize plane with a certain size and number of cells per direction.""" - # Get plane axis (local) from reference axis (global) and plane's normal self._axes_plane = get_plane_local_axis(self._normal_dir) @@ -430,7 +429,7 @@ def _discretize(self): self._mesh = mesh def _get_direction_from_vect(self, vect): - """Normal direction to the plane.""" + """Get normal direction to the plane.""" direction = [x - y for x, y in zip(vect[1], vect[0])] return normalize_vector(direction) @@ -457,6 +456,7 @@ def plot(self, mesh=None, **kwargs): def get_plane_local_axis(normal_dir): + """Determine local axis of the plane.""" axis_ref = [np.array([1, 0, 0]), np.array([0, 1, 0]), np.array([0, 0, 1])] if np.allclose(abs(normal_dir), [1.0, 0.0, 0.0]): plane_x = np.cross(axis_ref[1], normal_dir) @@ -473,8 +473,10 @@ def get_plane_local_axis(normal_dir): def get_global_coords_from_local(local_coords, axes_plane, center): + """Determine global coordinates from local coordinates.""" return np.dot(local_coords, axes_plane) + center def get_local_coords_from_global(global_coords, axes_plane, center): + """Determine local coordinates from global coordinates.""" return np.dot(axes_plane, (global_coords - np.array(center))) diff --git a/src/ansys/dpf/core/geometry_factory.py b/src/ansys/dpf/core/geometry_factory.py index 407d203c1d..bbd965e409 100644 --- a/src/ansys/dpf/core/geometry_factory.py +++ b/src/ansys/dpf/core/geometry_factory.py @@ -21,12 +21,9 @@ # SOFTWARE. """ -.. _ref_geometry_factory: - -Geometry Factory +Geometry Factory. 
Geometry factory module containing functions to create the different geometries. - """ import numpy as np diff --git a/src/ansys/dpf/core/help.py b/src/ansys/dpf/core/help.py index 519819aed6..ff55dfcfd9 100644 --- a/src/ansys/dpf/core/help.py +++ b/src/ansys/dpf/core/help.py @@ -195,6 +195,7 @@ def _norm_fc(fields): def _norm_op(oper): """Retrieve a chained norm operator. + Returns ------- oper : ansys.dpf.core.Operator @@ -288,7 +289,6 @@ def _min_max(field): oper : ansys.dpf.core.Operator Component-wise minimum/maximum operator over the input. """ - oper = dpf.core.Operator("min_max") oper.inputs.connect(field) return oper @@ -316,7 +316,6 @@ def _min_max_oper(oper): oper : ansys.dpf.core.Operator Component-wise minimum/maximum operator. """ - min_max_oper = dpf.core.Operator("min_max_fc") min_max_oper.connect(0, oper, 0) return min_max_oper @@ -424,7 +423,6 @@ def sqr(field): [ 1. 64.] """ - _check_type(field, (dpf.core.Field, dpf.core.FieldsContainer)) op = dpf.core.Operator("sqr") op.connect(0, field) diff --git a/src/ansys/dpf/core/helpers/streamlines.py b/src/ansys/dpf/core/helpers/streamlines.py index bcfd8f6f4f..de8bd8e3fb 100644 --- a/src/ansys/dpf/core/helpers/streamlines.py +++ b/src/ansys/dpf/core/helpers/streamlines.py @@ -32,10 +32,9 @@ class _PvFieldsContainerBase: def __init__(self, data): - """Instantiate Streamline - from pyvista.PolyData object. - This construction is only - intended to be used internally. + """Instantiate Streamline from pyvista.PolyData object. + + This construction is only intended to be used internally. Parameters ---------- @@ -73,8 +72,7 @@ def _as_pyvista_data_set(self): return self._pv_data_set def _as_fields_container(self): - """Returns a FieldsContainer representing the streamlines - related objects. + """Return a FieldsContainer representing the streamlines related objects. 
Returns ------- @@ -90,28 +88,21 @@ def _as_fields_container(self): class Streamlines(_PvFieldsContainerBase): - """Class to define the Streamlines object - scripting with `ansys-dpf-core`. - - """ + """Class to define the Streamlines object scripting with `ansys-dpf-core`.""" def __init__(self, data): super().__init__(data=data) class StreamlinesSource(_PvFieldsContainerBase): - """Class to define the StreamlinesSource - object scripting with `ansys-dpf-core`. - - """ + """Class to define the StreamlinesSource object scripting with `ansys-dpf-core`.""" def __init__(self, data): super().__init__(data=data) def compute_streamlines(meshed_region, field, **kwargs): - """Compute the streamlines for a given mesh and velocity - field. + """Compute the streamlines for a given mesh and velocity field. Parameters ---------- diff --git a/src/ansys/dpf/core/helpers/utils.py b/src/ansys/dpf/core/helpers/utils.py index b43986b884..21ab35da22 100644 --- a/src/ansys/dpf/core/helpers/utils.py +++ b/src/ansys/dpf/core/helpers/utils.py @@ -19,6 +19,7 @@ # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. +"""Provides functions for argument filtering and text indenting.""" import inspect import sys @@ -26,7 +27,7 @@ def _sort_supported_kwargs(bound_method, **kwargs): - """Filters the kwargs for a given method.""" + """Filter the kwargs for a given method.""" # Ignore warnings unless specified if not sys.warnoptions: import warnings @@ -52,7 +53,7 @@ def _sort_supported_kwargs(bound_method, **kwargs): def indent(text: Any, subsequent_indent: str = "", initial_indent: Optional[str] = None) -> str: - """Indents each line of a given text. + r"""Indent each line of a given text. 
Parameters ---------- diff --git a/src/ansys/dpf/core/incremental.py b/src/ansys/dpf/core/incremental.py index 591b5f4504..105bc64229 100644 --- a/src/ansys/dpf/core/incremental.py +++ b/src/ansys/dpf/core/incremental.py @@ -20,12 +20,7 @@ # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. -""" -.. _ref_incremental: - -Incremental - -""" +"""Incremental.""" from ansys.dpf import core @@ -60,7 +55,7 @@ def __init__( scoping: core.Scoping, scoping_pin: int = None, ): - """Constructs an IncrementalHelper object. + """Construct an IncrementalHelper object. Given the first and the last operator of a workflow, as well as the scoping. @@ -140,8 +135,6 @@ def _compute_size(self, obj): raise NotImplementedError() def _prerun(self, _dict_inputs: Dict[int, Any]): - """""" - for pin_idx, val in _dict_inputs.items(): self._start_op.connect(pin_idx, val) self._start_op.run() @@ -298,7 +291,7 @@ def split_workflow_in_chunks( scoping_pin: int = None, end_input_pin: int = 0, ): - """Transforms a workflow into an incrementally evaluating one. + """Transform a workflow into an incrementally evaluating one. It wraps in one method the functionality of the IncrementalHelper class as well as the estimation of the chunk size. diff --git a/src/ansys/dpf/core/inputs.py b/src/ansys/dpf/core/inputs.py index d91233f307..938f9b2d8b 100644 --- a/src/ansys/dpf/core/inputs.py +++ b/src/ansys/dpf/core/inputs.py @@ -20,12 +20,7 @@ # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. -""" -.. _ref_inputs: - -Inputs - -""" +"""Inputs.""" import weakref from textwrap import wrap @@ -37,6 +32,7 @@ class Input: """ Intermediate class internally instantiated by the :class:`ansys.dpf.core.dpf_operator.Operator`. + Used to connect inputs to the Operator. 
Examples @@ -158,6 +154,7 @@ def connect(self, inpt): self.__inc_if_ellipsis() def __call__(self, inpt): + """Allow instances to be called like a function.""" self.connect(inpt) def _update_doc_str(self, docstr, class_name): @@ -173,6 +170,7 @@ def _update_doc_str(self, docstr, class_name): self.__class__ = child_class def __str__(self): + """Provide detailed string representation of the class.""" docstr = self._spec.name + " : " type_info = self._python_expected_types.copy() if self._spec.optional: @@ -216,6 +214,7 @@ def __str__(self): def connect(self, inpt): """Connect any input (an entity or an operator output) to any input pin of this operator. + Searches for the input type corresponding to the output. Parameters @@ -305,6 +304,7 @@ def __call__(self, inpt): class Inputs(_Inputs): """ Intermediate class internally instantiated by the :class:`ansys.dpf.core.dpf_operator.Operator`. + Used to connect inputs to the Operator by automatically checking types to connect correct inputs. diff --git a/src/ansys/dpf/core/ipconfig.py b/src/ansys/dpf/core/ipconfig.py index cc166c702b..8bed4760d8 100644 --- a/src/ansys/dpf/core/ipconfig.py +++ b/src/ansys/dpf/core/ipconfig.py @@ -19,6 +19,7 @@ # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. +"""Provides for retrieving local ip address.""" import socket diff --git a/src/ansys/dpf/core/label_space.py b/src/ansys/dpf/core/label_space.py index 8c1233aec5..75a7aca8bc 100644 --- a/src/ansys/dpf/core/label_space.py +++ b/src/ansys/dpf/core/label_space.py @@ -20,9 +20,7 @@ # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. 
-""" -Internal Usage -""" +"""Internal Usage.""" import warnings import traceback @@ -37,6 +35,8 @@ class LabelSpace: + """A class representing a label space, which allows storage and management of key-value pairs (labels).""" + def __init__(self, label_space=None, obj=None, server=None): # ############################ # step 1: get server @@ -70,13 +70,42 @@ def _data_processing_core_api(self): return core_api def fill(self, label_space: Dict[str, int]): + """ + Fill the label space with the provided dictionary of labels. + + Parameters + ---------- + label_space : dict + A dictionary where keys are labels (str) and values are indices (int) to be added to the label space. + + Returns + ------- + None + This method does not return anything, it modifies the internal label space. + """ for key, index in label_space.items(): self._api.label_space_add_data(self, key, index) def __str__(self): + """ + Return a string representation of the LabelSpace instance. + + Returns + ------- + str + A string representation of the label space, formatted as a dictionary. + """ return str(dict(self)) def __iter__(self): + """ + Iterate over the labels in the label space, yielding (key, value) pairs. + + Yields + ------ + tuple + A tuple of (key, value) for each label in the label space. + """ yield from [ ( self._api.label_space_get_labels_name(self, i), @@ -86,6 +115,14 @@ def __iter__(self): ] def __dict__(self): + """ + Return a dictionary representation of the LabelSpace instance. + + Returns + ------- + dict + A dictionary where keys are label names (str) and values are label indices (int). + """ if isinstance(self._internal_obj, dict): return self._internal_obj out = {} @@ -97,6 +134,13 @@ def __dict__(self): return out def __del__(self): + """ + Destructor for cleaning up the label space resources. 
+ + Returns + ------- + None + """ try: self._deleter_func[0](self._deleter_func[1](self)) except: diff --git a/src/ansys/dpf/core/log.py b/src/ansys/dpf/core/log.py index feb71afba0..971c840a08 100644 --- a/src/ansys/dpf/core/log.py +++ b/src/ansys/dpf/core/log.py @@ -19,6 +19,7 @@ # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. +"""Provides for setting up logging.""" import logging @@ -31,7 +32,6 @@ def setup_logger(loglevel="INFO"): loglevel : str, optional The level of the logger to set up. The default is ``"INFO"``. """ - # return existing log if this function has already been called if hasattr(setup_logger, "log"): setup_logger.log.setLevel(loglevel) diff --git a/src/ansys/dpf/core/mapping_types.py b/src/ansys/dpf/core/mapping_types.py index 1454914d22..a93934a64d 100644 --- a/src/ansys/dpf/core/mapping_types.py +++ b/src/ansys/dpf/core/mapping_types.py @@ -19,6 +19,7 @@ # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. +"""Provides utilities for mapping and transforming data types between Python and C++ representations.""" import sys import inspect diff --git a/src/ansys/dpf/core/mesh_info.py b/src/ansys/dpf/core/mesh_info.py index de40c9a980..abbf33f06b 100644 --- a/src/ansys/dpf/core/mesh_info.py +++ b/src/ansys/dpf/core/mesh_info.py @@ -20,10 +20,7 @@ # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. 
-""" -MeshInfo - -""" +"""MeshInfo.""" from ansys.dpf.core import server as server_module from ansys.dpf.core.generic_data_container import GenericDataContainer @@ -66,7 +63,7 @@ def __init__( mesh_info=None, server=None, ): - """Initialize with a MeshInfo message""" + """Initialize with a MeshInfo message.""" # ############################ # step 1: get server @@ -90,6 +87,14 @@ def __init__( self._bodies_map = None def __str__(self): + """ + Return a string representation of the MeshInfo instance. + + Returns + ------- + str + A string representation of the information about a mesh space. + """ txt = "DPF MeshInfo\n" txt += "-" * 30 + "\n" txt += "with properties:\n" @@ -101,23 +106,18 @@ def __str__(self): @property def generic_data_container(self) -> GenericDataContainer: - """GenericDataContainer wrapped into the MeshInfo - that contains all the relative information of the derived class. + """GenericDataContainer wrapped into the MeshInfo that contains all the relative information of the derived class. Returns ------- :class:`ansys.dpf.core.generic_data_container.GenericDataContainer` """ - return self._generic_data_container @generic_data_container.setter def generic_data_container(self, value: GenericDataContainer): - """GenericDataContainer wrapped into the MeshInfo - that contains all the relative information of the derived class. - """ - + """GenericDataContainer wrapped into the MeshInfo that contains all the relative information of the derived class.""" if not isinstance(value, GenericDataContainer): raise ValueError("Input value must be a GenericDataContainer.") self._generic_data_container = value @@ -167,23 +167,23 @@ def set_property(self, property_name, prop): prop : Int, String, Float, Field, StringField, GenericDataContainer, Scoping object instance. """ - return self.generic_data_container.set_property(property_name, prop) @property def number_nodes(self): - """ + """Returns number of nodes in the mesh. 
+ Returns ------- number_nodes : int Number of nodes of the mesh. """ - return self.generic_data_container.get_property("num_nodes") @property def number_faces(self): - """ + """Returns number of faces in the mesh. + Returns ------- number_faces : int @@ -196,13 +196,13 @@ def number_faces(self): @property def number_elements(self): - """ + """Returns number of elements in the mesh. + Returns ------- number_elements : int Number of elements of the mesh. """ - if "num_cells" in self._generic_data_container.get_property_description(): return self.generic_data_container.get_property("num_cells") else: @@ -210,13 +210,13 @@ def number_elements(self): @property def splittable_by(self): - """ + """Return name of properties according to which the mesh can be split by. + Returns ------- splittable by which entity : StringField Name of the properties according to which the mesh can be split by. """ - if "splittable_by" in self._generic_data_container.get_property_description(): return self.generic_data_container.get_property("splittable_by") else: @@ -224,13 +224,13 @@ def splittable_by(self): @property def available_elem_types(self): - """ + """Returns available mesh element types. + Returns ------- available element types : Scoping element type available for the mesh. """ - if "available_elem_types" in self._generic_data_container.get_property_description(): return self.generic_data_container.get_property("available_elem_types") else: @@ -238,13 +238,13 @@ def available_elem_types(self): @property def part_names(self): - """ + """Return part names of the mesh. + Returns ------- part_names : StringField part names of the mesh (if it can be split by parts) """ - if "part_names" in self._generic_data_container.get_property_description(): return self.generic_data_container.get_property("part_names") else: @@ -275,7 +275,8 @@ def parts(self) -> dict: @property def part_scoping(self): - """ + """Return part scoping of the mesh. 
+ Returns ------- part_scoping : Scoping @@ -284,7 +285,6 @@ def part_scoping(self): .. warning: Currently unavailable for LegacyGrpc servers. """ - if "part_scoping" in self._generic_data_container.get_property_description(): return self.generic_data_container.get_property("part_scoping") else: @@ -292,13 +292,13 @@ def part_scoping(self): @property def body_names(self): - """ + """Return body names of the mesh. + Returns ------- body_names : StringField body names of the mesh (if it can be split by bodies) """ - if "body_names" in self._generic_data_container.get_property_description(): return self.generic_data_container.get_property("body_names") else: @@ -306,13 +306,13 @@ def body_names(self): @property def body_scoping(self): - """ + """Return body scoping of the mesh. + Returns ------- body_scoping : Scoping body Scoping of the mesh (if it can be split by bodies) """ - if "body_scoping" in self._generic_data_container.get_property_description(): return self.generic_data_container.get_property("body_scoping") else: @@ -343,7 +343,8 @@ def bodies(self) -> dict: @property def zone_names(self): - """ + """Return zone names of the mesh. + Returns ------- zone_names : StringField @@ -352,7 +353,6 @@ def zone_names(self): .. warning: Currently unavailable for LegacyGrpc servers. """ - if "zone_names" in self._generic_data_container.get_property_description(): return self.generic_data_container.get_property("zone_names") else: @@ -435,7 +435,8 @@ def cell_zones(self) -> dict: @property def zone_scoping(self): - """ + """Return zone scoping of the mesh. + Returns ------- zone_scoping : Scoping @@ -444,7 +445,6 @@ def zone_scoping(self): .. warning: Currently unavailable for LegacyGrpc servers. 
""" - if "zone_scoping" in self._generic_data_container.get_property_description(): return self.generic_data_container.get_property("zone_scoping") else: @@ -452,24 +452,20 @@ def zone_scoping(self): @number_nodes.setter def number_nodes(self, value): - """Set the number of nodes in the mesh""" - + """Set the number of nodes in the mesh.""" self.generic_data_container.set_property("num_nodes", value) @number_elements.setter def number_elements(self, value): - """Set the number of elements in the mesh""" - + """Set the number of elements in the mesh.""" self.generic_data_container.set_property("num_elements", value) @splittable_by.setter def splittable_by(self, value): - """Set name of the properties according to which the mesh can be split by""" - + """Set name of the properties according to which the mesh can be split by.""" self.generic_data_container.set_property("splittable_by", value) @available_elem_types.setter def available_elem_types(self, value): - """Set the available element types""" - + """Set the available element types.""" self.generic_data_container.set_property("available_elem_types", value) diff --git a/src/ansys/dpf/core/mesh_scoping_factory.py b/src/ansys/dpf/core/mesh_scoping_factory.py index 51712f7f57..836fe24d58 100644 --- a/src/ansys/dpf/core/mesh_scoping_factory.py +++ b/src/ansys/dpf/core/mesh_scoping_factory.py @@ -21,8 +21,7 @@ # SOFTWARE. """ -mesh_scoping_factory - +mesh_scoping_factory. Contains functions to simplify creating mesh scopings. """ diff --git a/src/ansys/dpf/core/meshed_region.py b/src/ansys/dpf/core/meshed_region.py index fbd437fdfd..e1900da41f 100644 --- a/src/ansys/dpf/core/meshed_region.py +++ b/src/ansys/dpf/core/meshed_region.py @@ -20,10 +20,7 @@ # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. 
-""" -MeshedRegion - -""" +"""MeshedRegion.""" import traceback import warnings @@ -47,7 +44,8 @@ def update_grid(func): - # Decorate mesh setters to centralize the update logic of pyvista objects. + """Decorate mesh setters to centralize the update logic of pyvista objects.""" + def wrapper(*args, **kwargs): mesh = args[0] if mesh._full_grid is not None: @@ -141,7 +139,8 @@ def __init__(self, num_nodes=None, num_elements=None, mesh=None, server=None): self.as_linear = None def _get_scoping(self, loc=locations.nodal): - """ + """Return ids of the elements or nodes of the mesh. + Parameters ---------- loc : str or ansys.dpf.core.common.locations, optional @@ -258,7 +257,7 @@ def unit(self): @unit.setter def unit(self, value): """ - Unit type. + Set unit type. Parameters ---------- @@ -287,12 +286,14 @@ def _set_unit(self, unit): return self._api.meshed_region_set_unit(self, unit) def __del__(self): + """Delete this instance of the meshed region.""" try: self._deleter_func[0](self._deleter_func[1](self)) except: warnings.warn(traceback.format_exc()) def __str__(self): + """Return string representation of the meshed region.""" from ansys.dpf.core.core import _description return _description(self._internal_obj, self._server) @@ -300,7 +301,7 @@ def __str__(self): @property def available_property_fields(self): """ - Returns a list of available property fields + Returns a list of available property fields. Returns ------- @@ -316,8 +317,7 @@ def available_property_fields(self): def property_field(self, property_name): """ - Property field getter. It can be coordinates (field), - element types (property field)... + Property field getter. It can be coordinates (field), element types (property field)... Returns ------- @@ -328,8 +328,7 @@ def property_field(self, property_name): @version_requires("3.0") def set_property_field(self, property_name, value): """ - Property field setter. It can be coordinates (field), - element types (property field)... + Property field setter. 
It can be coordinates (field), element types (property field)... Parameters ---------- @@ -481,9 +480,6 @@ def deform_by(self, deform_by, scale_factor=1.0): scale_factor : float, Field, FieldsContainer, optional Used to scale the mesh deformation. Defaults to 1.0. Can be a scalar Field (or a FieldsContainer with only one Field) to get a spatially non-homogeneous scaling. - Returns - ------- - """ from ansys.dpf.core.operators.math import add, scale, unit_convert @@ -665,8 +661,7 @@ def deep_copy(self, server=None): def field_of_properties(self, property_name): """ - Returns the ``Field`` or ``PropertyField`` associated - to a given property of the mesh + Return the ``Field`` or ``PropertyField`` associated to a given property of the mesh. Parameters ---------- diff --git a/src/ansys/dpf/core/meshes_container.py b/src/ansys/dpf/core/meshes_container.py index 93cc5b6860..a5c78dc4e4 100644 --- a/src/ansys/dpf/core/meshes_container.py +++ b/src/ansys/dpf/core/meshes_container.py @@ -22,7 +22,7 @@ # -*- coding: utf-8 -*- """ -MeshesContainer +MeshesContainer. Contains classes associated with the DPF MeshesContainer. """ @@ -34,7 +34,6 @@ class MeshesContainer(CollectionBase[meshed_region.MeshedRegion]): - entries_type = meshed_region.MeshedRegion """Represents a meshes container, which contains meshes split on a given space. Parameters @@ -49,6 +48,8 @@ class MeshesContainer(CollectionBase[meshed_region.MeshedRegion]): global server. 
""" + entries_type = meshed_region.MeshedRegion + def __init__(self, meshes_container=None, server=None): super().__init__(collection=meshes_container, server=server) if self._internal_obj is None: @@ -58,11 +59,11 @@ def __init__(self, meshes_container=None, server=None): self._internal_obj = self._api.collection_of_mesh_new() def create_subtype(self, obj_by_copy): + """Create a meshed region sub type.""" return meshed_region.MeshedRegion(mesh=obj_by_copy, server=self._server) def plot(self, fields_container=None, deform_by=None, scale_factor=1.0, **kwargs): - """Plot the meshes container with a specific result if - fields_container is specified. + """Plot the meshes container with a specific result if fields_container is specified. Parameters ---------- @@ -178,7 +179,7 @@ def get_mesh(self, label_space_or_index): return super()._get_entry(label_space_or_index) def __getitem__(self, key): - """Retrieves the mesh at a requested index. + """Retrieve the mesh at a requested index. Parameters ---------- diff --git a/src/ansys/dpf/core/misc.py b/src/ansys/dpf/core/misc.py index bc0b7354b7..7f6ed9fe3c 100644 --- a/src/ansys/dpf/core/misc.py +++ b/src/ansys/dpf/core/misc.py @@ -86,7 +86,9 @@ def is_ubuntu(): def get_ansys_path(ansys_path=None): - """Give input path back if given, else look for ANSYS_DPF_PATH, + """Return the input path if provided; otherwise, check ANSYS_DPF_PATH, AWP_ROOT, and the latest ansys-dpf-server modules. + + Give input path back if given, else look for ANSYS_DPF_PATH, then among AWP_ROOT and installed ansys-dpf-server modules to take the latest available. Parameters @@ -146,7 +148,9 @@ def _find_latest_ansys_versions(): def find_ansys(): - """Search for a standard ANSYS environment variable (AWP_ROOTXXX) or a standard installation + """Check ANSYS environment variables or default paths for the latest installation. 
+ + Search for a standard ANSYS environment variable (AWP_ROOTXXX) or a standard installation location to find the path to the latest Ansys installation. Returns @@ -199,8 +203,10 @@ def find_ansys(): def is_pypim_configured(): """Check if the environment is configured for PyPIM, without using pypim. + This method is equivalent to ansys.platform.instancemanagement.is_configured(). It's reproduced here to avoid having hard dependencies. + Returns ------- bool diff --git a/src/ansys/dpf/core/model.py b/src/ansys/dpf/core/model.py index 88c4b4bf1f..470b8637ea 100644 --- a/src/ansys/dpf/core/model.py +++ b/src/ansys/dpf/core/model.py @@ -21,13 +21,10 @@ # SOFTWARE. """ -.. _ref_model: - -Model +Model. Module contains the Model class to manage file result models. - """ from ansys import dpf @@ -66,7 +63,6 @@ class Model: def __init__(self, data_sources=None, server=None): """Initialize connection with DPF server.""" - if server is None: server = dpf.core._global_server() @@ -147,8 +143,8 @@ def results(self): Result provider helper wrapping all types of provider available for a given result file. 
- Examples - -------- + Examples + -------- >>> from ansys.dpf import core as dpf >>> from ansys.dpf.core import examples >>> model = dpf.Model(examples.find_electric_therm()) @@ -220,6 +216,7 @@ def operator(self, name): return op def __str__(self): + """Return string representation of the model.""" txt = "DPF Model\n" txt += "-" * 30 + "\n" txt += str(self.results) @@ -264,9 +261,7 @@ def plot(self, color="w", show_edges=True, **kwargs): @property def mesh_by_default(self): - """If true, the mesh is connected by default to operators - supporting the mesh input - """ + """If true, the mesh is connected by default to operators supporting the mesh input.""" return self._mesh_by_default @mesh_by_default.setter @@ -439,7 +434,7 @@ def _set_data_sources(self, var_inp): self._cache_streams_provider() def _load_result_info(self): - """Returns a result info object""" + """Return a result info object.""" op = Operator("ResultInfoProvider", server=self._server) op.inputs.connect(self._stream_provider.outputs) try: @@ -455,7 +450,7 @@ def _load_result_info(self): return result_info def _load_mesh_info(self): - """Returns a mesh info object""" + """Return a mesh info object.""" op = Operator("mesh_info_provider", server=self._server) op.inputs.connect(self._stream_provider.outputs) try: @@ -560,7 +555,7 @@ def meshes_container(self): @property @version_requires("4.0") def meshes_provider(self): - """Meshes provider operator + """Meshes provider operator. This operator reads a meshes container (with potentially time or space varying meshes) from the result files. diff --git a/src/ansys/dpf/core/nodes.py b/src/ansys/dpf/core/nodes.py index 61ace034b6..a0cc905784 100644 --- a/src/ansys/dpf/core/nodes.py +++ b/src/ansys/dpf/core/nodes.py @@ -20,12 +20,7 @@ # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. -""" -.. 
_ref_nodes_apis: - -Nodes - -""" +"""Nodes.""" import numpy as np from ansys.dpf.core.common import nodal_properties, locations @@ -113,6 +108,7 @@ def nodal_connectivity(self): return self._mesh.nodes.nodal_connectivity_field.get_entity_data(self.index) def __str__(self): + """Return string representation of the node.""" txt = "DPF Node %7d\n" % self.id txt += "Index: %7d\n" % self.index txt += f"Location: {self.coordinates}\n" @@ -146,16 +142,19 @@ def __init__(self, mesh): self._mapping_id_to_index = None def __str__(self): + """Return custom str representation with information about number of nodes.""" return f"DPF Node collection with {len(self)} nodes\n" def __getitem__(self, index): - """Returns node based on index""" + """Return node based on index.""" return self.node_by_index(index) def __len__(self): + """Return the number of nodes.""" return self.n_nodes def __iter__(self): + """Provide for iterating through the nodes.""" for i in range(len(self)): yield self[i] @@ -164,12 +163,12 @@ def node_by_id(self, id): return self.__get_node(nodeid=id) def node_by_index(self, index): - """Array of node coordinates ordered by index""" + """Array of node coordinates ordered by index.""" return self.__get_node(nodeindex=index) def __get_node(self, nodeindex=None, nodeid=None): """ - Retrieves the node by its ID or index. + Retrieve the node by its ID or index. Parameters ---------- @@ -268,7 +267,7 @@ def coordinates_field(self, property_field): @property def nodal_connectivity_field(self): """ - Nodal connectivity field + Nodal connectivity field. Field containing each node ID for the elements indices connected to the given node. 
@@ -302,6 +301,7 @@ def _build_mapping_id_to_index(self): @property def mapping_id_to_index(self): + """Property storing mapping between IDs and indices of the entity.""" if self._mapping_id_to_index is None: self._mapping_id_to_index = self._build_mapping_id_to_index() return self._mapping_id_to_index diff --git a/src/ansys/dpf/core/operator_specification.py b/src/ansys/dpf/core/operator_specification.py index 4b5bddecdd..c3b134a9f2 100644 --- a/src/ansys/dpf/core/operator_specification.py +++ b/src/ansys/dpf/core/operator_specification.py @@ -21,9 +21,7 @@ # SOFTWARE. """ -.. _ref_operator_specification: - -Operator Specification +Operator Specification. The OperatorSpecification Provides a documentation for each Operator """ @@ -43,7 +41,7 @@ class PinSpecification: - """Documents an input or output pin of an Operator + """Documents an input or output pin of an Operator. Parameters ---------- @@ -99,7 +97,8 @@ def __init__( @property def type_names(self) -> list[str]: - """ + """Return the list of accepted types. + Returns ------- list[str], list[type] @@ -144,6 +143,7 @@ def _get_copy(other, changed_types) -> PinSpecification: ) def __repr__(self): + """Provide more detail in the representation of the instance.""" return "{class_name}({params})".format( class_name=self.__class__.__name__, params=", ".join( @@ -154,11 +154,13 @@ def __repr__(self): ) def __eq__(self, other): + """One instance is equal to the other if their string representation is the same.""" return str(self) == str(other) class ConfigSpecification(dict): - """Dictionary of the available configuration options and their specification + """Dictionary of the available configuration options and their specification. 
+ (:class:`ansys.dpf.core.operator_specification.ConfigOptionSpec`) """ @@ -167,8 +169,9 @@ def __init__(self, *arg, **kw): class ConfigOptionSpec: - """Documentation of a configuration option available for a given - Operator (:class:`ansys.dpf.core.Operator`) + """Documentation of a configuration option available for a given Operator. + + (:class:`ansys.dpf.core.Operator`) Attributes ---------- @@ -207,6 +210,7 @@ def __init__(self, name: str, type_names: list, default_value_str: str, document self.document = document def __repr__(self): + """Provide more detail in the representation of the instance.""" return "{class_name}({params})".format( class_name=self.__class__.__name__, params=", ".join( @@ -217,25 +221,29 @@ def __repr__(self): class SpecificationBase: + """Interface description for the specification base class.""" + @property @abc.abstractmethod def description(self) -> Union[str, None]: + """To be implemented in the subclasses.""" pass @property @abc.abstractmethod def inputs(self) -> dict: + """To be implemented in the subclasses.""" pass @property @abc.abstractmethod def outputs(self) -> dict: + """To be implemented in the subclasses.""" pass class Specification(SpecificationBase): - """Documents an Operator with its description (what the Operator does), - its inputs and outputs and some properties. + """Documents an Operator with its description (what the Operator does), its inputs and outputs and some properties. Examples -------- @@ -306,11 +314,14 @@ def __init__( self._config_specification = None def __str__(self): + """Provide more details in the string representation of the instance.""" return "Description:\n" + str(self.description) + "\nProperties:\n" + str(self.properties) @property def properties(self) -> dict: - """some additional properties of the Operator, like the category, the exposure, + """Additional properties of the Operator. 
+ + Some additional properties of the Operator, like the category, the exposure, the scripting and user names, and the plugin Examples @@ -338,7 +349,7 @@ def properties(self) -> dict: @property def description(self) -> str: - """Returns a description of the operation applied by the Operator + """Returns a description of the operation applied by the Operator. Returns ------- @@ -357,7 +368,7 @@ def description(self) -> str: @property def inputs(self) -> dict: - """Returns a dictionary mapping the input pin numbers to their ``PinSpecification`` + """Returns a dictionary mapping the input pin numbers to their ``PinSpecification``. Returns ------- @@ -380,7 +391,7 @@ def inputs(self) -> dict: @property def outputs(self) -> dict: - """Returns a dictionary mapping the output pin numbers to their ``PinSpecification`` + """Returns a dictionary mapping the output pin numbers to their ``PinSpecification``. Returns ------- @@ -441,7 +452,7 @@ def _fill_pins(self, binput, to_fill): @property def config_specification(self) -> ConfigSpecification: - """Documents the available configuration options supported by the Operator + """Documents the available configuration options supported by the Operator. 
Returns ------- @@ -471,6 +482,8 @@ def config_specification(self) -> ConfigSpecification: class CustomConfigOptionSpec(ConfigOptionSpec): + """Custom documentation of a configuration option available for a given operator.""" + def __init__(self, option_name: str, default_value, document: str): type_names = [mapping_types.map_types_to_cpp[type(default_value).__name__]] super().__init__( @@ -482,12 +495,16 @@ def __init__(self, option_name: str, default_value, document: str): class Exposures: + """Exposures class.""" + private = "private" public = "public" hidden = "hidden" class Categories: + """Categories class.""" + result = "result" math = "math" mesh = "mesh" @@ -558,11 +575,33 @@ def __init__( ) def __repr__(self): + """ + Return a string representation of the SpecificationProperties instance. + + Returns + ------- + str + A string representation of the instance, including all attributes + and their values. + """ keys = sorted(self.__dict__) items = ("{}={!r}".format(k, self.__dict__[k]) for k in keys) return "{}({})".format(type(self).__name__, ", ".join(items)) def __setitem__(self, key, value): + """ + Set the value of a specified attribute. + + If a specification is defined, the value is also updated in the + operator specification. + + Parameters + ---------- + key : str + The name of the attribute to set. + value : any + The value to assign to the attribute. + """ if self._spec is not None: if value is not None: self._spec._api.operator_specification_set_property(self._spec, key, value) @@ -570,14 +609,43 @@ def __setitem__(self, key, value): setattr(self, key, value) def __getitem__(self, item: str): + """ + Get the value of a specified attribute. + + Parameters + ---------- + item : str + The name of the attribute to retrieve. + + Returns + ------- + any + The value of the specified attribute. + """ return getattr(self, item) def __eq__(self, other): + """ + Check if two SpecificationProperties instances are equal. 
+ + Parameters + ---------- + other : SpecificationProperties + The other instance to compare against. + + Returns + ------- + bool + True if the two instances have the same attributes and values, + False otherwise. + """ return self.__dict__ == other.__dict__ class CustomSpecification(Specification): - """Allows to create an Operator Specification with its description (what the Operator does), + """Create an operator specification with its description. + + Allows to create an Operator Specification with its description (what the Operator does), its inputs and outputs and some properties. Inherits from Specification (which has only getters) to implement setters. @@ -631,7 +699,7 @@ def __init__(self, description=None, server=None): @property @version_requires("4.0") def description(self) -> str: - """Description of the operation applied by the Operator""" + """Description of the operation applied by the Operator.""" return super().description @description.setter @@ -641,7 +709,7 @@ def description(self, value) -> str: @property @version_requires("4.0") def inputs(self) -> dict: - """Dictionary mapping the input pin numbers to their ``PinSpecification`` + """Dictionary mapping the input pin numbers to their ``PinSpecification``. Returns ------- @@ -684,7 +752,7 @@ def inputs(self, val: dict): @property @version_requires("4.0") def outputs(self) -> dict: - """Returns a dictionary mapping the output pin numbers to their ``PinSpecification`` + """Returns a dictionary mapping the output pin numbers to their ``PinSpecification``. Returns ------- @@ -727,7 +795,7 @@ def outputs(self, val: dict): @property @version_requires("4.0") def config_specification(self) -> ConfigSpecification: - """Documents the available configuration options supported by the Operator + """Documents the available configuration options supported by the Operator. 
Returns ------- @@ -767,8 +835,11 @@ def config_specification(self, val: list): @property @version_requires("4.0") def properties(self) -> SpecificationProperties: - """Returns some additional properties of the Operator, like the category, the exposure, - the scripting and user names and the plugin""" + """Return additional properties of the Operator. + + Returns some additional properties of the Operator, like the category, the exposure, + the scripting and user names and the plugin + """ return SpecificationProperties(**super().properties, spec=self) @properties.setter diff --git a/src/ansys/dpf/core/outputs.py b/src/ansys/dpf/core/outputs.py index dcad4feb34..38be6d23a1 100644 --- a/src/ansys/dpf/core/outputs.py +++ b/src/ansys/dpf/core/outputs.py @@ -20,12 +20,7 @@ # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. -""" -.. _ref_outputs: - -Outputs - -""" +"""Outputs.""" from ansys.dpf.core.mapping_types import map_types_to_python from ansys.dpf.core.common import types @@ -36,6 +31,7 @@ class Output: """ Intermediate class internally instantiated by the :class:`ansys.dpf.core.dpf_operator.Operator`. + Used to evaluate and get outputs of the Operator. Examples @@ -59,7 +55,7 @@ def __init__(self, spec, pin, operator): self._python_expected_types.append(map_types_to_python[cpp_type]) def get_data(self): - """Retrieves the output of the operator.""" + """Retrieve the output of the operator.""" type_output = self._spec.type_names[0] if type_output == "abstract_meshed_region": @@ -101,9 +97,18 @@ def get_data(self): return derived_types[0][0](output) def __call__(self): + """Allow instances of the class to be callable for data retrieval purposes.""" return self.get_data() def __str__(self): + """ + Return a string representation of the Output instance. + + Returns + ------- + str + A string representation of the instance. 
+ """ docstr = self._spec.name if self._spec.optional: docstr += " (optional)" @@ -116,7 +121,8 @@ def __str__(self): class _Outputs: - """ + """Base class subclassed by the :class:`ansys.dpf.core.outputs.Output` class. + Parameters ---------- dict_outputs : dict @@ -185,6 +191,7 @@ def _modify_output_spec_with_one_type(output_spec, type): class Outputs(_Outputs): """ Intermediate class internally instantiated by the :class:`ansys.dpf.core.dpf_operator.Operator`. + Used to list the available :class:`ansys.dpf.core.outputs.Output` s of the Operator. Examples diff --git a/src/ansys/dpf/core/path_utilities.py b/src/ansys/dpf/core/path_utilities.py index 98184460e9..f5326ce6b9 100644 --- a/src/ansys/dpf/core/path_utilities.py +++ b/src/ansys/dpf/core/path_utilities.py @@ -21,7 +21,7 @@ # SOFTWARE. """ -path_utilities +path_utilities. Offer tools similar to os.path but taking the os of the server into account to create path. @@ -35,8 +35,8 @@ def join(*args, **kwargs): - """Join two strings to form a path, following the server - architecture. + """Join two strings to form a path, following the server architecture. + Using a server version below 3.0, please ensure that the python client and the server's os are similar before using this method. @@ -89,6 +89,7 @@ def join(*args, **kwargs): def to_server_os(path, server=None): + """Return path to the server depending on the os.""" path = str(path) server = server_module.get_or_create_server(server) path = server.docker_config.replace_with_mounted_volumes(path) diff --git a/src/ansys/dpf/core/plotter.py b/src/ansys/dpf/core/plotter.py index 4f37685e3d..6246abd2d1 100644 --- a/src/ansys/dpf/core/plotter.py +++ b/src/ansys/dpf/core/plotter.py @@ -21,7 +21,7 @@ # SOFTWARE. """ -Plotter +Plotter. This module contains the DPF plotter class. 
@@ -51,8 +51,7 @@ class _InternalPlotterFactory: - """ - Factory for _InternalPlotter based on the backend.""" + """Factory for _InternalPlotter based on the backend.""" @staticmethod def get_plotter_class(): @@ -393,8 +392,7 @@ def _set_scalar_bar_title(kwargs): class DpfPlotter: - """DpfPlotter class. Can be used in order to plot - results over a mesh. + """DpfPlotter class. Can be used in order to plot results over a mesh. The current DpfPlotter is a PyVista based object. @@ -437,7 +435,7 @@ def labels(self): """Return a list of labels. Returns - -------- + ------- list List of Label(s). Each list member or member group will share same properties. @@ -474,12 +472,15 @@ def add_node_labels( ) def add_points(self, points, field=None, **kwargs): + """Add points to the plot.""" self._internal_plotter.add_points(points, field, **kwargs) def add_line(self, points, field=None, **kwargs): + """Add lines to the plot.""" self._internal_plotter.add_line(points, field, **kwargs) def add_plane(self, plane, field=None, **kwargs): + """Add a plane to the plot.""" self._internal_plotter.add_plane(plane, field, **kwargs) def add_mesh(self, meshed_region, deform_by=None, scale_factor=1.0, **kwargs): diff --git a/src/ansys/dpf/core/plugins.py b/src/ansys/dpf/core/plugins.py index 435c0f52ec..d0457eaa23 100644 --- a/src/ansys/dpf/core/plugins.py +++ b/src/ansys/dpf/core/plugins.py @@ -21,7 +21,7 @@ # SOFTWARE. """ -Python DPF plugins utilities +Python DPF plugins utilities. Contains the utilities specific to installing and using Python DPF plugins. diff --git a/src/ansys/dpf/core/property_field.py b/src/ansys/dpf/core/property_field.py index 0545027fb1..c108f635cc 100644 --- a/src/ansys/dpf/core/property_field.py +++ b/src/ansys/dpf/core/property_field.py @@ -20,10 +20,7 @@ # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. 
-""" -PropertyField - -""" +"""PropertyField.""" import numpy as np from ansys.dpf.core.check_version import version_requires @@ -202,14 +199,17 @@ def location(self, value): @property def component_count(self): + """Return the number of components.""" return self._api.csproperty_field_elementary_data_size(self) @property def elementary_data_count(self): + """Return the number of elementary data.""" return self._api.csproperty_field_get_number_elementary_data(self) @property def size(self): + """Return the data size.""" return self._api.csproperty_field_get_data_size(self) def _set_scoping(self, scoping): @@ -221,6 +221,7 @@ def _get_scoping(self): ) def get_entity_data(self, index): + """Return the data associated with the entity by index.""" try: vec = dpf_vector.DPFVectorInt(client=self._server.client) self._api.csproperty_field_get_entity_data_for_dpf_vector( @@ -236,6 +237,7 @@ def get_entity_data(self, index): return data def get_entity_data_by_id(self, id): + """Return the data associated with entity by id.""" try: vec = dpf_vector.DPFVectorInt(client=self._server.client) self._api.csproperty_field_get_entity_data_by_id_for_dpf_vector( @@ -253,6 +255,11 @@ def get_entity_data_by_id(self, id): return data def append(self, data, scopingid): + """ + Append data to the property field. + + This method appends data to the property field for a specific scoping ID. + """ self._api.csproperty_field_push_back(self, scopingid, _get_size_of_list(data), data) def _get_data_pointer(self): @@ -352,7 +359,7 @@ def name(self): @name.setter @version_requires("8.1") def name(self, value): - """Change the name of the property field + """Change the name of the property field. Parameters ---------- diff --git a/src/ansys/dpf/core/property_fields_container.py b/src/ansys/dpf/core/property_fields_container.py index f2a4dfa661..d3c6ae3687 100644 --- a/src/ansys/dpf/core/property_fields_container.py +++ b/src/ansys/dpf/core/property_fields_container.py @@ -21,10 +21,8 @@ # SOFTWARE. 
""" -.. _ref_property_fields_container: +MockPropertyFieldsContainer. -_MockPropertyFieldsContainer -============================ Contains classes associated with the _MockPropertyFieldsContainer. """ @@ -45,7 +43,7 @@ class _LabelSpaceKV: """Class for internal use to associate a label space with a field.""" def __init__(self, _dict: Dict[str, int], _field: dpf.Field): - """Constructs an association between a dictionary and a field.""" + """Construct an association between a dictionary and a field.""" self._dict = _dict self._field = _field @@ -64,7 +62,7 @@ def field(self, value: dpf.Field): self._field = value def __str__(self): - """Returns a string representation of the association.""" + """Return a string representation of the association.""" field_str = str(self._field).replace("\n", "\n\t\t\t") return f"Label Space: {self._dict} with field\n\t\t\t{field_str}" @@ -77,7 +75,7 @@ def __init__( fields_container: _MockPropertyFieldsContainer = None, server: BaseServer = None, ): - """Constructs a _MockPropertyFieldsContainer.""" + """Construct a _MockPropertyFieldsContainer.""" # default constructor self._labels = [] # used by Dataframe self.scopings = [] @@ -103,7 +101,7 @@ def __init__( # Collection def __str__(self) -> str: - """Returns a string representation of a _MockPropertyFieldsContainer.""" + """Return a string representation of a _MockPropertyFieldsContainer.""" txt = f"DPF PropertyFieldsContainer with {len(self)} fields\n" for idx, ls in enumerate(self.label_spaces): txt += f"\t {idx}: {ls}\n" @@ -117,14 +115,14 @@ def labels(self) -> List[str]: @labels.setter def labels(self, labels: List[str]): - """Sets all the label of the _MockPropertyFieldsContainer.""" + """Set all the label of the _MockPropertyFieldsContainer.""" if len(self._labels) != 0: raise ValueError("labels already set") for l in labels: self.add_label(l) def add_label(self, label: str): - """Adds a label.""" + """Add a label.""" if label not in self._labels: self._labels.append(label) 
self.scopings.append([]) @@ -140,14 +138,14 @@ def get_label_space(self, idx) -> Dict: # used by Dataframe def get_label_scoping(self, label="time") -> dpf.Scoping: - """Returns a scoping on the fields concerned by the given label.""" + """Return a scoping on the fields concerned by the given label.""" if label in self.labels: scoping_ids = self.scopings[self.labels.index(label)] return dpf.Scoping(ids=scoping_ids, location="") raise KeyError(f"label {label} not found") def add_entry(self, label_space: Dict[str, int], value: dpf.Field): - """Adds a PropertyField associated with a dictionary.""" + """Add a PropertyField associated with a dictionary.""" new_id = self._new_id() if hasattr(value, "_server"): @@ -169,7 +167,7 @@ def add_field(self, label_space: Dict[str, int], field: dpf.Field): self.add_entry(label_space, field) def get_entries(self, label_space_or_index: Union[Dict[str, int], int]): - """Returns a list of fields from a complete or partial specification of a dictionary.""" + """Return a list of fields from a complete or partial specification of a dictionary.""" if isinstance(label_space_or_index, int): idx: int = label_space_or_index return [self.label_spaces[idx].field] @@ -198,7 +196,7 @@ def get_entries(self, label_space_or_index: Union[Dict[str, int], int]): raise KeyError(f"Key {bad_key} is not in labels: {self.labels}") def get_entry(self, label_space_or_index: Union[Dict[str, int], int]): - """Returns the field or (first field found) corresponding to the given dictionary.""" + """Return the field or (first field found) corresponding to the given dictionary.""" ret = self.get_entries(label_space_or_index) if len(ret) != 0: @@ -207,7 +205,7 @@ def get_entry(self, label_space_or_index: Union[Dict[str, int], int]): raise ValueError("Could not find corresponding entry") def _new_id(self) -> int: - """Helper method generating a new id when calling add_entry(...).""" + """Helper-method generating a new id when calling add_entry(...).""" if 
len(self.ids) == 0: self.last_id = 1 return self.last_id @@ -217,11 +215,11 @@ def _new_id(self) -> int: # used by Dataframe def get_fields(self, label_space: Dict[str, int]) -> List[dpf.Field]: - """Returns the list of fields associated with given label space.""" + """Return the list of fields associated with given label space.""" return self.get_entries(label_space) def get_field(self, label_space_or_index: Union[Dict[str, int], int]) -> dpf.Field: - """Retrieves the field at a requested index or label space.""" + """Retrieve the field at a requested index or label space.""" return self.get_entry(label_space_or_index) # used by Dataframe @@ -237,7 +235,7 @@ def _set_field(self, ls_idx, field): self.label_spaces[ls_idx].field = field def rescope(self, scoping: dpf.Scoping): # Used by post.Dataframe - """Helper function to reproduce functionality of rescope_fc Operator.""" + """Helper-function to reproduce functionality of rescope_fc Operator.""" copy_fc = _MockPropertyFieldsContainer(self, server=None) for idx, label_space in enumerate(copy_fc.label_spaces): pfield = PropertyField(location=label_space.field.location) diff --git a/src/ansys/dpf/core/result_info.py b/src/ansys/dpf/core/result_info.py index b87adab2c3..e4aaeab3c6 100644 --- a/src/ansys/dpf/core/result_info.py +++ b/src/ansys/dpf/core/result_info.py @@ -20,10 +20,7 @@ # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. -""" -ResultInfo - -""" +"""ResultInfo.""" import traceback import warnings @@ -55,9 +52,7 @@ @unique class physics_types(Enum): - """ - ``'Physics_types'`` enumerates the different types of physics that an analysis can have. 
- """ + """``'Physics_types'`` enumerates the different types of physics that an analysis can have.""" mechanical = 0 thermal = 1 @@ -132,7 +127,7 @@ def __init__( analysis_type: analysis_types = None, physics_type: physics_types = None, ): - """Initialize with a ResultInfo message""" + """Initialize with a ResultInfo message.""" # ############################ # step 1: get server self._server = server_module.get_or_create_server( @@ -168,6 +163,7 @@ def __init__( raise NotImplementedError("Cannot create a new ResultInfo via gRPC.") def __str__(self): + """Return a string representation of the instance providing detailed information.""" try: txt = ( "%s analysis\n" % self.analysis_type.capitalize() @@ -212,6 +208,7 @@ def _names(self): return [item.name for item in self.available_results] def __contains__(self, value): + """Check if a given name is present in available results.""" return value in self._names def add_result( @@ -313,7 +310,8 @@ def physics_type(self): return self._get_physics_type() def _get_physics_type(self): - """ + """Return the physics type associated with the result. + Returns ------- physics_type : str @@ -429,8 +427,7 @@ def main_title(self): @property def available_results(self): - """Available results, containing all information about results - present in the result files. + """Available results, containing all information about results present in the result files. Returns ------- @@ -451,7 +448,8 @@ def _data_processing_core_api(self): return core_api def _get_result(self, numres): - """ + """Return requested result. 
+ Parameters ---------- numres : int @@ -459,7 +457,7 @@ def _get_result(self, numres): Returns ------- - result : Result + result : available_result.AvailableResult """ if numres >= len(self): raise IndexError("There are only %d results" % len(self)) @@ -550,7 +548,7 @@ def _get_result(self, numres): @property @version_requires("5.0") def available_qualifier_labels(self): - """Returns a list of labels defining result qualifiers + """Returns a list of labels defining result qualifiers. Returns ------- @@ -568,7 +566,7 @@ def available_qualifier_labels(self): @version_requires("5.0") def qualifier_label_support(self, label): - """Returns what supports an available qualifier label. + """Return what supports an available qualifier label. Parameters ---------- @@ -588,16 +586,38 @@ def qualifier_label_support(self, label): ) def __len__(self): + """ + Return the number of results available. + + If an exception occurs while attempting to retrieve the number of results, + the method returns 0. + + Returns + ------- + int + The number of results, or 0 if an error occurs. + """ try: return self.n_results except Exception as e: return 0 def __iter__(self): + """Return an iterator over the results.""" for i in range(len(self)): yield self[i] def __getitem__(self, key): + """ + Retrieve a result by index or name. + + Raises + ------ + ValueError + If the key is a string and not found in the result names. + TypeError + If the key is not an integer or string. + """ if isinstance(key, int): index = key elif isinstance(key, str): @@ -610,6 +630,17 @@ def __getitem__(self, key): return self._get_result(index) def __del__(self): + """ + Clean up resources associated with the instance. + + This method calls the deleter function to release resources. If an exception + occurs during deletion, a warning is issued. + + Raises + ------ + Warning + If an exception occurs while attempting to delete resources. 
+ """ try: self._deleter_func[0](self._deleter_func[1](self)) except: diff --git a/src/ansys/dpf/core/results.py b/src/ansys/dpf/core/results.py index 98238f9e88..e775c3933e 100644 --- a/src/ansys/dpf/core/results.py +++ b/src/ansys/dpf/core/results.py @@ -21,12 +21,11 @@ # SOFTWARE. """ -.. _ref_results: - -Results +Results. This module contains the Results and Result classes that are created by the model -to easily access results in result files.""" +to easily access results in result files. +""" import functools @@ -51,8 +50,8 @@ class Results: With this wrapper, time and mesh scopings can easily be customized. - Examples - -------- + Examples + -------- Create a displacement result from the model and choose its time and mesh scopings. >>> from ansys.dpf import core as dpf @@ -66,8 +65,8 @@ class Results: With this wrapper, time and mesh scopings, location, and more can easily be customized. - Examples - -------- + Examples + -------- Create a stress result from the model and choose its time and mesh scopings. >>> from ansys.dpf import core as dpf @@ -80,8 +79,8 @@ class Results: Result provider helper wrapping all types of providers available for a given result file. - Examples - -------- + Examples + -------- >>> from ansys.dpf import core as dpf >>> from ansys.dpf.core import examples >>> model = dpf.Model(examples.find_electric_therm()) @@ -124,6 +123,21 @@ def __init__( self._str = str(result_info) def __result__(self, result_type, *args): + """ + Create and return a result of the specified type. + + Parameters + ---------- + result_type : any + The type of the result to generate. + *args : tuple + Additional arguments required for creating the result. + + Returns + ------- + Result + An instance of the `Result` class, providing access to the specified result. 
+ """ return Result(self._connector, self._mesh_by_default, result_type, self._server) def _connect_operators(self, result_info): @@ -164,13 +178,42 @@ def _connect_operators(self, result_info): raise e def __str__(self): + """ + Return a string representation of the `Results` object. + + Returns + ------- + str + String description of the `Results` object. + """ return self._str def __iter__(self): + """ + Iterate over the available results. + + Yields + ------ + Result + Each result dynamically added to the `Results` object. + """ for key in self._op_map_rev: yield self.__class__.__dict__[key].fget() def __getitem__(self, val): + """ + Access a result by index. + + Parameters + ---------- + val : int + The index of the result to retrieve. + + Returns + ------- + Result + The result at the specified index. + """ n = 0 for key in self._op_map_rev: if n == val: @@ -178,6 +221,14 @@ def __getitem__(self, val): n += 1 def __len__(self): + """ + Return the number of results available. + + Returns + ------- + int + The number of results. + """ return len(self._op_map_rev) @@ -251,6 +302,7 @@ def __init__(self, connector, mesh_by_default, result_info, server): raise e def __call__(self, time_scoping=None, mesh_scoping=None): + """Provide for Result instances to be callable for operator retrieval.""" op = self._operator if time_scoping: op.inputs.time_scoping(time_scoping) @@ -268,8 +320,7 @@ def __call__(self, time_scoping=None, mesh_scoping=None): return op def eval(self): - """Evaluate the result provider with the previously specified - inputs and return the result fields container. + """Evaluate the result provider with the previously specified inputs and return the result fields container. Returns ------- @@ -367,7 +418,7 @@ def on_last_time_freq(self): return self def on_time_scoping(self, time_scoping): - """Sets the time scoping to a given one. + """Set the time scoping to a given one. 
Parameters ---------- @@ -428,7 +479,6 @@ def on_named_selection(self, named_selection): 40 """ - self._mesh_scoping = self._connector.named_selection(named_selection) return self @@ -461,6 +511,7 @@ def split_by_body(self): @property def split_by_shape(self): """Set the mesh scoping to a scopings container where each scoping is an element shape. + The evaluated fields container will have one field on 'solid', one on 'shell', one on 'beam' and one on 'unknown_shape'. @@ -584,6 +635,7 @@ def on_location(self, location): class CommonResults(Results): """Default implementation of the class:'Results'. + Is created by default by the 'Model' with the method:'results'. Create default result instances for common result types. @@ -606,8 +658,8 @@ def __init__(self, connector, mesh_by_default, result_info, server): @property def displacement(self): - """Result provider helper wrapping the regular - displacement operator. + """Result provider helper wrapping the regular displacement operator. + With this wrapper, time and mesh scopings can easily be customized. @@ -633,6 +685,7 @@ def displacement(self): def elastic_strain(self): """ Result provider helper wrapping the regular elastic strain operator. + With this wrapper, time and mesh scopings can easily be customized. @@ -658,6 +711,7 @@ def elastic_strain(self): def stress(self): """ Result provider helper wrapping the regular stress operator. + With this wrapper, time and mesh scopings can easily be customized. @@ -682,8 +736,8 @@ def stress(self): @property def structural_temperature(self): """ - Result provider helper wrapping the regular structural_temperature - operator. + Result provider helper wrapping the regular structural_temperature operator. + With this wrapper, time and mesh scopings can easily be customized. @@ -708,8 +762,8 @@ def structural_temperature(self): @property def temperature(self): """ - Result provider helper wrapping the regular temperature - operator. 
+ Result provider helper wrapping the regular temperature operator. + With this wrapper, time and mesh scopings can easily be customized. @@ -733,8 +787,8 @@ def temperature(self): @property def electric_potential(self): """ - Result provider helper wrapping the regular electric_potential - operator. + Result provider helper wrapping the regular electric_potential operator. + With this wrapper, time and mesh scopings can easily be customized. diff --git a/src/ansys/dpf/core/runtime_config.py b/src/ansys/dpf/core/runtime_config.py index 8353923fe6..91b6e3bc22 100644 --- a/src/ansys/dpf/core/runtime_config.py +++ b/src/ansys/dpf/core/runtime_config.py @@ -20,10 +20,7 @@ # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. -""" -RuntimeConfig - -""" +"""RuntimeConfig.""" from ansys.dpf.core.data_tree import DataTree from ansys.dpf.core.common import types @@ -41,8 +38,9 @@ def __init__(self, data_tree, server=None): class RuntimeClientConfig(_RuntimeConfig): - """Enables to access and set runtime configuration - options to gRPC client. Mostly used to configure gRPC streaming and calls options. + """Enable accessing and setting runtime configuration options to gRPC client. + + Mostly used to configure gRPC streaming and calls options. Parameters ---------- @@ -67,8 +65,7 @@ def __init__(self, data_tree, server=None): @property def cache_enabled(self): - """Whether gRPC requests and responses are intercepted - to cache them and retrieve them when appropriate. + """Whether gRPC requests and responses are intercepted to cache them and retrieve them when appropriate. Returns ------- @@ -106,8 +103,7 @@ def streaming_buffer_size(self, value): @property def stream_floats_instead_of_doubles(self): - """Sets whether double values (8 bytes) should be converted and streamed as float values - (4 bytes) in gRPC streaming calls. 
+ """Sets whether double values (8 bytes) should be converted and streamed as float values (4 bytes) in gRPC streaming calls. Returns ------- @@ -126,8 +122,9 @@ def stream_floats_instead_of_doubles(self, value): @property def return_arrays(self): - """All methods will return :class:`ansys.dpf.core.DPFArray` (instead of lists) - when possible. Default is ``True``. + """All methods will return :class:`ansys.dpf.core.DPFArray` (instead of lists) when possible. + + Default is ``True``. See for example, :func:`ansys.dpf.core.Scoping.ids`. Returns @@ -141,12 +138,12 @@ def return_arrays(self, value): self._data_tree.add(return_arrays=int(value)) def copy_config(self, config): + """Add config to data tree.""" config._data_tree.add(self._data_tree.to_dict()) class RuntimeCoreConfig(_RuntimeConfig): - """Enables to access and set runtime configuration - options to DataProcessingCore. + """Enables to access and set runtime configuration options to DataProcessingCore. Parameters ---------- @@ -179,8 +176,7 @@ def __init__(self, data_tree, server=None): @property def num_threads(self): - """Sets the default number of threads to use for all operators, - default is omp_get_num_threads. + """Sets the default number of threads to use for all operators, default is omp_get_num_threads. Returns ------- @@ -194,8 +190,7 @@ def num_threads(self, value): @property def license_timeout_in_seconds(self): - """Sets the default number of threads to use for all operators, - default is omp_get_num_threads. + """Sets the default number of threads to use for all operators, default is omp_get_num_threads. Returns ------- diff --git a/src/ansys/dpf/core/scoping.py b/src/ansys/dpf/core/scoping.py index b91dc392b1..983fd643ad 100644 --- a/src/ansys/dpf/core/scoping.py +++ b/src/ansys/dpf/core/scoping.py @@ -20,12 +20,7 @@ # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. -""" -.. 
_ref_scoping: - -Scoping - -""" +"""Scoping.""" import traceback import warnings @@ -62,6 +57,7 @@ class Scoping: Server with channel connected to the remote or local instance. The default is ``None``, in which case an attempt is made to use the global server. + Examples -------- Create a mesh scoping. @@ -81,9 +77,7 @@ class Scoping: """ def __init__(self, scoping=None, server=None, ids=None, location=None): - """Initializes the scoping with an optional scoping message or - by connecting to a stub. - """ + """Initialize the scoping with an optional scoping message or by connecting to a stub.""" # step 1: get server self._server = server_module.get_or_create_server( scoping._server if isinstance(scoping, Scoping) else server @@ -127,7 +121,8 @@ def __init__(self, scoping=None, server=None, ids=None, location=None): self.location = location def _count(self): - """ + """Return the number of scoping ids. + Returns ------- count : int @@ -146,7 +141,8 @@ def _get_location(self): return self._api.scoping_get_location(self) def _set_location(self, loc=locations.nodal): - """ + """Set the location. + Parameters ---------- loc : str or core.locations enum @@ -157,7 +153,8 @@ def _set_location(self, loc=locations.nodal): @version_requires("2.1") def _set_ids(self, ids): - """ + """Set the ids. + Parameters ---------- ids : list of int @@ -179,7 +176,8 @@ def _set_ids(self, ids): self._api.scoping_set_ids(self, ids, len(ids)) def _get_ids(self, np_array=None): - """ + """Return an array of scoping ids. + Returns ------- ids : list[int], numpy.array (if np_array==True) @@ -309,8 +307,7 @@ def ids(self, value): @property def location(self): - """Location of the IDs as a string, such as ``"nodal"``, ``"elemental"``, - and ``"time_freq"``. + """Location of the IDs as a string, such as ``"nodal"``, ``"elemental"``, and ``"time_freq"``. 
Returns ------- @@ -324,9 +321,28 @@ def location(self, value): self._set_location(value) def __len__(self): + """ + Return the number of scoping ids. + + Returns + ------- + int + The number of scoping ids. + """ return self._count() def __del__(self): + """ + Clean up resources associated with the instance. + + This method calls the deleter function to release resources. If an exception + occurs during deletion, a warning is issued. + + Raises + ------ + Warning + If an exception occurs while attempting to delete resources. + """ try: self._deleter_func[0](self._deleter_func[1](self)) except Exception as e: @@ -334,6 +350,7 @@ def __del__(self): warnings.warn(traceback.format_exc()) def __iter__(self): + """Return an iterator over the scoping ids.""" return self.ids.__iter__() def __getitem__(self, key): @@ -341,7 +358,7 @@ def __getitem__(self, key): return self.id(key) def __setitem__(self, index, id): - """Retrieve the ID at a requested index.""" + """Set the ID at a given index.""" return self.set_id(index, id) @property @@ -444,7 +461,8 @@ def __init_map__(self): self._mapper = dict(zip(self._scoping_ids_copy, np.arange(self._count()))) def _count(self): - """ + """Return the number of scoping ids. + Returns ------- count : int @@ -464,7 +482,8 @@ def _get_location(self): @_setter def _set_location(self, loc=locations.nodal): - """ + """Set the location. + Parameters ---------- loc : str or core.locations enum @@ -476,7 +495,8 @@ def _set_location(self, loc=locations.nodal): @_setter @version_requires("2.1") def _set_ids(self, ids): - """ + """Set scoping ids. + Parameters ---------- ids : list of int @@ -496,7 +516,8 @@ def _set_ids(self, ids): self.__init_map__() def _get_ids(self, np_array=False): - """ + """Return the scoping ids. 
+ Returns ------- ids : list[int], numpy.array (if np_array==True) diff --git a/src/ansys/dpf/core/scopings_container.py b/src/ansys/dpf/core/scopings_container.py index 108f159ba3..6752c884a2 100644 --- a/src/ansys/dpf/core/scopings_container.py +++ b/src/ansys/dpf/core/scopings_container.py @@ -23,7 +23,7 @@ # -*- coding: utf-8 -*- """ -ScopingsContainer +ScopingsContainer. Contains classes associated to the DPF ScopingsContainer """ @@ -33,9 +33,7 @@ class ScopingsContainer(CollectionBase[scoping.Scoping]): - entries_type = scoping.Scoping - """A class used to represent a ScopingsContainer which contains - scopings split on a given space + """A class used to represent a ScopingsContainer which contains scopings split on a given space. Parameters ---------- @@ -49,6 +47,8 @@ class ScopingsContainer(CollectionBase[scoping.Scoping]): ``None``, attempts to use the global server. """ + entries_type = scoping.Scoping + def __init__(self, scopings_container=None, server=None): super().__init__(collection=scopings_container, server=server) if self._internal_obj is None: @@ -60,10 +60,11 @@ def __init__(self, scopings_container=None, server=None): self._internal_obj = self._api.collection_of_scoping_new() def create_subtype(self, obj_by_copy): + """Create a Scoping subtype.""" return scoping.Scoping(scoping=obj_by_copy, server=self._server) def get_scopings(self, label_space): - """Returns the scopings at a requested label space + """Return the scopings at a requested label space. Parameters ---------- @@ -79,7 +80,8 @@ def get_scopings(self, label_space): return super()._get_entries(label_space) def get_scoping(self, label_space_or_index): - """Returns the scoping at a requested index or label space. + """Return the scoping at a requested index or label space. 
+ Throws if the request returns several scoping Parameters @@ -97,7 +99,7 @@ def get_scoping(self, label_space_or_index): return super()._get_entry(label_space_or_index) def __getitem__(self, key): - """Returns the scoping at a requested index + """Return the scoping at a requested index. Parameters ---------- diff --git a/src/ansys/dpf/core/server.py b/src/ansys/dpf/core/server.py index 02ecefe77a..665d312865 100644 --- a/src/ansys/dpf/core/server.py +++ b/src/ansys/dpf/core/server.py @@ -21,7 +21,7 @@ # SOFTWARE. """ -Server +Server. Contains the directives necessary to start the DPF server. """ @@ -53,6 +53,7 @@ def shutdown_global_server(): + """Shut down the global DPF server.""" try: if dpf.core.SERVER is not None: dpf.core.SERVER = None @@ -103,7 +104,7 @@ def _global_server() -> BaseServer: def set_server_configuration(server_config: ServerConfig) -> None: - """Sets, for the current python session, the default type of DPF server to use. + """Set the default type of DPF server to use for the current python session, . Parameters ---------- @@ -338,7 +339,6 @@ def connect_to_server( Examples -------- - >>> from ansys.dpf import core as dpf Create a server. @@ -400,7 +400,7 @@ def connect(): def get_or_create_server(server: BaseServer) -> Union[BaseServer, None]: - """Returns the given server or if None, creates a new one. + """Return the given server or if None, creates a new one. Parameters ---------- @@ -416,13 +416,12 @@ def get_or_create_server(server: BaseServer) -> Union[BaseServer, None]: def available_servers(): - """Searches all available installed DPF servers on the current machine. + """Search all available installed DPF servers on the current machine. This method binds new functions to the server module, which helps to choose the appropriate version. 
Examples -------- - >>> from ansys.dpf import core as dpf >>> #out = dpf.server.available_servers() diff --git a/src/ansys/dpf/core/server_context.py b/src/ansys/dpf/core/server_context.py index 4bc602e361..b6de5cf8a9 100644 --- a/src/ansys/dpf/core/server_context.py +++ b/src/ansys/dpf/core/server_context.py @@ -21,8 +21,7 @@ # SOFTWARE. """ -ServerContext - +ServerContext. Gives the ability to choose the context with which the server should be started. The context allows you to choose the licensing logic for operators. @@ -44,6 +43,8 @@ class LicensingContextType(Enum): + """Enum representing different types of licensing contexts.""" + none = 5 premium = 1 """Checks if at least one license increment exists @@ -53,10 +54,33 @@ class LicensingContextType(Enum): and does not allow operators to block an increment.""" def __int__(self): + """ + Return the integer values of the licensing context. + + Returns + ------- + int + Integer values corresponding to the licensing context. + """ return self.value @staticmethod def same_licensing_context(first, second): + """ + Determine if two licensing contexts are compatible. + + Parameters + ---------- + first : LicensingContextType + The first licensing context to compare. + second : LicensingContextType + The second licensing context to compare. + + Returns + ------- + bool + True if the licensing contexts are compatible, False otherwise. + """ if (first == LicensingContextType.none and second != LicensingContextType.none) or ( first != LicensingContextType.none and second == LicensingContextType.none ): @@ -74,6 +98,7 @@ def same_licensing_context(first, second): class LicenseContextManager: """Can optionally be used to check out a license before using licensed DPF Operators. + Improves performance if you are using multiple Operators that require licensing. It can also be used to force checkout before running a script when few Ansys license increments are available. 
@@ -153,13 +178,45 @@ def release_data(self): self._license_checkout_operator = None def __enter__(self): + """ + Enter the runtime context for the license context manager. + + This method is called when the object is used within a `with` statement. + It ensures that the license is checked out before the operations within the context block are executed. + + Returns + ------- + LicenseContextManager + The current instance of the license context manager. + """ return self def __exit__(self, type, value, tb): + """ + Exit the runtime context for the license context manager. + + This method is called at the end of a `with` statement. It ensures + that the license is checked in and any resources allocated are released. + + Parameters + ---------- + type : type or None + The exception type, if an exception occurred within the context block, or None otherwise. + value : Exception or None + The exception instance, if an exception occurred within the context block, or None otherwise. + tb : traceback or None + The traceback object, if an exception occurred within the context block, or None otherwise. + + Returns + ------- + bool + If True, suppresses the exception. Otherwise, the exception is propagated. + """ if tb is None: self.release_data() def __del__(self): + """Release the license when the instance is deleted.""" self.release_data() pass @@ -177,6 +234,7 @@ def status(self): class ServerContext: """The context defines whether DPF capabilities requiring a license checkout are allowed. + xml_path argument won't be taken into account if using LicensingContextType.entry. 
Parameters @@ -212,6 +270,7 @@ def xml_path(self): return self._xml_path def __str__(self): + """Return string representation of the ServerContext instance.""" return ( f"Server Context of type {self.licensing_context_type}" f" with {'no' if len(self.xml_path) == 0 else ''} xml path" @@ -219,6 +278,7 @@ def __str__(self): ) def __eq__(self, other): + """Compare two ServerContext instances for equality.""" if not isinstance(other, ServerContext): return False return os.path.normpath(self.xml_path) == os.path.normpath( @@ -228,6 +288,7 @@ def __eq__(self, other): ) def __ne__(self, other): + """Check that two server contexts are not equal.""" return not self == other @@ -267,8 +328,9 @@ class AvailableServerContexts: def set_default_server_context(context=AvailableServerContexts.premium) -> None: - """Sets this context as default for any new server. Also applies it to - the global server if it is running as Entry and requested context is Premium. + """Set this context as default for any new server. + + Also applies it to the global server if it is running as Entry and requested context is Premium. The context enables to choose whether DPF capabilities requiring a license checkout are allowed. diff --git a/src/ansys/dpf/core/server_factory.py b/src/ansys/dpf/core/server_factory.py index 493fa5e094..e70c242ea3 100644 --- a/src/ansys/dpf/core/server_factory.py +++ b/src/ansys/dpf/core/server_factory.py @@ -21,8 +21,7 @@ # SOFTWARE. """ -Server factory, server configuration and communication protocols - +Server factory, server configuration and communication protocols. Contains the server factory as well as the communication protocols and server configurations available. @@ -41,7 +40,7 @@ class CommunicationProtocols: - """Defines available communication protocols + """Defines available communication protocols. 
Attributes ---------- @@ -61,7 +60,9 @@ class CommunicationProtocols: class DockerConfig: - """Intermediate class encapsulating all the configuration options needed to run a docker + """Manage DPF Docker configuration and communication. + + Intermediate class encapsulating all the configuration options needed to run a docker image of DPF and holding tools to communicate with Docker. Parameters @@ -124,6 +125,7 @@ def docker_name(self) -> str: @property def mounted_volumes(self) -> dict: """Dictionary of key = local path and value = path of mounted volumes in the Docker Image. + To prevent from uploading result files on the Docker Image :func:`ansys.dpc.core.server_factory.RunningDockerConfig.replace_with_mounted_volumes` iterates through this dictionary to replace local path instances by their mapped value. @@ -140,6 +142,14 @@ def mounted_volumes(self, mounted_volumes: dict): @property def licensing_args(self) -> str: + """Generate licensing-related environment variables for the Docker container. + + Returns + ------- + str + String containing Docker environment variable settings for licensing, + including acceptance of license agreements and licensing file path. + """ la = os.environ.get("ANSYS_DPF_ACCEPT_LA", "N") lf = os.environ.get("ANSYSLMD_LICENSE_FILE", None) additional_option = " -e ANSYS_DPF_ACCEPT_LA=" + la + " " @@ -160,7 +170,9 @@ def extra_args(self) -> str: return self._extra_args def docker_run_cmd_command(self, docker_server_port: int, local_port: int) -> str: - """Creates the docker run command with the ``DockerConfig`` attributes as well + """Build the Docker run command using DockerConfig attributes and specified ports. + + Creates the docker run command with the ``DockerConfig`` attributes as well as the ``docker_server_port`` and ``local_port`` passed in as parameters. 
Parameters @@ -191,6 +203,16 @@ def docker_run_cmd_command(self, docker_server_port: int, local_port: int) -> st ) def __str__(self): + """Return a string representation of the DockerConfig object. + + Includes information about whether Docker is used, the Docker image name, + mounted volumes, and any extra arguments. + + Returns + ------- + str + Formatted string representation of the DockerConfig instance. + """ return ( "DockerConfig with: \n" f"\t- use_docker: {self.use_docker}\n" @@ -201,8 +223,7 @@ def __str__(self): @staticmethod def find_port_available_for_docker_bind(port: int) -> int: - """Checks for available internal ``docker_server_port`` by looking at the stdout of - all running Docker Containers. + """Check available internal docker_server_port from the stdout of running Docker containers. Parameters ---------- @@ -234,6 +255,7 @@ def find_port_available_for_docker_bind(port: int) -> int: class ServerConfig: """Provides an instance of ServerConfig object to manage the server type used. + The default parameters can be overwritten using the DPF_SERVER_TYPE environment variable. DPF_SERVER_TYPE=INPROCESS, DPF_SERVER_TYPE=GRPC, DPF_SERVER_TYPE=LEGACYGRPC can be used. @@ -283,24 +305,64 @@ def __init__( self.protocol = protocol def __str__(self): + """Return a string representation of the ServerConfig instance. + + This method provides a human-readable string summarizing the server configuration, + including the protocol and whether it's using legacy gRPC. + + Returns + ------- + str + String representation of the ServerConfig instance. + """ text = f"Server configuration: protocol={self.protocol}" if self.legacy: text += f" (legacy gRPC)" return text def __eq__(self, other: "ServerConfig"): + """Check if two ServerConfig instances are equal. + + Compares the current ServerConfig instance with another one to check if they have + the same protocol and legacy status. 
+ + Parameters + ---------- + other : ServerConfig + The other ServerConfig instance to compare with. + + Returns + ------- + bool + True if the instances have the same protocol and legacy status, False otherwise. + """ if isinstance(other, ServerConfig): return self.legacy == other.legacy and self.protocol == other.protocol return False def __ne__(self, other): + """Check if two ServerConfig instances are not equal. + + Compares the current ServerConfig instance with another one to check if they have + different protocol or legacy status. + + Parameters + ---------- + other : ServerConfig + The other ServerConfig instance to compare with. + + Returns + ------- + bool + True if the instances have different protocol or legacy status, False otherwise. + """ return not self.__eq__(other) def get_default_server_config( server_lower_than_or_equal_to_0_3: bool = False, docker_config: DockerConfig = None ): - """Returns the default configuration depending on the server version. + """Return the default configuration depending on the server version. - if ansys.dpf.core.SERVER_CONFIGURATION is not None, then this variable is taken - if server_lower_than_or_equal_to_0_3 is True, then LegacyGrpcServer is taken @@ -348,7 +410,8 @@ def get_default_server_config( def get_default_remote_server_config(): - """Returns the default configuration for gRPC communication. + """Return the default configuration for gRPC communication. + Follows get_default_server_config Raises @@ -362,7 +425,7 @@ def get_default_remote_server_config(): class AvailableServerConfigs: - """Defines available server configurations + """Define available server configurations. Attributes ---------- @@ -393,8 +456,7 @@ class AvailableServerConfigs: class RunningDockerConfig: - """Holds all the configuration options and the process information of a running Docker image - of a DPF server. + """Holds all the configuration options and the process information of a running Docker image of a DPF server. 
Parameters ---------- @@ -471,6 +533,7 @@ def docker_name(self) -> str: @property def mounted_volumes(self) -> dict: """Dictionary of local path to docker path of volumes mounted in the Docker Image. + These paths are checked for when result files are looked for by the server to prevent from uploading them. @@ -482,7 +545,7 @@ def mounted_volumes(self) -> dict: @property def extra_args(self) -> str: - """Extra arguments used in the ``docker run`` command + """Extra arguments used in the ``docker run`` command. Returns ------- @@ -491,8 +554,7 @@ def extra_args(self) -> str: return self._docker_config.extra_args def replace_with_mounted_volumes(self, path: str) -> str: - """Replace local path found in the list of mounted - volumes by their mounted path in the docker. + """Replace local path found in the list of mounted volumes by their mounted path in the docker. Parameters ---------- @@ -511,7 +573,7 @@ def replace_with_mounted_volumes(self, path: str) -> str: return path def remove_docker_image(self) -> None: - """Stops and Removes the Docker image with its id==server_id""" + """Stop and Removes the Docker image with its id==server_id.""" if not self.use_docker or not self.server_id: return stop_cmd = f"docker stop {self.server_id}" @@ -586,13 +648,41 @@ def listen_to_process( docker_process.kill() def docker_run_cmd_command(self, docker_server_port: int, local_port: int) -> str: + """Return a docker run command using DockerConfig attributes and specified ports. + + Creates the docker run command with the ``DockerConfig`` attributes as well + as the ``docker_server_port`` and ``local_port`` passed in as parameters. + + Parameters + ---------- + docker_server_port : int + Port used inside the Docker Container to run the gRPC server. + local_port : int + Port exposed outside the Docker container bounded to the internal + ``docker_server_port``. 
+ + Returns + ------- + str + """ return self._docker_config.docker_run_cmd_command(docker_server_port, local_port) def __str__(self): + """Return a string representation of the RunningDockerConfig instance. + + This method provides a human-readable string summarizing the docker configuration, and + the server id. + + Returns + ------- + str + String representation of the RunningDockerConfig instance. + """ return str(self._docker_config) + f"\t- server_id: {self.server_id}\n" def create_default_docker_config() -> DockerConfig: + """Return a docker configuration instance.""" return DockerConfig( use_docker="DPF_DOCKER" in os.environ.keys(), docker_name=os.environ.get("DPF_DOCKER", ""), @@ -608,6 +698,7 @@ def get_server_type_from_config( ansys_path: str = None, docker_config: DockerConfig = None, ): + """Return server type determined from the server configuration.""" from ansys.dpf.core.server_types import ( LegacyGrpcServer, GrpcServer, @@ -633,6 +724,7 @@ def get_server_type_from_config( @staticmethod def get_remote_server_type_from_config(config: ServerConfig = None): + """Return remote server type determined from the server configuration.""" if config is None: config = get_default_remote_server_config() return ServerFactory.get_server_type_from_config(config) diff --git a/src/ansys/dpf/core/server_types.py b/src/ansys/dpf/core/server_types.py index 398b7ce3bc..f2a5c0b313 100644 --- a/src/ansys/dpf/core/server_types.py +++ b/src/ansys/dpf/core/server_types.py @@ -21,7 +21,7 @@ # SOFTWARE. """ -Server types +Server types. Contains the different kinds of servers available for the factory. 
@@ -67,7 +67,7 @@ def _get_dll_path(name, ansys_path=None): - """Helper function to get the right dll path for Linux or Windows""" + """Helper-function to get the right dll path for Linux or Windows.""" ISPOSIX = os.name == "posix" ANSYS_INSTALL = Path(core.misc.get_ansys_path(ansys_path)) api_path = load_api._get_path_in_install() @@ -301,6 +301,7 @@ def read_stderr(): def launch_remote_dpf(version=None): + """Launch a remote dpf server.""" try: import ansys.platform.instancemanagement as pypim except ImportError as e: @@ -351,6 +352,7 @@ def _compare_ansys_grpc_dpf_version(right_grpc_module_version_str: str, grpc_mod def check_ansys_grpc_dpf_version(server, timeout): + """Check DPF grpc server version.""" import grpc from packaging import version @@ -375,13 +377,18 @@ def check_ansys_grpc_dpf_version(server, timeout): class GhostServer: + """Class used to keep in memory the port used by previous servers.""" + ip: str _port: int close_time: float def __init__(self, ip: str, port: int, close_time: float = None): """ - Internal class used to keep in memory the port used by previous servers. + Class used to keep in memory the port used by previous servers. + + To be used internally. + Adds a timeout before reusing ports of shutdown servers. """ self.ip = ip @@ -398,15 +405,16 @@ def port(self) -> int: return self._port def __call__(self, *args, **kwargs): + """Provide for making the instance callable to simply return the instance itself.""" return self class BaseServer(abc.ABC): - """Abstract class for servers""" + """Abstract class for servers.""" @abc.abstractmethod def __init__(self): - """Base class for all types of servers: grpc, in process...""" + """Define the base class for all server types, including grpc, in-process, and others.""" # TODO: Use _server_id to compare servers for equality? 
self._server_id = None self._session_instance = None @@ -430,25 +438,30 @@ def set_as_global(self, as_global=True): core.SERVER = self def has_client(self): + """Check if server has a connected client.""" return not (self.client is None) @property @abc.abstractmethod def client(self): + """Must be implemented by subclasses.""" pass @property @abc.abstractmethod def version(self): + """Must be implemented by subclasses.""" pass @property @abc.abstractmethod def available_api_types(self): + """Must be implemented by subclasses.""" pass @abc.abstractmethod def get_api_for_type(self, capi, grpcapi): + """Must be implemented by subclasses.""" pass @property @@ -474,8 +487,7 @@ def _del_session(self): @property def session(self): - """Allows to plan events call backs from the server: - progress bar when workflows are running, logging... + """Plan event callbacks from the server, such as progress bars during workflow execution and logging. Returns ------- @@ -498,7 +510,7 @@ def _base_service(self): @property @abc.abstractmethod def os(self): - """Get the operating system of the server + """Get the operating system of the server. Returns ------- @@ -509,10 +521,12 @@ def os(self): @property def on_docker(self): + """Whether the DPF server should be started in a Docker Container by default.""" return self._docker_config.use_docker @property def docker_config(self): + """Return the docker config associated with the server.""" return self._docker_config @docker_config.setter @@ -522,14 +536,16 @@ def docker_config(self, val): @property @abc.abstractmethod def config(self): + """Must be implemented by subclasses.""" pass @abc.abstractmethod def shutdown(self): + """Must be implemented by subclasses.""" pass def release(self): - """Clears the available Operators and Releases licenses when necessary. + """Clear the available Operators and Releases licenses when necessary. 
Notes ----- @@ -538,7 +554,8 @@ def release(self): self._base_service.release_dpf() def apply_context(self, context): - """Defines the settings that will be used to load DPF's plugins. + """Define the settings that will be used to load DPF's plugins. + A DPF xml file can be used to list the plugins and set up variables. Parameters @@ -556,6 +573,7 @@ def apply_context(self, context): @property def context(self): """Returns the settings used to load DPF's plugins. + To update the context server side, use :func:`ansys.dpf.core.BaseServer.server_types.apply_context` @@ -607,6 +625,7 @@ def meet_version(self, required_version): @property @abc.abstractmethod def local_server(self) -> bool: + """Must be implemented by subclasses.""" pass @local_server.setter @@ -615,17 +634,27 @@ def local_server(self, val): pass def __str__(self): + """Return string representation of the instance.""" return f"DPF Server: {self.info}" @abc.abstractmethod def __eq__(self, other_server): + """Must be implemented by subclasses.""" pass def __ne__(self, other_server): - """Return true, if the servers are not equal""" + """Return true, if the servers are not equal.""" return not self.__eq__(other_server) def __del__(self): + """ + Clean up resources associated with the instance. + + Raises + ------ + Warning + If an exception occurs while attempting to delete resources. 
+ """ try: if hasattr(core, "SERVER") and id(core.SERVER) == id(self): core.SERVER = None @@ -649,7 +678,7 @@ def __del__(self): class CServer(BaseServer, ABC): - """Abstract class for servers going through the DPFClientAPI""" + """Abstract class for servers going through the DPFClientAPI.""" def __init__(self, ansys_path=None, load_operators=True): super().__init__() @@ -659,12 +688,22 @@ def __init__(self, ansys_path=None, load_operators=True): @property def available_api_types(self): + """Return available api type, always c_api.""" return "c_api" def get_api_for_type(self, capi, grpcapi): + """Return api for type.""" return capi def __del__(self): + """ + Clean up resources associated with the instance. + + Raises + ------ + Warning + If an exception occurs while attempting to delete resources. + """ try: self._del_session() if self._own_process: @@ -675,12 +714,15 @@ def __del__(self): class GrpcClient: + """Client using the gRPC communication protocol.""" + def __init__(self): from ansys.dpf.gate import client_capi client_capi.ClientCAPI.init_client_environment(self) def set_address(self, address, server): + """Set client address.""" from ansys.dpf.core import misc, settings if misc.RUNTIME_CLIENT_CONFIG is not None: @@ -691,6 +733,17 @@ def set_address(self, address, server): self._internal_obj = client_capi.ClientCAPI.client_new_full_address(address) def __del__(self): + """ + Clean up resources associated with the instance. + + This method calls the deleter function to release resources. If an exception + occurs during deletion, a warning is issued. + + Raises + ------ + Warning + If an exception occurs while attempting to delete resources. 
+ """ try: self._deleter_func[0](self._deleter_func[1](self)) except: pass @@ -698,7 +751,7 @@ class GrpcServer(CServer): - """Server using the gRPC communication protocol""" + """Server using the gRPC communication protocol.""" def __init__( self, @@ -789,6 +842,13 @@ def _check_first_call(self, num_connection_tryouts): @property def version(self): + """Get the version of the server. + + Returns + ------- + version : str + The version of the server in 'major.minor' format. + """ if not self._version: from ansys.dpf.gate import data_processing_capi, integral_types @@ -801,6 +861,7 @@ def version(self): @property def os(self): + """Get the operating system on which the server is running.""" if not self._os: from ansys.dpf.gate import data_processing_capi @@ -819,6 +880,7 @@ def _create_shutdown_funcs(self): self._shutdown_func = (api.data_processing_release_server, self.client) def shutdown(self): + """Shut down the server instance.""" if self.live: _ = self.info # initializing the info variable (giving access to ip and port): this can be required if start_local_server is called afterwards if self._remote_instance: @@ -838,7 +900,7 @@ def shutdown(self): self.live = False def __eq__(self, other_server): - """Return true, if ***** are equals""" + """Return true if the server addresses are equal.""" if isinstance(other_server, GrpcServer): # """Return true, if the ip and the port are equals""" return self.address == other_server.address @@ -846,6 +908,13 @@ def __eq__(self, other_server): @property def client(self): + """Get the client associated with the server. + + Returns + ------- + client : GrpcClient + The GrpcClient instance associated with the server. + """ return self._client @property @@ -881,6 +950,7 @@ def port(self): @property def external_ip(self): """Public IP address of the server.
+ Is the same as :func:`ansys.dpf.core.GrpcServer.ip` in all cases except for servers using a gateway: for example, servers running in Docker Images might have an internal @@ -897,6 +967,7 @@ def external_ip(self): @property def external_port(self): """Public Port of the server. + Is the same as :func:`ansys.dpf.core.GrpcServer.port` in all cases except for servers using a gateway: for example, servers running in Docker Images might have an internal @@ -912,6 +983,13 @@ def external_port(self): @property def local_server(self): + """Get whether the server is running locally. + + Returns + ------- + local_server : bool + True if the server is running locally, False otherwise. + """ return self._local_server @local_server.setter @@ -920,11 +998,18 @@ def local_server(self, val): @property def config(self): + """Get the server configuration for the gRPC server. + + Returns + ------- + config : AvailableServerConfigs + The server configuration for the gRPC server from the AvailableServerConfigs. + """ return server_factory.AvailableServerConfigs.GrpcServer class InProcessServer(CServer): - """Server using the InProcess communication protocol""" + """Server using the InProcess communication protocol.""" def __init__( self, @@ -973,6 +1058,13 @@ def __init__( @property def version(self): + """Get the version of the InProcess server. + + Returns + ------- + version : str + The version of the InProcess server in the format "major.minor". + """ from ansys.dpf.gate import data_processing_capi, integral_types api = data_processing_capi.DataProcessingCAPI @@ -984,22 +1076,44 @@ def version(self): @property def os(self): + """Get the operating system of the InProcess server. + + Returns + ------- + os : str + The operating system name. For InProcess servers, + it typically returns the current OS, e.g., "posix" or "nt". 
+ """ # Since it is InProcess, one could return the current os return os.name - def shutdown(self): + def shutdown(self): # noqa: D102 pass def __eq__(self, other_server): - """Return true, if the ip and the port are equals""" + """Return true, if the ip and the port are equals.""" return isinstance(other_server, InProcessServer) @property def client(self): + """Get the client for the InProcess server. + + Returns + ------- + client : None + InProcess servers do not have a client, so this property returns None. + """ return None @property def local_server(self): + """Get whether the InProcess server is running locally. + + Returns + ------- + local_server : bool + True, as the InProcess server is always local. + """ return True @local_server.setter @@ -1009,6 +1123,13 @@ def local_server(self, val): @property def config(self): + """Get the server configuration for the InProcess server. + + Returns + ------- + config : AvailableServerConfigs + The server configuration for the InProcess server from the AvailableServerConfigs. + """ return server_factory.AvailableServerConfigs.InProcessServer @@ -1033,10 +1154,11 @@ def get_system_path() -> str: class LegacyGrpcServer(BaseServer): """Provides an instance of the DPF server using InProcess gRPC. + Kept for backward-compatibility with dpf servers <0.5.0. Parameters - ----------- + ---------- ansys_path : str Path for the DPF executable. ip : str @@ -1159,20 +1281,46 @@ def _create_shutdown_funcs(self): @property def client(self): + """Get the client instance for the server. + + This property returns the current instance of the server itself as the client, + providing access to the server's functionalities through the `LegacyGrpcServer` instance. + """ return self @property def available_api_types(self): + """Get the list of available API types for the server. + + This property returns the list of API types that are available through + the current server instance, which are stored in the `_stubs` attribute. 
+ + Returns + ------- + list + A list of available API types (stub objects) for the server. + """ return list(self._stubs.values()) def get_api_for_type(self, capi, grpcapi): + """Get the API for the given type.""" return grpcapi def create_stub_if_necessary(self, stub_name, stub_type): + """Create and store a gRPC stub if it doesn't already exist. + + This method checks if the specified stub (by `stub_name`) exists. If not, it creates + the stub using the given `stub_type` and stores it in the `_stubs` dictionary. + """ if self.channel and not stub_name in self._stubs: self._stubs[stub_name] = stub_type(self.channel) def get_stub(self, stub_name): + """Retrieve the gRPC stub for the given name. + + This method checks if the stub corresponding to `stub_name` exists in the `_stubs` + dictionary and returns it. If the stub does not exist, it returns `None`. + """ if not (stub_name in self._stubs.keys()): return None else: @@ -1201,6 +1349,7 @@ def port(self): @property def external_ip(self): """Public IP address of the server. + Is the same as :func:`ansys.dpf.core.LegacyGrpcServer.ip` in all cases except for servers using a gateway: for example, servers running in Docker Images might have an internal @@ -1217,6 +1366,7 @@ def external_ip(self): @property def external_port(self): """Public Port of the server. + Is the same as :func:`ansys.dpf.core.LegacyGrpcServer.port` in all cases except for servers using a gateway: for example, servers running in Docker Images might have an internal @@ -1242,7 +1392,7 @@ def version(self): @property def os(self): - """Get the operating system of the server + """Get the operating system of the server. Returns ------- @@ -1253,12 +1403,20 @@ def os(self): @property def info(self): + """Return information about the server instance.""" if not self._info_instance: self._info_instance = self._base_service.server_info return self._info_instance @property def local_server(self): + """Get whether the server is running locally. 
+ + Returns + ------- + local_server : bool + True if the server is running locally, False otherwise. + """ return self._local_server @local_server.setter @@ -1266,6 +1424,7 @@ def local_server(self, val): self._local_server = val def shutdown(self): + """Shutdown server instance.""" if self._own_process and self.live: _ = self.info # initializing the info variable (giving access to ip and port): this can be required if start_local_server is called afterwards if self._remote_instance: @@ -1298,15 +1457,30 @@ def shutdown(self): @property def config(self): + """Get the server configuration for the LegacyGrpcServer server. + + Returns + ------- + config : AvailableServerConfigs + The server configuration for the LegacyGrpcServer server from the AvailableServerConfigs. + """ return server_factory.AvailableServerConfigs.LegacyGrpcServer def __eq__(self, other_server): - """Return true, if the ip and the port are equals""" + """Return true, if the ip and the port are equals.""" if isinstance(other_server, LegacyGrpcServer): return self.ip == other_server.ip and self.port == other_server.port return False def __del__(self): + """ + Clean up resources associated with the instance. + + Raises + ------ + Warning + If an exception occurs while attempting to delete resources. + """ try: self._del_session() if self._own_process: diff --git a/src/ansys/dpf/core/session.py b/src/ansys/dpf/core/session.py index fdcfbd5b5e..6cc12c97a5 100644 --- a/src/ansys/dpf/core/session.py +++ b/src/ansys/dpf/core/session.py @@ -20,10 +20,7 @@ # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. -""" -Session - -""" +"""Session.""" import abc import ctypes @@ -51,6 +48,22 @@ @capi.GenericCallBackType def progress_call_back(obj, nature, arg): + """ + Tracking callback function for the progress of operators in a workflow. + + This function updates a progress bar based on the operator's status. 
+ + Returns + ------- + None + + Notes + ----- + If `nature` is 0, the number of started operators is incremented. + If `nature` is 1, the number of finished operators is incremented and the progress bar is updated. + If `nature` is 9, the counters for started and finished operators are reset. + If the progress bar exists, its progress is updated accordingly. + """ try: obj = ctypes.cast(obj, ctypes.POINTER(ctypes.py_object)) handler = obj.contents.value @@ -72,16 +85,30 @@ def progress_call_back(obj, nature, arg): class EventHandlerBase: + """ + Abstract base class for handling server events related to workflows. + + Subclasses must implement methods for starting event listening and adding + operators to workflows. + """ + @abc.abstractmethod def add_operator(self, operator, pin, identifier): + """Must be implemented by subclasses.""" pass @abc.abstractmethod def start_listening(self): + """Must be implemented by subclasses.""" pass class EventHandler(EventHandlerBase): + """Handle events for a server session, including operator tracking and workflow progress updates. + + Manages the listening of events and operator addition to workflows during execution. + """ + def __init__(self, session): self._session = weakref.ref(session) self.bar = None @@ -95,6 +122,17 @@ def __init__(self, session): ) def start_listening(self): + """Start listening for events from the server session. + + Displays a progress bar if available and initializes operator tracking. + + This method prepares the progress bar and resets counters for started and + finished operators. 
+ + Returns + ------- + None + """ if not _progress_bar_is_available(): print("Progress bar is not available, please install progressbar2") return @@ -103,6 +141,7 @@ def start_listening(self): self.finished_operators = 0 def add_operator(self, operator, pin, identifier): + """Add an operator to a workflow in the server session.""" from ansys.dpf.core import workflow wf = workflow.Workflow(server=self._session()._server) @@ -113,12 +152,27 @@ def add_operator(self, operator, pin, identifier): class GrpcEventHandler(EventHandlerBase): + """Handle events for a server session using gRPC. + + Manages event listening and operator addition to track workflow progress and logging. + """ + def __init__(self, session): self._session = weakref.ref(session) self.bar = None self._session()._api.add_external_event_handler(self._session(), self, None) def start_listening(self): + """ + Start listening for events from the server session. + + Displays a progress bar if available, and logs workflow status. + + Returns + ------- + threading.Thread + A thread that listens for events in the background. + """ if not _progress_bar_is_available(): print("Progress bar is not available, please install progressbar2") return @@ -130,11 +184,25 @@ def start_listening(self): return thread def add_operator(self, operator, pin, identifier): + """ + Add an operator to the server session. + + Parameters + ---------- + operator : object + The operator to add. + pin : object + The pin to associate with the operator. + identifier : str + A unique identifier for the operator. + """ self._session()._api.add_operator(self._session(), identifier, operator, pin) class Session: - """A class used to create a user session on the server, it allows to plan events + """Create a class to manage server sessions and handle events like progress and logging. + + A class used to create a user session on the server, it allows to plan events call backs from the server: progress bar when workflows are running, logging... 
A session is started every time a ``'DpfServer'`` is created. @@ -174,8 +242,8 @@ def _server(self): @version_requires("3.0") def add_workflow(self, workflow, identifier): - """Add a workflow to the session. It allows to follow the workflow's - events while it's running. + """Add a workflow to the session. It allows to follow the workflow's events while it's running. + This method is automatically called when a workflow's output is requested. @@ -191,9 +259,9 @@ def add_workflow(self, workflow, identifier): @version_requires("3.0") def add_operator(self, operator, pin, identifier): - """Creates a workflow made of the input operator and all its ancestors - to the session. It allows to follow the workflow's - events while it's running. + """Create a workflow made of the input operator and all its ancestors to the session. + + It allows to follow the workflow's events while it's running. This method is automatically called when an operator's output is requested and the property :func:`ansys.dpf.core.dpf_operator.Operator.progress_bar` is set to ``'True'``. @@ -213,7 +281,8 @@ def add_operator(self, operator, pin, identifier): @version_requires("6.1") def handle_events_with_file_logger(self, file_path, verbosity_level=1): - """Adds an event handler of type ``file_logger`` server side. + """Add an event handler of type ``file_logger`` server side. + Events will then be caught and forwarded to the file stream. Parameters @@ -229,7 +298,8 @@ def handle_events_with_file_logger(self, file_path, verbosity_level=1): @version_requires("6.1") def start_emitting_rpc_log(self): - """Adds a signal emitter to the session. This emitter will catch all incoming rpc calls. + """Add a signal emitter to the session. This emitter will catch all incoming rpc calls. + Adding a handler will enable the logging ( use :func:`Session.handle_events_with_file_logger()`). 
@@ -240,9 +310,7 @@ def start_emitting_rpc_log(self): @version_requires("3.0") def listen_to_progress(self): - """Starts a progress bar and updates it every time an operator is - finished. - """ + """Start a progress bar and updates it every time an operator is finished.""" if self._handler is not None: return self._handler.start_listening() @@ -254,17 +322,29 @@ def _init_handler(self): @version_requires("3.0") def add_progress_system(self): - """Asks the session to start recording progress events. + """Ask the session to start recording progress events. + Called when the session is started. """ self._init_handler() @version_requires("3.0") def flush_workflows(self): - """This removes the handle on the workflow by the ``session``""" + """Remove the handle on the workflow by the ``session``.""" self._api.flush_workflows(self) def delete(self): + """ + Clean up resources associated with the instance. + + This method calls the deleter function to release resources. If an exception + occurs during deletion, a warning is issued. + + Raises + ------ + Warning + If an exception occurs while attempting to delete resources. + """ try: if not self._released: self._deleter_func[0](self._deleter_func[1](self)) @@ -273,4 +353,5 @@ def delete(self): self._released = True def __del__(self): + """Clean up resources associated with the instance.""" self.delete() diff --git a/src/ansys/dpf/core/settings.py b/src/ansys/dpf/core/settings.py index f5b61c1f1c..6e6053f0cf 100644 --- a/src/ansys/dpf/core/settings.py +++ b/src/ansys/dpf/core/settings.py @@ -21,7 +21,7 @@ # SOFTWARE. """ -settings +settings. Customize the behavior of the module. 
""" @@ -39,7 +39,7 @@ def disable_off_screen_rendering() -> None: - """No pop up windows appears to plot data with ``matplotlib`` or ``pyvista``""" + """No pop up windows appear to plot data with ``matplotlib`` or ``pyvista``.""" # enable matplotlib off_screen plotting to avoid test interruption if module_exists("matplotlib"): import matplotlib as mpl @@ -54,6 +54,7 @@ def disable_off_screen_rendering() -> None: def set_default_pyvista_config(): + """Set default pyvista configuration.""" # Configure PyVista's ``rcParams`` for dpf if module_exists("pyvista"): import pyvista as pv @@ -65,6 +66,7 @@ def set_default_pyvista_config(): def bypass_pv_opengl_osmesa_crash(): + """Bypass pyvista opengl osmesa crash.""" if module_exists("pyvista"): import pyvista as pv @@ -72,7 +74,9 @@ def bypass_pv_opengl_osmesa_crash(): def disable_interpreter_properties_evaluation() -> bool: - """If ``jedi`` module is installed (autocompletion module for most of IDEs), disables the + """Disable property evaluation on tab key press if the jedi module is installed. + + If ``jedi`` module is installed (autocompletion module for most of IDEs), disables the property evaluation when tab key is pressed. To use in Jupyter Notebook if autocompletion becomes slow. @@ -91,11 +95,14 @@ def disable_interpreter_properties_evaluation() -> bool: def set_upload_chunk_size(num_bytes=misc.DEFAULT_FILE_CHUNK_SIZE) -> None: + """Set upload chunk size.""" misc.DEFAULT_FILE_CHUNK_SIZE = num_bytes def set_dynamic_available_results_capability(value) -> None: - """Disables the evaluation of the available results and + """Disable evaluation and dynamic creation of result properties when creating a Model. + + Disables the evaluation of the available results and the dynamic creation of the results properties when a ''Model'' is created.
Parameters @@ -105,7 +112,6 @@ def set_dynamic_available_results_capability(value) -> None: Examples -------- - >>> from ansys.dpf import core as dpf >>> dpf.settings.set_dynamic_available_results_capability(False) >>> dpf.settings.set_dynamic_available_results_capability(True) @@ -126,8 +132,7 @@ def _forward_to_gate(): def get_runtime_client_config(server=None): - """Get the runtime configuration information of Ans.Dpf.GrpcClient - binary. + """Get the runtime configuration information of Ans.Dpf.GrpcClient binary. Parameters ---------- @@ -170,8 +175,7 @@ def get_runtime_client_config(server=None): def get_runtime_core_config(server=None): - """Get the runtime configuration information of Ans.Dpf.GrpcClient - binary. + """Get the runtime configuration information of Ans.Dpf.GrpcClient binary. Parameters ---------- diff --git a/src/ansys/dpf/core/streams_container.py b/src/ansys/dpf/core/streams_container.py index 6c28971107..f54345319c 100644 --- a/src/ansys/dpf/core/streams_container.py +++ b/src/ansys/dpf/core/streams_container.py @@ -22,7 +22,7 @@ # -*- coding: utf-8 -*- """ -StreamsContainer +StreamsContainer. Contains classes associated with the DPF StreamsContainer. """ @@ -105,6 +105,7 @@ def _server(self, value): @property def datasources(self): + """Return the data sources.""" return data_sources.DataSources(data_sources=self._api.streams_get_data_sources(self)) def release_handles(self): diff --git a/src/ansys/dpf/core/string_field.py b/src/ansys/dpf/core/string_field.py index 7450fa1715..85bf23d8ad 100644 --- a/src/ansys/dpf/core/string_field.py +++ b/src/ansys/dpf/core/string_field.py @@ -20,10 +20,7 @@ # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. 
-""" -StringField - -""" +"""StringField.""" import numpy as np from ansys.dpf.core.common import natures, locations, _get_size_of_list @@ -180,14 +177,17 @@ def location(self, value): @property def component_count(self): + """Return the number of component, always 1.""" return 1 @property def elementary_data_count(self): + """Return elementary data count.""" return self._api.csstring_field_get_data_size(self) @property def size(self): + """Return elementary data size.""" return self._api.csstring_field_get_data_size(self) def _set_scoping(self, scoping): @@ -199,6 +199,7 @@ def _get_scoping(self): ) def get_entity_data(self, index): + """Return entity data.""" try: vec = dpf_vector.DPFVectorString(client=self._server.client) self._api.csstring_field_get_entity_data_for_dpf_vector( @@ -210,6 +211,7 @@ def get_entity_data(self, index): return data def get_entity_data_by_id(self, id): + """Return entity data corresponding to the provided id.""" try: vec = dpf_vector.DPFVectorString(client=self._server.client) self._api.csstring_field_get_entity_data_by_id_for_dpf_vector( @@ -224,6 +226,11 @@ def get_entity_data_by_id(self, id): return data def append(self, data: List[str], scopingid: int): + """ + Append data to the string field. + + This method appends data to the string field for a specific scoping ID. + """ string_list = integral_types.MutableListString(data) self._api.csstring_field_push_back(self, scopingid, _get_size_of_list(data), string_list) diff --git a/src/ansys/dpf/core/support.py b/src/ansys/dpf/core/support.py index 180238d28e..ebe2fdb12d 100644 --- a/src/ansys/dpf/core/support.py +++ b/src/ansys/dpf/core/support.py @@ -20,12 +20,7 @@ # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. -""" -.. _ref_support: - -Support - -""" +"""Support.""" import traceback import warnings @@ -37,8 +32,7 @@ class Support: - """Base class for support (supporting Field's location, Scoping's location, - Collection's labels...) 
+ """Base class for support (supporting Field's location, Scoping's location, Collection's labels...). Field, PropertyField and StringField support can be accessed generically via this base class. @@ -88,7 +82,7 @@ def __init__(self, support, server=None): @version_requires("5.0") def field_support_by_property(self, property_name: str): - """Returns a Field supporting (describing) a given property. + """Return a Field supporting (describing) a given property. Returns ------- @@ -106,7 +100,7 @@ def field_support_by_property(self, property_name: str): @version_requires("5.0") def prop_field_support_by_property(self, property_name: str): - """Returns a PropertyField supporting (describing) a given property. + """Return a PropertyField supporting (describing) a given property. Returns ------- @@ -124,7 +118,7 @@ def prop_field_support_by_property(self, property_name: str): @version_requires("5.0") def string_field_support_by_property(self, property_name: str): - """Returns a StringField supporting (describing) a given property. + """Return a StringField supporting (describing) a given property. Returns ------- @@ -142,7 +136,7 @@ def string_field_support_by_property(self, property_name: str): @version_requires("5.0") def available_field_supported_properties(self): - """Returns the list of property names supported by a Field. + """Return the list of property names supported by a Field. Returns ------- @@ -160,7 +154,7 @@ def available_field_supported_properties(self): @version_requires("5.0") def available_prop_field_supported_properties(self): - """Returns the list of property names supported by a PropertyField. + """Return the list of property names supported by a PropertyField. Returns ------- @@ -180,7 +174,7 @@ def available_prop_field_supported_properties(self): @version_requires("5.0") def available_string_field_supported_properties(self): - """Returns the list of property names supported by a StringField. 
+ """Return the list of property names supported by a StringField. Returns ------- @@ -199,6 +193,17 @@ def available_string_field_supported_properties(self): return coll_obj.get_integral_entries() def __del__(self): + """ + Clean up resources associated with the instance. + + This method calls the deleter function to release resources. If an exception + occurs during deletion, a warning is issued. + + Raises + ------ + Warning + If an exception occurs while attempting to delete resources. + """ try: self._deleter_func[0](self._deleter_func[1](self)) except: diff --git a/src/ansys/dpf/core/time_freq_scoping_factory.py b/src/ansys/dpf/core/time_freq_scoping_factory.py index 985b10c67b..30476ec066 100644 --- a/src/ansys/dpf/core/time_freq_scoping_factory.py +++ b/src/ansys/dpf/core/time_freq_scoping_factory.py @@ -21,8 +21,7 @@ # SOFTWARE. """ -time_freq_scoping_factory - +time_freq_scoping_factory. Contains functions to simplify creating time frequency scopings. """ @@ -188,12 +187,15 @@ def scoping_by_step_and_substep_from_model(load_step_id, subset_id, model, serve Returns ------- scoping : Scoping - Scoping based on a given step/substep of a model's time_freq_support.""" + Scoping based on a given step/substep of a model's time_freq_support. + """ return scoping_by_step_and_substep(load_step_id, subset_id, model.metadata.time_freq_support) def scoping_on_all_time_freqs(obj: Union[TimeFreqSupport, Model, DataSources]): - """Create a specific :class:`ansys.dpf.core.Scoping` with all time or + """Create a Scoping with all time or frequency sets. 
+ + Create a specific :class:`ansys.dpf.core.Scoping` with all time or frequency sets of a :class:`ansys.dpf.core.TimeFreqSupport` or a class:`ansys.dpf.core.Model` Parameters diff --git a/src/ansys/dpf/core/time_freq_support.py b/src/ansys/dpf/core/time_freq_support.py index fe0afff160..49c3485419 100644 --- a/src/ansys/dpf/core/time_freq_support.py +++ b/src/ansys/dpf/core/time_freq_support.py @@ -20,12 +20,7 @@ # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. -""" -.. _ref_timefreqsupport: - -TimeFreqSupport - -""" +"""TimeFreqSupport.""" from ansys.dpf.gate import time_freq_support_capi, time_freq_support_grpcapi @@ -102,6 +97,7 @@ def __str__(self): @property def time_frequencies(self): """Field of time frequencies or time values for the active result. + Frequencies field can have one value by set. Examples @@ -127,6 +123,7 @@ def time_frequencies(self): def _set_time_frequencies(self, frequencies): """Set the time frequencies of the time_freq_support. + Frequencies field can have one value by set. Parameters @@ -139,6 +136,7 @@ def _set_time_frequencies(self, frequencies): @time_frequencies.setter def time_frequencies(self, value): """Time frequencies that define the time_freq_support of the analysis. + Frequencies field can have one value by set. Parameters @@ -151,6 +149,7 @@ def time_frequencies(self, value): @property def complex_frequencies(self): """Field of complex frequencies for the active result. + Complex frequencies field can have one value by set. Examples @@ -167,6 +166,7 @@ def complex_frequencies(self): def _set_complex_frequencies(self, complex_frequencies): """Set the frequencies of the time_freq_support. + Complex frequencies field can have one value by set. Parameters @@ -179,6 +179,7 @@ def _set_complex_frequencies(self, complex_frequencies): @complex_frequencies.setter def complex_frequencies(self, value): """Complex frequencies that define the time_freq_support of the analysis. 
+ Complex frequencies field can have one value by set. Parameters @@ -191,6 +192,7 @@ def complex_frequencies(self, value): @property def rpms(self): """Field of RPMs for the active result. + The RPM field has one value by load step. Returns ``None`` if the result has no RPMs. @@ -199,6 +201,7 @@ def rpms(self): def _set_rpms(self, rpms): """Set the RPMs values of the time_freq_support. + RPMs field has one value by load step. Parameters @@ -211,6 +214,7 @@ def _set_rpms(self, rpms): @rpms.setter def rpms(self, value): """RPMs that define the time_freq_support of the analysis. + RPMs field has one value by load step. Parameters @@ -266,8 +270,7 @@ def n_sets(self): return self._sets_count() def get_frequency(self, step=0, substep=0, cumulative_index=None, cplx=False): - """Retrieve the frequency corresponding to a requested step/substep or - cumulative index. + """Retrieve the frequency corresponding to a requested step/substep or cumulative index. Parameters ---------- @@ -288,9 +291,7 @@ def get_frequency(self, step=0, substep=0, cumulative_index=None, cplx=False): return self._get_frequency(step, substep, cumulative_index, cplx) def _get_frequency(self, step, substep, cumulative_index, cplx): - """Retrieves the frequency corresponding to the requested step/substep or - cumulative index. - """ + """Retrieve the frequency corresponding to the requested step/substep or cumulative index.""" if cumulative_index is None: # Use by_step methods if cplx: @@ -313,8 +314,7 @@ def _get_frequency(self, step, substep, cumulative_index, cplx): ) def get_cumulative_index(self, step=0, substep=0, freq=None, cplx=False): - """Retrieves the cumulative index corresponding to the requested step/substep - or frequency. + """Retrieve the cumulative index corresponding to the requested step/substep or frequency. 
Parameters ---------- @@ -336,8 +336,7 @@ def get_cumulative_index(self, step=0, substep=0, freq=None, cplx=False): return self._get_cumulative_index(step, substep, freq, cplx) def _get_cumulative_index(self, step, substep, freq, cplx): - """Retrieve the cumulative index corresponding to the requested step/substep - or frequency.""" + """Retrieve the cumulative index corresponding to the requested step/substep or frequency.""" if freq is None: if cplx is False: return self._api.time_freq_support_get_time_freq_cummulative_index_by_step( @@ -360,7 +359,8 @@ def _get_cumulative_index(self, step, substep, freq, cplx): ) def _sets_count(self): - """ + """Return the number of time frequency support sets. + Returns ------- count : int @@ -368,8 +368,7 @@ def _sets_count(self): return self._api.time_freq_support_get_number_sets(self) def _get_frequencies(self, cplx=False): - """Retrieves a field of all the frequencies in the model - (complex or real). + """Retrieve a field of all the frequencies in the model (complex or real). Parameters ---------- @@ -381,7 +380,6 @@ def _get_frequencies(self, cplx=False): field : dpf.core.Field Field of all the frequencies in the model (complex or real). """ - # attributes_list = self._get_attributes_list() if cplx: # and "freq_complex" in attributes_list: # return attributes_list["freq_complex"] @@ -394,7 +392,7 @@ def _get_frequencies(self, cplx=False): return dpf.core.Field(server=self._server, field=freq) def _get_rpms(self): - """Retrieves a field of all the RPMs in the model. + """Retrieve a field of all the RPMs in the model. Returns ------- @@ -406,7 +404,7 @@ def _get_rpms(self): return dpf.core.Field(server=self._server, field=rpm) def _get_harmonic_indices(self, stage_num=0): - """Retrieves a field of all the harmonic indices in the model. + """Retrieve a field of all the harmonic indices in the model. 
Returns ------- @@ -429,6 +427,7 @@ def append_step( step_harmonic_indices=None, ): """Append a step with all its field values in the time frequencies support. + The RPM value is a step (or load step)-based value. The values for time frequencies, complex frequencies, and harmonic indices are set-based. There is one set value for each step/substep combination. @@ -471,7 +470,6 @@ def append_step( >>> tfq3.append_step(1, [0.1, 0.21, 1.0], rpm_value = 2.0, step_harmonic_indices = {1: [1.0, 2.0, 3.0], 2: [1.0, 2.0, 2.5]}) """ # noqa: E501 - time_frequencies = self.time_frequencies if time_frequencies is None: time_frequencies = core.Field( diff --git a/src/ansys/dpf/core/unit_system.py b/src/ansys/dpf/core/unit_system.py index 369b65bc3d..92a1ee60af 100644 --- a/src/ansys/dpf/core/unit_system.py +++ b/src/ansys/dpf/core/unit_system.py @@ -20,12 +20,7 @@ # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. -""" -.. _ref_unit_system: - -UnitSystem - -""" +"""UnitSystem.""" from ansys.dpf import core as dpf from ansys.dpf.core import errors as dpf_errors @@ -42,7 +37,7 @@ class UnitSystem: def __init__(self, name, ID=None, unit_names=None): """ - Creates a new UnitSystem from its name and its Ansys ID + Create a new UnitSystem from its name and its Ansys ID. Parameters ---------- @@ -102,14 +97,17 @@ def __init__(self, name, ID=None, unit_names=None): @property def ID(self) -> int: + """Return ID of the unit system.""" return self._ID @property def name(self) -> str: + """Return the name of the unit system.""" return self._name @property def unit_names(self) -> str: + """Return unit names making up the unit system.""" if self._unit_names == "": # Ansys UnitSystem unit_system_strings = dpf.Operator("unit_system_strings") unit_system_strings.connect(0, self._ID) @@ -126,7 +124,7 @@ class unit_systems: Class available with server's version starting at 6.1 (Ansys 2023R2). 
Attributes - ----------- + ---------- solver_mks : Metric (m, kg, N, s, J, Pa, degC, C, rad) solver_cgs : Metric (cm, g, dyne, s, erg, dyne*cm^-2, degC, C, rad) diff --git a/src/ansys/dpf/core/vtk_helper.py b/src/ansys/dpf/core/vtk_helper.py index d0a0db37d9..54beeb0613 100644 --- a/src/ansys/dpf/core/vtk_helper.py +++ b/src/ansys/dpf/core/vtk_helper.py @@ -20,6 +20,8 @@ # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. +"""Provides for vtk helper functions.""" + import numpy as np import pyvista as pv from typing import Union @@ -150,8 +152,7 @@ def __init__( def dpf_mesh_to_vtk_op(mesh, nodes=None, as_linear=True): - """Return a pyvista unstructured grid given DPF node and element - definitions from operators (server > 6.2) + """Return a pyvista unstructured grid given DPF node and element definitions from operators (server > 6.2). Parameters ---------- @@ -191,8 +192,7 @@ def dpf_mesh_to_vtk_op(mesh, nodes=None, as_linear=True): def dpf_mesh_to_vtk_py(mesh, nodes, as_linear): - """Return a pyvista unstructured grid given DPF node and element - definitions in pure Python (server <= 6.2) + """Return a pyvista unstructured grid given DPF node and element definitions in pure Python (server <= 6.2). 
Parameters ---------- @@ -266,7 +266,7 @@ def dpf_mesh_to_vtk_py(mesh, nodes, as_linear): cells = np.insert(cells, ind, polyhedron) def compute_offset(): - """Return the starting point of a cell in the cells array""" + """Return the starting point of a cell in the cells array.""" return insert_ind + np.arange(insert_ind.size) cells_insert_ind = compute_offset() @@ -391,6 +391,7 @@ def dpf_mesh_to_vtk( def vtk_update_coordinates(vtk_grid, coordinates_array): + """Update coordinates in vtk.""" from copy import copy vtk_grid.points = copy(coordinates_array) diff --git a/src/ansys/dpf/core/workflow.py b/src/ansys/dpf/core/workflow.py index 37deda3b89..9ba05e9b89 100644 --- a/src/ansys/dpf/core/workflow.py +++ b/src/ansys/dpf/core/workflow.py @@ -20,12 +20,7 @@ # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. -""" -.. _ref_workflow_apis: - -Workflow - -""" +"""Workflow.""" import logging import os @@ -132,8 +127,7 @@ def _api(self) -> workflow_abstract_api.WorkflowAbstractAPI: @property @version_requires("3.0") def progress_bar(self) -> bool: - """With this property, the user can choose to print a progress bar when - the workflow's output is requested, default is True""" + """Enable or disable progress bar display when requesting workflow output (default: True).""" return self._progress_bar @progress_bar.setter @@ -445,6 +439,7 @@ def _type_to_output_method(self): def get_output(self, pin_name, output_type): """Retrieve the output of the operator on the pin number. + A progress bar following the workflow state is printed. Parameters @@ -634,7 +629,7 @@ def record(self, identifier="", transfer_ownership=True): @staticmethod def get_recorded_workflow(id, server=None): - """Retrieve a workflow registered (with workflow.record()) + """Retrieve a workflow registered (with workflow.record()). Parameters ---------- @@ -642,7 +637,7 @@ def get_recorded_workflow(id, server=None): ID given by the method "record". 
Returns - ---------- + ------- workflow : core.Workflow() workflow registered in dpf's registry (server side) @@ -672,7 +667,7 @@ def info(self): """Dictionary with the operator names and the exposed input and output names. Returns - ---------- + ------- info : dictionarry str->list str Dictionary with ``"operator_names"``, ``"input_names"``, and ``"output_names"`` key. """ @@ -687,7 +682,7 @@ def operator_names(self): """List of the names of operators added in the workflow. Returns - ---------- + ------- names : list str """ num = self._api.work_flow_number_of_operators(self) @@ -701,7 +696,7 @@ def input_names(self): """List of the input names exposed in the workflow with set_input_name. Returns - ---------- + ------- names : list str """ num = self._api.work_flow_number_of_input(self) @@ -715,7 +710,7 @@ def output_names(self): """List of the output names exposed in the workflow with set_output_name. Returns - ---------- + ------- names : list str """ num = self._api.work_flow_number_of_output(self) @@ -827,8 +822,9 @@ def connect_with(self, left_workflow, output_input_names=None): @version_requires("3.0") def create_on_other_server(self, *args, **kwargs): - """Create a new instance of a workflow on another server. The new - Workflow has the same operators, exposed inputs and output pins as + """Create a new instance of a workflow on another server. + + The new Workflow has the same operators, exposed inputs and output pins as this workflow. Connections between operators and between data and operators are kept (except for exposed pins). @@ -961,7 +957,7 @@ def view( return image_path def to_graphviz(self, path: Union[os.PathLike, str]): - """Saves the workflow to a GraphViz file.""" + """Save the workflow to a GraphViz file.""" return self._api.work_flow_export_graphviz(self, str(path)) @version_requires("10.0") @@ -985,6 +981,17 @@ def get_topology(self): return workflow_topology def __del__(self): + """ + Clean up resources associated with the instance. 
+ + This method calls the deleter function to release resources. If an exception + occurs during deletion, a warning is issued. + + Warns + ----- + Warning + If an exception occurs while attempting to delete resources. + """ try: if hasattr(self, "_internal_obj"): if self._internal_obj is not None and self._internal_obj != "None": diff --git a/src/ansys/dpf/core/workflow_topology/__init__.py b/src/ansys/dpf/core/workflow_topology/__init__.py index 1b670cd721..1d580e3953 100644 --- a/src/ansys/dpf/core/workflow_topology/__init__.py +++ b/src/ansys/dpf/core/workflow_topology/__init__.py @@ -19,6 +19,7 @@ # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. +"""Workflow topology module.""" from .workflow_topology import WorkflowTopology from .operator_connection import OperatorConnection diff --git a/src/ansys/dpf/core/workflow_topology/data_connection.py b/src/ansys/dpf/core/workflow_topology/data_connection.py index e9e4ef981d..31a7dbd1b3 100644 --- a/src/ansys/dpf/core/workflow_topology/data_connection.py +++ b/src/ansys/dpf/core/workflow_topology/data_connection.py @@ -21,7 +21,7 @@ # SOFTWARE. """ -DataConnection +DataConnection. This module contains the `DataConnection` and `DataConnectionsCollection` classes, which represent individual connections between data and operator, diff --git a/src/ansys/dpf/core/workflow_topology/exposed_pin.py b/src/ansys/dpf/core/workflow_topology/exposed_pin.py index 80315abc1c..ddcdf17739 100644 --- a/src/ansys/dpf/core/workflow_topology/exposed_pin.py +++ b/src/ansys/dpf/core/workflow_topology/exposed_pin.py @@ -21,7 +21,7 @@ # SOFTWARE. """ -ExposedPin +ExposedPin. 
This module contains the `ExposedPin` and `ExposedPinsCollection` classes, which represent individual exposed pins and a collection of exposed pins in a workflow, diff --git a/src/ansys/dpf/core/workflow_topology/operator_connection.py b/src/ansys/dpf/core/workflow_topology/operator_connection.py index 5cbcfaba55..bd8b84fb86 100644 --- a/src/ansys/dpf/core/workflow_topology/operator_connection.py +++ b/src/ansys/dpf/core/workflow_topology/operator_connection.py @@ -21,7 +21,7 @@ # SOFTWARE. """ -OperatorConnection +OperatorConnection. This module contains the `OperatorConnection` and `OperatorConnectionsCollection` classes, which represent individual connections between operators and a diff --git a/src/ansys/dpf/core/workflow_topology/workflow_topology.py b/src/ansys/dpf/core/workflow_topology/workflow_topology.py index 39cd20a340..75b503a5d7 100644 --- a/src/ansys/dpf/core/workflow_topology/workflow_topology.py +++ b/src/ansys/dpf/core/workflow_topology/workflow_topology.py @@ -21,7 +21,7 @@ # SOFTWARE. """ -WorkflowTopology +WorkflowTopology. This module contains the `WorkflowTopology` class, which represents the structure and relationships within a workflow, including its operators, @@ -38,9 +38,7 @@ class WorkflowTopology(CustomContainerBase): - """ - Represents the topology of a workflow, including its operators, connections, and exposed input/output pins. - """ + """Represent the workflow topology, including operators, connections, and input/output pins.""" def __init__(self, container: GenericDataContainer) -> None: """