Skip to content

Commit

Permalink
Allow GenericDataContainer as input/output of a CustomOperator and fix dpf-site.zip update (#1376)

Browse files Browse the repository at this point in the history

* Make available_operator_names available at root level

Signed-off-by: paul.profizi <[email protected]>

* Add typehint to get_or_create_server

Signed-off-by: paul.profizi <[email protected]>

* Fix get_api_for_type argument naming

Signed-off-by: paul.profizi <[email protected]>

* Allow GenericDataContainer as output of custom_operator

Signed-off-by: paul.profizi <[email protected]>

* Add test and add GDC as input. Waiting for dpf-site.zip update feature

Signed-off-by: paul.profizi <[email protected]>

* Change update of dpf-site.zip to only change ansys-dpf-core (WIP)

Signed-off-by: paul.profizi <[email protected]>

* Update of dpf-site.zip works for non-editable install of ansys-dpf-core

Signed-off-by: paul.profizi <[email protected]>

* Use update of dpf-site.zip in test_python_plugins.py

Signed-off-by: paul.profizi <[email protected]>

* Fix type_to_input method for generic_data_container in _custom_operators_helpers.py

Signed-off-by: paul.profizi <[email protected]>

* Working dpf-site.zip update when ansys-dpf-core installed non-editable

Signed-off-by: paul.profizi <[email protected]>

* Working update of dpf-site.zip for both editable and non-editable installs of ansys-dpf-core

Signed-off-by: paul.profizi <[email protected]>

* Skip test for DPF<7.0

Signed-off-by: paul.profizi <[email protected]>

* Add test_set_get_int_generic_data_container

Signed-off-by: paul.profizi <[email protected]>

* Revert "Add test_set_get_int_generic_data_container"

This reverts commit 8ca296c.

* Change operator.get_output to return None when response from server is None (instead of instantiating an empty DPF object)

Signed-off-by: paul.profizi <[email protected]>

* Skip LegacyGrpc in tests on GenericDataContainer as output of an Operator until server fix.

Signed-off-by: paul.profizi <[email protected]>

* Remove duplicate

Signed-off-by: paul.profizi <[email protected]>

---------

Signed-off-by: paul.profizi <[email protected]>
  • Loading branch information
PProfizi authored Jan 29, 2024
1 parent 2c95832 commit 6b73f99
Show file tree
Hide file tree
Showing 10 changed files with 144 additions and 60 deletions.
1 change: 1 addition & 0 deletions src/ansys/dpf/core/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -92,6 +92,7 @@
from ansys.dpf.core.mesh_info import MeshInfo
from ansys.dpf.core.generic_data_container import GenericDataContainer

from ansys.dpf.core.dpf_operator import available_operator_names

# for matplotlib
# solves "QApplication: invalid style override passed, ignoring it."
Expand Down
11 changes: 10 additions & 1 deletion src/ansys/dpf/core/_custom_operators_helpers.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@
meshes_container,
result_info,
string_field,
custom_type_field,
custom_type_field, generic_data_container,
)

external_operator_api = external_operator_capi.ExternalOperatorCAPI
Expand Down Expand Up @@ -70,6 +70,10 @@ def __operator_main__(operator_functor, data):
custom_type_field.CustomTypeField,
external_operator_api.external_operator_put_out_custom_type_field,
),
(
generic_data_container.GenericDataContainer,
external_operator_api.external_operator_put_out_generic_data_container,
)
]

_type_to_input_method = [
Expand Down Expand Up @@ -140,6 +144,11 @@ def __operator_main__(operator_functor, data):
external_operator_api.external_operator_get_in_data_tree,
"data_tree",
),
(
generic_data_container.GenericDataContainer,
external_operator_api.external_operator_get_in_generic_data_container,
"generic_data_container",
)
# TO DO : (dpf_operator.Operator, external_operator_api.external_operator_get_in_operator,
# "operator"),
]
75 changes: 63 additions & 12 deletions src/ansys/dpf/core/custom_operator.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,9 @@
import ctypes
import os
import pathlib
import re
import shutil
import tempfile
import warnings
import zipfile

Expand All @@ -37,7 +39,9 @@
from ansys.dpf.gate import object_handler, capi, dpf_vector, integral_types


def update_virtual_environment_for_custom_operators(restore_original: bool = False):
def update_virtual_environment_for_custom_operators(
restore_original: bool = False,
):
"""Updates the dpf-site.zip file used to start a venv for Python custom operators to run in.
It updates the site-packages in dpf-site.zip with the site-packages of the current venv.
Expand All @@ -54,6 +58,7 @@ def update_virtual_environment_for_custom_operators(restore_original: bool = Fal
"""
# Get the path to the dpf-site.zip in the current DPF server
server = dpf.server.get_or_create_server(dpf.SERVER)
print(server.ansys_path)
if server.has_client():
raise NotImplementedError(
"Updating the dpf-site.zip of a DPF Server is only available when InProcess."
Expand All @@ -71,6 +76,10 @@ def update_virtual_environment_for_custom_operators(restore_original: bool = Fal
else:
warnings.warn("No original dpf-site.zip found. Current is most likely the original.")
else:
# Store original dpf-site.zip for this DPF Server if no original is stored
if not os.path.exists(os.path.dirname(original_dpf_site_zip_path)):
os.mkdir(os.path.dirname(original_dpf_site_zip_path))
shutil.move(src=current_dpf_site_zip_path, dst=original_dpf_site_zip_path)
# Get the current paths to site_packages
import site
paths_to_current_site_packages = site.getsitepackages()
Expand All @@ -83,17 +92,59 @@ def update_virtual_environment_for_custom_operators(restore_original: bool = Fal
if current_site_packages_path is None:
warnings.warn("Could not find a currently loaded site-packages folder to update from.")
return
# Store original dpf-site.zip for this DPF Server if no original is stored
if not os.path.exists(os.path.dirname(original_dpf_site_zip_path)):
os.mkdir(os.path.dirname(original_dpf_site_zip_path))
shutil.move(src=current_dpf_site_zip_path, dst=original_dpf_site_zip_path)
# Zip the current site-packages at the destination
with zipfile.ZipFile(current_dpf_site_zip_path, mode="w") as archive:
for file_path in current_site_packages_path.rglob("*"):
archive.write(
filename=file_path,
arcname=file_path.relative_to(current_site_packages_path)
)
# If an ansys.dpf.core.path file exists, then the installation is editable
path_file = os.path.join(current_site_packages_path, "ansys.dpf.core.pth")
if os.path.exists(path_file):
# Treat editable installation of ansys-dpf-core
with open(path_file, "r") as f:
current_site_packages_path = f.readline()
with tempfile.TemporaryDirectory() as tmpdir:
os.mkdir(os.path.join(tmpdir, "ansys_dpf_core"))
ansys_dir = os.path.join(tmpdir, "ansys_dpf_core")
os.mkdir(os.path.join(ansys_dir, "ansys"))
os.mkdir(os.path.join(ansys_dir, "ansys", "dpf"))
os.mkdir(os.path.join(ansys_dir, "ansys", "grpc"))
shutil.copytree(
src=os.path.join(current_site_packages_path, "ansys", "dpf", "core"),
dst=os.path.join(ansys_dir, "ansys", "dpf", "core"),
ignore=lambda directory, contents: ["__pycache__", "result_files"],
)
shutil.copytree(
src=os.path.join(current_site_packages_path, "ansys", "dpf", "gate"),
dst=os.path.join(ansys_dir, "ansys", "dpf", "gate"),
ignore=lambda directory, contents: ["__pycache__"],
)
shutil.copytree(
src=os.path.join(current_site_packages_path, "ansys", "grpc", "dpf"),
dst=os.path.join(ansys_dir, "ansys", "grpc", "dpf"),
ignore=lambda directory, contents: ["__pycache__"],
)
# Find the .dist_info folder
pattern = re.compile(r'^ansys_dpf_core\S*')
for p in pathlib.Path(current_site_packages_path).iterdir():
if p.is_dir():
# print(p.stem)
if re.search(pattern, p.stem):
dist_info_path = p
break
shutil.copytree(
src=dist_info_path,
dst=os.path.join(ansys_dir, dist_info_path.name),
)
# Zip the files as dpf-site.zip
base_name = os.path.join(tmpdir, "ansys_dpf_core_zip")
base_dir = "."
root_dir = os.path.join(tmpdir, "ansys_dpf_core") # OK
shutil.make_archive(base_name=base_name, root_dir=root_dir, base_dir=base_dir, format='zip')
# Include files of interest from the original dpf-site.zip and the ansys_dpf_core.zip
with zipfile.ZipFile(current_dpf_site_zip_path, "w") as archive:
with zipfile.ZipFile(original_dpf_site_zip_path, mode="r") as original:
for item in original.infolist():
if "ansys" not in item.filename:
archive.writestr(item, original.read(item))
with zipfile.ZipFile(base_name+'.zip', mode="r") as original:
for item in original.infolist():
archive.writestr(item, original.read(item))


def record_operator(operator_type, *args) -> None:
Expand Down
17 changes: 13 additions & 4 deletions src/ansys/dpf/core/dpf_operator.py
Original file line number Diff line number Diff line change
Expand Up @@ -529,17 +529,26 @@ def get_output(self, pin=0, output_type=None):
for type_tuple in self._type_to_output_method:
if output_type is type_tuple[0]:
if len(type_tuple) >= 3:
internal_obj = type_tuple[1](self, pin)
if internal_obj is None:
self._progress_thread = None
return
if isinstance(type_tuple[2], str):
parameters = {type_tuple[2]: type_tuple[1](self, pin)}
parameters = {type_tuple[2]: internal_obj}
out = output_type(**parameters, server=self._server)
else:
out = type_tuple[2](type_tuple[1](self, pin))
out = type_tuple[2](internal_obj)
if out is None:
internal_obj = type_tuple[1](self, pin)
if internal_obj is None:
self._progress_thread = None
return
try:
return output_type(type_tuple[1](self, pin), server=self._server)
return output_type(internal_obj, server=self._server)
except TypeError:
self._progress_thread = None
return output_type(type_tuple[1](self, pin))
return output_type(internal_obj)

if out is not None:
self._progress_thread = None
return out
Expand Down
5 changes: 3 additions & 2 deletions src/ansys/dpf/core/server.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@
import inspect
import warnings
import traceback
from typing import Union

from ansys import dpf

Expand All @@ -22,7 +23,7 @@
ServerFactory,
CommunicationProtocols,
)
from ansys.dpf.core.server_types import DPF_DEFAULT_PORT, LOCALHOST, RUNNING_DOCKER
from ansys.dpf.core.server_types import DPF_DEFAULT_PORT, LOCALHOST, RUNNING_DOCKER, BaseServer
from ansys.dpf.core import server_context


Expand Down Expand Up @@ -382,7 +383,7 @@ def connect():
raise e


def get_or_create_server(server):
def get_or_create_server(server: BaseServer) -> Union[BaseServer, None]:
"""Returns the given server or if None, creates a new one.
Parameters
Expand Down
2 changes: 1 addition & 1 deletion src/ansys/dpf/core/server_types.py
Original file line number Diff line number Diff line change
Expand Up @@ -416,7 +416,7 @@ def available_api_types(self):
pass

@abc.abstractmethod
def get_api_for_type(self, c_api, grpc_api):
def get_api_for_type(self, capi, grpcapi):
pass

@property
Expand Down
58 changes: 18 additions & 40 deletions tests/test_operator.py
Original file line number Diff line number Diff line change
Expand Up @@ -113,6 +113,7 @@ def test_connect_get_out_all_types_operator(server_type):
dpf.core.TimeFreqSupport(server=server_type),
dpf.core.Workflow(server=server_type),
dpf.core.DataTree(server=server_type),
# dpf.core.GenericDataContainer(server=server_type), # Fails for LegacyGrpc
dpf.core.StringField(server=server_type),
dpf.core.CustomTypeField(np.float64, server=server_type),
]
Expand Down Expand Up @@ -261,18 +262,6 @@ def test_connect_operator_output_operator(server_type):
assert len(fOut.data) == 3


@pytest.mark.skipif(
    not conftest.SERVERS_VERSION_GREATER_THAN_OR_EQUAL_TO_7_0,
    reason="Connect an operator as an input is supported starting server version 7.0",
)
def test_connect_generic_data_container_operator(server_type):
    # Forward an empty GenericDataContainer through the generic "forward"
    # operator and check that a non-None output object comes back.
    forward_op = dpf.core.Operator("forward", server=server_type)
    container = dpf.core.GenericDataContainer(server=server_type)
    forward_op.connect(0, container)
    result = forward_op.get_output(0, dpf.core.types.generic_data_container)
    assert result is not None


@pytest.mark.skipif(
not conftest.SERVERS_VERSION_GREATER_THAN_OR_EQUAL_TO_6_2,
reason="Connect an operator as an input is supported starting server version 6.2",
Expand Down Expand Up @@ -632,19 +621,19 @@ def test_connect_model(plate_msup, server_type):
assert np.allclose(fc[0].data[0], [5.12304110e-14, 3.64308310e-04, 5.79805917e-06])


def test_operator_several_output_types(plate_msup, server_type):
inpt = dpf.core.Field(nentities=3, server=server_type)
def test_operator_several_output_types_remote(plate_msup, server_type_remote_process):
inpt = dpf.core.Field(nentities=3, server=server_type_remote_process)
inpt.data = [1, 2, 3, 4, 5, 6, 7, 8, 9]
inpt.scoping.ids = [1, 2, 3]
inpt.unit = "m"
op = dpf.core.Operator("unit_convert", server=server_type)
op = dpf.core.Operator("unit_convert", server=server_type_remote_process)
op.inputs.entity_to_convert(inpt)
op.inputs.unit_name("mm")
f = op.outputs.converted_entity_as_field()
assert f.unit == "mm"
assert np.allclose(f.data.flatten("C"), np.array([1, 2, 3, 4, 5, 6, 7, 8, 9]) * 1000)

model = dpf.core.Model(plate_msup, server=server_type)
model = dpf.core.Model(plate_msup, server=server_type_remote_process)
din = model.metadata.meshed_region.nodes.coordinates_field.data

assert model.metadata.meshed_region.nodes.coordinates_field.unit == "m"
Expand All @@ -657,26 +646,6 @@ def test_operator_several_output_types(plate_msup, server_type):
assert np.allclose(m.nodes.coordinates_field.data, np.array(din) * 1000)


def test_operator_several_output_types2(server_type):
    # Build a 3-entity field with known data so the rescope result is predictable.
    source_field = dpf.core.Field(nentities=3, server=server_type)
    source_field.data = [1, 2, 3, 4, 5, 6, 7, 8, 9]
    source_field.scoping.ids = [1, 2, 3]
    source_field.unit = "m"
    rescope = dpf.core.Operator("Rescope", server=server_type)
    rescope.inputs.fields(source_field)
    rescope.inputs.mesh_scoping(dpf.core.Scoping(ids=[1, 2]))
    # Rescoping to ids [1, 2] keeps only the first two entities' data.
    field_out = rescope.outputs.fields_as_field()
    assert np.allclose(field_out.data.flatten("C"), [1, 2, 3, 4, 5, 6])

    # The same operator also accepts a fields container as input.
    container = dpf.core.FieldsContainer(server=server_type)
    container.labels = ["time"]
    container.add_field({"time": 1}, source_field)

    rescope.inputs.fields(container)
    container_out = rescope.outputs.fields_as_fields_container()
    assert np.allclose(container_out[0].data.flatten("C"), [1, 2, 3, 4, 5, 6])


def test_create_operator_config(server_type):
conf = dpf.core.Config(server=server_type)
assert conf.config_option_documentation("mutex") == ""
Expand Down Expand Up @@ -865,14 +834,23 @@ def test_connect_get_output_double_list_operator(server_type):


@conftest.raises_for_servers_version_under("4.0")
def test_connect_get_output_data_tree_operator(server_type):
    """Forward a DataTree through the "forward" operator and read it back."""
    # NOTE(review): the scraped diff interleaved the old (no-server) and new
    # (server-aware) copies of this test; keeping only the server-aware version.
    d = dpf.core.DataTree({"name": "Paul"}, server=server_type)
    op = dpf.core.operators.utility.forward(d, server=server_type)
    d_out = op.get_output(0, dpf.core.types.data_tree)
    assert d_out.get_as("name") == "Paul"


def test_operator_several_output_types(plate_msup):
@conftest.raises_for_servers_version_under("7.0")
def test_connect_get_output_generic_data_container_operator(server_clayer):
    # Round-trip a GenericDataContainer through the "forward" utility operator
    # and check the stored property survives the trip.
    container = dpf.core.GenericDataContainer(server=server_clayer)
    container.set_property("n", 1)
    forward_op = dpf.core.operators.utility.forward(container, server=server_clayer)
    round_tripped = forward_op.get_output(0, dpf.core.types.generic_data_container)
    assert round_tripped.get_property("n") == 1


def test_operator_several_output_types_copy(plate_msup):
inpt = dpf.core.Field(nentities=3)
inpt.data = [1, 2, 3, 4, 5, 6, 7, 8, 9]
inpt.scoping.ids = [1, 2, 3]
Expand Down
16 changes: 16 additions & 0 deletions tests/test_python_plugins.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@
from conftest import SERVERS_VERSION_GREATER_THAN_OR_EQUAL_TO_4_0
from ansys.dpf import core as dpf
import conftest
from ansys.dpf.core.custom_operator import update_virtual_environment_for_custom_operators
from ansys.dpf.core.errors import DPFServerException
from ansys.dpf.core.operator_specification import (
CustomSpecification,
Expand All @@ -26,6 +27,9 @@
if platform.system() == "Linux":
pytest.skip("Known failures for the Ubuntu-latest GitHub pipelines", allow_module_level=True)

update_virtual_environment_for_custom_operators(restore_original=True)
update_virtual_environment_for_custom_operators()


@pytest.fixture(scope="module")
def load_all_types_plugin(testfiles_dir):
Expand Down Expand Up @@ -200,6 +204,18 @@ def test_data_tree(server_type_remote_process, testfiles_dir):
assert dt.get_as("name") == "Paul"


@pytest.mark.skipif(not SERVERS_VERSION_GREATER_THAN_OR_EQUAL_TO_7_0, reason="Requires DPF 7.0")
def test_generic_data_container(server_clayer_remote_process, testfiles_dir):
    # Round-trip a GenericDataContainer through the custom Python plugin operator.
    server = server_clayer_remote_process
    load_all_types_plugin_with_serv(server, testfiles_dir)
    container = dpf.GenericDataContainer(server=server)
    container.set_property(property_name="n", prop=1)
    custom_op = dpf.Operator("custom_forward_generic_data_container", server=server)
    custom_op.connect(0, container)
    result: dpf.GenericDataContainer = custom_op.get_output(0, dpf.types.generic_data_container)
    assert result is not None
    assert result.get_property("n") == 1


@conftest.raises_for_servers_version_under("4.0")
def test_syntax_error(server_type_remote_process, testfiles_dir):
dpf.load_library(
Expand Down
18 changes: 18 additions & 0 deletions tests/testfiles/pythonPlugins/all_types/dpf_types_op.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@
types,
workflow,
data_tree,
generic_data_container,
)


Expand Down Expand Up @@ -192,3 +193,20 @@ def specification(self):
@property
def name(self):
return "custom_forward_data_tree"


class ForwardGenericDataContainerOperator(CustomOperatorBase):
    """Custom operator forwarding a GenericDataContainer from input pin 0 to output pin 0."""

    def run(self):
        # Retrieve the input twice — once by class, once by dpf type enum —
        # to exercise both lookup paths of get_input.
        f = self.get_input(0, generic_data_container.GenericDataContainer)
        assert f is not None  # PEP 8: `x is not None`, not `not x is None`
        f = self.get_input(0, types.generic_data_container)
        self.set_output(0, f)
        self.set_succeeded()

    @property
    def specification(self):
        # No formal specification is declared for this test operator.
        return None

    @property
    def name(self):
        # Name under which the operator is recorded and callable server-side.
        return "custom_forward_generic_data_container"
Original file line number Diff line number Diff line change
Expand Up @@ -23,3 +23,4 @@ def load_operators(*args):
record_operator(dpf_types_op.ForwardMeshesContainerOperator, *args)
record_operator(dpf_types_op.ForwardWorkflowOperator, *args)
record_operator(dpf_types_op.ForwardDataTreeOperator, *args)
record_operator(dpf_types_op.ForwardGenericDataContainerOperator, *args)

0 comments on commit 6b73f99

Please sign in to comment.