diff --git a/docs/source/_static/dpf_operators.html b/docs/source/_static/dpf_operators.html
index 970295ea49..a93bf5e9df 100644
--- a/docs/source/_static/dpf_operators.html
+++ b/docs/source/_static/dpf_operators.html
@@ -2106,7 +2106,7 @@
Configurating operators
Only linear analysis are supported without On Demand Expansion.
All coordinates are global coordinates.
Euler Angles need to be included in the database.
- Get the XZ shear component (02 component).">Inputs
Outputs
Configurations
Scripting
result: cms dst table provider
Inputs
Outputs
Configurations
Scripting
result: spectrum data
Inputs
Outputs
Configurations
Scripting
invariant: eigen vectors (on field)
Inputs
Outputs
Configurations
Scripting
result: mapdl material properties
Inputs
Outputs
Configurations
Scripting
result: mapdl_section
Inputs
Outputs
Configurations
Scripting
result: rom data provider
Inputs
Outputs
Configurations
Scripting
result: compute invariant terms motion
Inputs
Outputs
Configurations
Scripting
result: write motion dfmf file
Inputs
Outputs
Configurations
Scripting
result: cyclic expanded element heat flux
Inputs
Outputs
Configurations
Scripting
mesh: mesh plan clipper
Inputs
Outputs
Configurations
Scripting
mesh: mesh_to_graphics_edges
Inputs
Outputs
Configurations
Scripting
serialization: migrate to vtk
Inputs
Outputs
Configurations
Scripting
mesh: combine levelset
Inputs
Outputs
Configurations
Scripting
mesh: exclude levelset
Inputs
Outputs
Configurations
Scripting
mesh: make plane levelset
Inputs
Outputs
Configurations
Scripting
mapping: fft
Inputs
Outputs
Configurations
Scripting
math: fft gradient evaluation
Inputs
Outputs
Configurations
Scripting
math: fft multi harmonic solution minmax
Inputs
Outputs
Configurations
Scripting
math: qr solve
Inputs
Outputs
Configurations
Scripting
math: svd
Inputs
Outputs
Configurations
Scripting
mapping: prep sampling fft
Inputs
Outputs
Configurations
Scripting
math: window welch
Inputs
Outputs
Configurations
Scripting
serialization: hdf5dpf generate result file
Inputs
Outputs
Configurations
Scripting
result: migrate to h5dpf
Inputs
Outputs
Configurations
Scripting
utility: hdf5dpf workflow provider
Inputs
Outputs
Configurations
Scripting
other: hdf5dpf mesh property provider
Inputs
Outputs
Configurations
Scripting
serialization: vtu export
Inputs
Outputs
Configurations
Scripting
result: compute total strain Y
Inputs
Outputs
Configurations
Scripting
result: cms dst table provider
Inputs
Outputs
Configurations
Scripting
result: spectrum data
Inputs
Outputs
Configurations
Scripting
invariant: eigen vectors (on field)
Inputs
Outputs
Configurations
Scripting
result: mapdl material properties
Inputs
Outputs
Configurations
Scripting
result: mapdl_section
Inputs
Outputs
Configurations
Scripting
result: rom data provider
Inputs
Outputs
Configurations
Scripting
result: compute invariant terms motion
Inputs
Outputs
Configurations
Scripting
result: write motion dfmf file
Inputs
Outputs
Configurations
Scripting
result: cyclic expanded element heat flux
Inputs
Outputs
Configurations
Scripting
mesh: mesh plan clipper
Inputs
Outputs
Configurations
Scripting
mesh: mesh_to_graphics_edges
Inputs
Outputs
Configurations
Scripting
serialization: migrate to vtk
Inputs
Outputs
Configurations
Scripting
mesh: combine levelset
Inputs
Outputs
Configurations
Scripting
mesh: exclude levelset
Inputs
Outputs
Configurations
Scripting
mesh: make plane levelset
Inputs
Outputs
Configurations
Scripting
mapping: fft
Inputs
Outputs
Configurations
Scripting
math: fft gradient evaluation
Inputs
Outputs
Configurations
Scripting
math: fft multi harmonic solution minmax
Inputs
Outputs
Configurations
Scripting
math: qr solve
Inputs
Outputs
Configurations
Scripting
math: svd
Inputs
Outputs
Configurations
Scripting
mapping: prep sampling fft
Inputs
Outputs
Configurations
Scripting
math: window welch
Inputs
Outputs
Configurations
Scripting
serialization: hdf5dpf generate result file
Inputs
Outputs
Configurations
Scripting
result: migrate to h5dpf
Inputs
Outputs
Configurations
Scripting
utility: hdf5dpf workflow provider
Inputs
Outputs
Configurations
Scripting
other: hdf5dpf mesh property provider
Inputs
Outputs
Configurations
Scripting
serialization: vtu export
Inputs
Outputs
Configurations
Scripting
result: compute total strain Y
>> op.inputs.data_sources.connect(my_data_sources)
>>> my_export_floats = bool()
>>> op.inputs.export_floats.connect(my_export_floats)
- >>> my_compression_worfklow = dpf.Workflow()
- >>> op.inputs.compression_worfklow.connect(my_compression_worfklow)
+ >>> my_compression_workflow = dpf.Workflow()
+ >>> op.inputs.compression_workflow.connect(my_compression_workflow)
>>> my_filtering_workflow = dpf.Workflow()
>>> op.inputs.filtering_workflow.connect(my_filtering_workflow)
+ >>> my_h5_native_compression = int()
+ >>> op.inputs.h5_native_compression.connect(my_h5_native_compression)
>>> my_requested_location = str()
>>> op.inputs.requested_location.connect(my_requested_location)
>>> my_separate_dofs = bool()
@@ -312,14 +336,18 @@ def __init__(self, op: Operator):
self._inputs.append(self._data_sources)
self._export_floats = Input(migrate_to_h5dpf._spec().input_pin(5), 5, op, -1)
self._inputs.append(self._export_floats)
- self._compression_worfklow = Input(
+ self._compression_workflow = Input(
migrate_to_h5dpf._spec().input_pin(6), 6, op, -1
)
- self._inputs.append(self._compression_worfklow)
+ self._inputs.append(self._compression_workflow)
self._filtering_workflow = Input(
migrate_to_h5dpf._spec().input_pin(7), 7, op, -1
)
self._inputs.append(self._filtering_workflow)
+ self._h5_native_compression = Input(
+ migrate_to_h5dpf._spec().input_pin(8), 8, op, -1
+ )
+ self._inputs.append(self._h5_native_compression)
self._requested_location = Input(
migrate_to_h5dpf._spec().input_pin(9), 9, op, -1
)
@@ -454,25 +482,25 @@ def export_floats(self):
return self._export_floats
@property
- def compression_worfklow(self):
- """Allows to connect compression_worfklow input to the operator.
+ def compression_workflow(self):
+ """Allows to connect compression_workflow input to the operator.
Beta option: applies input compression
workflow
Parameters
----------
- my_compression_worfklow : Workflow
+ my_compression_workflow : Workflow
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.migrate_to_h5dpf()
- >>> op.inputs.compression_worfklow.connect(my_compression_worfklow)
+ >>> op.inputs.compression_workflow.connect(my_compression_workflow)
>>> # or
- >>> op.inputs.compression_worfklow(my_compression_worfklow)
+ >>> op.inputs.compression_workflow(my_compression_workflow)
"""
- return self._compression_worfklow
+ return self._compression_workflow
@property
def filtering_workflow(self):
@@ -494,6 +522,30 @@ def filtering_workflow(self):
"""
return self._filtering_workflow
+ @property
+ def h5_native_compression(self):
+ """Allows to connect h5_native_compression input to the operator.
+
+        Integer value that defines the h5 native
+        compression used. 0: no compression
+        (default); 1-9: gzip level compression,
+        where 9 gives maximum compression but
+        at the slowest speed.
+
+ Parameters
+ ----------
+ my_h5_native_compression : int
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.migrate_to_h5dpf()
+ >>> op.inputs.h5_native_compression.connect(my_h5_native_compression)
+ >>> # or
+ >>> op.inputs.h5_native_compression(my_h5_native_compression)
+ """
+ return self._h5_native_compression
+
@property
def requested_location(self):
"""Allows to connect requested_location input to the operator.
diff --git a/src/ansys/dpf/core/operators/serialization/hdf5dpf_generate_result_file.py b/src/ansys/dpf/core/operators/serialization/hdf5dpf_generate_result_file.py
index 626cea33ac..1ac5416f6d 100644
--- a/src/ansys/dpf/core/operators/serialization/hdf5dpf_generate_result_file.py
+++ b/src/ansys/dpf/core/operators/serialization/hdf5dpf_generate_result_file.py
@@ -15,6 +15,12 @@ class hdf5dpf_generate_result_file(Operator):
Parameters
----------
+ h5_native_compression : int, optional
+        Integer value that defines the h5 native
+        compression used. 0: no compression
+        (default); 1-9: gzip level compression,
+        where 9 gives maximum compression but
+        at the slowest speed.
export_floats : bool, optional
Converts double to float to reduce file size
(default is true)
@@ -63,6 +69,8 @@ class hdf5dpf_generate_result_file(Operator):
>>> op = dpf.operators.serialization.hdf5dpf_generate_result_file()
>>> # Make input connections
+ >>> my_h5_native_compression = int()
+ >>> op.inputs.h5_native_compression.connect(my_h5_native_compression)
>>> my_export_floats = bool()
>>> op.inputs.export_floats.connect(my_export_floats)
>>> my_filename = str()
@@ -80,6 +88,7 @@ class hdf5dpf_generate_result_file(Operator):
>>> # Instantiate operator and connect inputs in one line
>>> op = dpf.operators.serialization.hdf5dpf_generate_result_file(
+ ... h5_native_compression=my_h5_native_compression,
... export_floats=my_export_floats,
... filename=my_filename,
... mesh_provider_out=my_mesh_provider_out,
@@ -95,6 +104,7 @@ class hdf5dpf_generate_result_file(Operator):
def __init__(
self,
+ h5_native_compression=None,
export_floats=None,
filename=None,
mesh_provider_out=None,
@@ -110,6 +120,8 @@ def __init__(
)
self._inputs = InputsHdf5DpfGenerateResultFile(self)
self._outputs = OutputsHdf5DpfGenerateResultFile(self)
+ if h5_native_compression is not None:
+ self.inputs.h5_native_compression.connect(h5_native_compression)
if export_floats is not None:
self.inputs.export_floats.connect(export_floats)
if filename is not None:
@@ -131,6 +143,16 @@ def _spec():
spec = Specification(
description=description,
map_input_pin_spec={
+ -2: PinSpecification(
+ name="h5_native_compression",
+ type_names=["int32"],
+ optional=True,
+                    document="""Integer value that defines the h5 native
+            compression used. 0: no compression
+            (default); 1-9: gzip level compression,
+            where 9 gives maximum compression but
+            at the slowest speed.""",
+ ),
-1: PinSpecification(
name="export_floats",
type_names=["bool"],
@@ -257,6 +279,8 @@ class InputsHdf5DpfGenerateResultFile(_Inputs):
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.serialization.hdf5dpf_generate_result_file()
+ >>> my_h5_native_compression = int()
+ >>> op.inputs.h5_native_compression.connect(my_h5_native_compression)
>>> my_export_floats = bool()
>>> op.inputs.export_floats.connect(my_export_floats)
>>> my_filename = str()
@@ -275,6 +299,10 @@ class InputsHdf5DpfGenerateResultFile(_Inputs):
def __init__(self, op: Operator):
super().__init__(hdf5dpf_generate_result_file._spec().inputs, op)
+ self._h5_native_compression = Input(
+ hdf5dpf_generate_result_file._spec().input_pin(-2), -2, op, -1
+ )
+ self._inputs.append(self._h5_native_compression)
self._export_floats = Input(
hdf5dpf_generate_result_file._spec().input_pin(-1), -1, op, -1
)
@@ -304,6 +332,30 @@ def __init__(self, op: Operator):
)
self._inputs.append(self._input_name2)
+ @property
+ def h5_native_compression(self):
+ """Allows to connect h5_native_compression input to the operator.
+
+        Integer value that defines the h5 native
+        compression used. 0: no compression
+        (default); 1-9: gzip level compression,
+        where 9 gives maximum compression but
+        at the slowest speed.
+
+ Parameters
+ ----------
+ my_h5_native_compression : int
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.serialization.hdf5dpf_generate_result_file()
+ >>> op.inputs.h5_native_compression.connect(my_h5_native_compression)
+ >>> # or
+ >>> op.inputs.h5_native_compression(my_h5_native_compression)
+ """
+ return self._h5_native_compression
+
@property
def export_floats(self):
"""Allows to connect export_floats input to the operator.
diff --git a/src/ansys/dpf/gate/_version.py b/src/ansys/dpf/gate/_version.py
index e62d53dcaa..4a7256cdd6 100644
--- a/src/ansys/dpf/gate/_version.py
+++ b/src/ansys/dpf/gate/_version.py
@@ -1,7 +1,7 @@
"""Version for ansys-dpf-gate"""
# major, minor, patch
-version_info = 0, 4, "2.dev0"
-__ansys_version__ = "241"
+version_info = 0, 5, "0.dev0"
+__ansys_version__ = "242"
# Nice string for the version
__version__ = ".".join(map(str, version_info))
diff --git a/src/ansys/dpf/gatebin/Ans.Dpf.GrpcClient.dll b/src/ansys/dpf/gatebin/Ans.Dpf.GrpcClient.dll
index 73e18912e6..6c752f4253 100644
Binary files a/src/ansys/dpf/gatebin/Ans.Dpf.GrpcClient.dll and b/src/ansys/dpf/gatebin/Ans.Dpf.GrpcClient.dll differ
diff --git a/src/ansys/dpf/gatebin/DPFClientAPI.dll b/src/ansys/dpf/gatebin/DPFClientAPI.dll
index cb2477c1f9..6bd4e1f7c5 100644
Binary files a/src/ansys/dpf/gatebin/DPFClientAPI.dll and b/src/ansys/dpf/gatebin/DPFClientAPI.dll differ
diff --git a/src/ansys/dpf/gatebin/_version.py b/src/ansys/dpf/gatebin/_version.py
index 5716a08eb5..e56fcba2b3 100644
--- a/src/ansys/dpf/gatebin/_version.py
+++ b/src/ansys/dpf/gatebin/_version.py
@@ -1,6 +1,6 @@
"""Version for ansys-dpf-gatebin"""
# major, minor, patch
-version_info = 0, 4, "2.dev0"
+version_info = 0, 5, "0.dev0"
# Nice string for the version
__version__ = ".".join(map(str, version_info))
\ No newline at end of file
diff --git a/src/ansys/dpf/gatebin/libAns.Dpf.GrpcClient.so b/src/ansys/dpf/gatebin/libAns.Dpf.GrpcClient.so
index 0c17c36d21..9c461caf2f 100644
Binary files a/src/ansys/dpf/gatebin/libAns.Dpf.GrpcClient.so and b/src/ansys/dpf/gatebin/libAns.Dpf.GrpcClient.so differ
diff --git a/src/ansys/dpf/gatebin/libDPFClientAPI.so b/src/ansys/dpf/gatebin/libDPFClientAPI.so
index 988926b265..fddf24ec34 100644
Binary files a/src/ansys/dpf/gatebin/libDPFClientAPI.so and b/src/ansys/dpf/gatebin/libDPFClientAPI.so differ
diff --git a/src/ansys/grpc/dpf/_version.py b/src/ansys/grpc/dpf/_version.py
index dcd30a579e..b3c3bdaa67 100644
--- a/src/ansys/grpc/dpf/_version.py
+++ b/src/ansys/grpc/dpf/_version.py
@@ -1,2 +1,2 @@
"""ansys-grpc-dpf python protocol version"""
-__version__ = '0.8.2dev0' # major.minor.patch
+__version__ = '0.9.0dev0' # major.minor.patch