diff --git a/.ci/fluent_test_runner.py b/.ci/fluent_test_runner.py
index d7bcf4a771b..ad03c9b5ea0 100644
--- a/.ci/fluent_test_runner.py
+++ b/.ci/fluent_test_runner.py
@@ -1,6 +1,7 @@
 """Script to run Fluent test in Docker container."""
 
 import argparse
+import concurrent.futures
 import logging
 import os
 from pathlib import Path
@@ -21,11 +22,16 @@ class FluentRuntimeError(RuntimeError):
     pass
 
 
-def run_fluent_test(journal_file: Path, launcher_args: str = "") -> None:
-    """Run Fluent test.
+def _run_single_test(
+    src_test_dir: Path, journal_file: Path, launcher_args: str
+) -> None:
+    """Run a single Fluent test.
 
     Parameters
     ----------
+    src_test_dir : Path
+        Path to the Fluent test directory in the host.
+
     journal_file : Path
         Absolute path to the journal file.
 
@@ -41,12 +47,27 @@ def run_fluent_test(journal_file: Path, launcher_args: str = "") -> None:
     src_pyfluent_dir = str(Path(pyfluent.__file__).parent)
     verion_for_file_name = FluentVersion.current_dev().number
     dst_pyfluent_dir = f"/ansys_inc/v{verion_for_file_name}/commonfiles/CPython/3_10/linx64/Release/python/lib/python3.10/site-packages/ansys/fluent/core"
-    src_test_dir = str(journal_file.parent)
+    src_gen_dir = (
+        Path(pyfluent.__file__).parent / "ansys" / "fluent" / "core" / "generated"
+    )
+    dst_gen_dir = f"/ansys_inc/v{verion_for_file_name}/fluent/fluent{FluentVersion.current_dev()!r}/cortex/pylib/flapi/generated"
     dst_test_dir = "/testing"
+    working_dir = Path(dst_test_dir)
+    parent = journal_file.parent
+    parents = []
+    while parent != src_test_dir:
+        parents.append(parent.name)
+        parent = parent.parent
+    parents.reverse()
+    for parent in parents:
+        working_dir /= parent
+    working_dir = str(working_dir)
+    src_test_dir = str(src_test_dir)
     logging.debug(f"src_pyfluent_dir: {src_pyfluent_dir}")
     logging.debug(f"dst_pyfluent_dir: {dst_pyfluent_dir}")
     logging.debug(f"src_test_dir: {src_test_dir}")
     logging.debug(f"dst_test_dir: {dst_test_dir}")
+    logging.debug(f"working_dir: {working_dir}")
     docker_client = docker.from_env()
     version_for_image_tag = FluentVersion.current_dev().docker_image_tag
@@ -55,34 +76,55 @@ def run_fluent_test(journal_file: Path, launcher_args: str = "") -> None:
         image=image_name,
         volumes=[
             f"{src_pyfluent_dir}:{dst_pyfluent_dir}",
+            f"{src_gen_dir}:{dst_gen_dir}",  # Try removing this after pyfluent is updated in commonfiles
             f"{src_test_dir}:{dst_test_dir}",
         ],
-        working_dir=dst_test_dir,
+        working_dir=working_dir,
         environment={"ANSYSLMD_LICENSE_FILE": os.environ["ANSYSLMD_LICENSE_FILE"]},
-        command=f"3ddp {launcher_args} -gu -py -i {journal_file.name}",
+        command=f"{launcher_args} -gu -py -i {journal_file.name}",
         detach=True,
         stdout=True,
         stderr=True,
+        auto_remove=True,
     )
-    while True:
-        container.reload()
-        if container.status == "exited":
-            break
-        stderr = container.logs(stdout=False, stderr=True)
-        if stderr:
-            stderr = stderr.decode()
-            for line in stderr.split("\n"):
-                if line.strip().startswith("Error:"):
-                    if "Expected exception" in line:  # for check_assert.py
-                        container.stop()
-                    else:
-                        raise FluentRuntimeError(line)
-        sleep(1)
-    logging.debug(container.logs(stderr=True).decode())
-    container.remove()
-
-
-MAX_TEST_PATH_LENGTH = 40
+    try:
+        while True:
+            container.reload()
+            if container.status == "exited":
+                break
+            stderr = container.logs(stdout=False, stderr=True)
+            if stderr:
+                stderr = stderr.decode()
+                for line in stderr.splitlines():
+                    if line.strip().startswith("Error:"):
+                        if "Expected exception" in line:  # for check_assert.py
+                            container.stop()
+                        else:
+                            raise FluentRuntimeError(stderr)
+            sleep(1)
+        print(container.logs(stderr=True).decode())
+        container.remove()
+    except docker.errors.DockerException:
+        pass
+
+
+MAX_TEST_PATH_LENGTH = 100
+
+
+def _run_single_test_with_status_print(
+    src_test_dir: Path, journal_file: Path, launcher_args: str, test_file_relpath: str
+) -> bool:
+    try:
+        _run_single_test(src_test_dir, journal_file, launcher_args)
+        print(
+            f"{test_file_relpath}{(MAX_TEST_PATH_LENGTH + 10 - len(test_file_relpath)) * '·'}PASSED"
+        )
+    except FluentRuntimeError as e:
+        print(
+            f"{test_file_relpath}{(MAX_TEST_PATH_LENGTH + 10 - len(test_file_relpath)) * '·'}FAILED"
+        )
+        print(e)
+        return True
 
 
 if __name__ == "__main__":
@@ -93,27 +135,34 @@ def run_fluent_test(journal_file: Path, launcher_args: str = "") -> None:
     )
     args = parser.parse_args()
     test_dir = Path.cwd() / args.test_dir
-    with TemporaryDirectory(ignore_cleanup_errors=True) as tmpdir:
-        copytree(test_dir, tmpdir, dirs_exist_ok=True)
-        exception_occurred = False
-        for test_file in Path(tmpdir).rglob("*.py"):
+    with TemporaryDirectory(ignore_cleanup_errors=True) as src_test_dir:
+        copytree(test_dir, src_test_dir, dirs_exist_ok=True)
+        statuses = []
+        arguments = []
+        src_test_dir = Path(src_test_dir)
+        for test_file in (src_test_dir / "fluent").rglob("*.py"):
             config_file = test_file.with_suffix(".yaml")
             launcher_args = ""
             if config_file.exists():
                 configs = yaml.safe_load(config_file.read_text())
                 launcher_args = configs.get("launcher_args", "")
-            test_file_relpath = str(test_file.relative_to(tmpdir))
-            print(f"Running {test_file_relpath}", end="", flush=True)
-            try:
-                run_fluent_test(test_file, launcher_args)
-                print(
-                    f"{(MAX_TEST_PATH_LENGTH + 10 - len(test_file_relpath)) * '·'}PASSED"
-                )
-            except FluentRuntimeError as e:
-                print(
-                    f"{(MAX_TEST_PATH_LENGTH + 10 - len(test_file_relpath)) * '·'}FAILED"
-                )
-                print(e)
-                exception_occurred = True
-        if exception_occurred:
+            test_file_relpath = str(test_file.relative_to(src_test_dir))
+            arguments.append(
+                (src_test_dir, test_file, launcher_args, test_file_relpath)
+            )
+        max_workers = int(os.getenv("MAX_WORKERS_FLUENT_TESTS", 4))
+        if max_workers > 1:
+            with concurrent.futures.ThreadPoolExecutor(
+                max_workers=max_workers
+            ) as executor:
+                futures = [
+                    executor.submit(_run_single_test_with_status_print, *args)
+                    for args in arguments
+                ]
+                for future in concurrent.futures.as_completed(futures):
+                    statuses.append(future.result())
+        else:
+            for args in arguments:
+                statuses.append(_run_single_test_with_status_print(*args))
+        if any(statuses):
             exit(1)
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 527477151ad..ef0187821db 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -117,14 +117,16 @@ jobs:
 
       - name: Install Quarto
        uses: quarto-dev/quarto-actions/setup@v2
-        with:
-          tinytex: true
-
+
       - name: Check Quarto Version
        shell: bash
        run: |
          quarto --version
 
+      - name: Verify tinytex is installed
+        run: |
+          quarto install tinytex --no-prompt --update-path
+
       - name: "Install Poppler for PDF to PNG conversion"
        shell: bash
        run: |
@@ -197,6 +199,7 @@ jobs:
          make build-doc-source
        env:
          FLUENT_IMAGE_TAG: ${{ env.DOC_DEPLOYMENT_IMAGE_TAG }}
+          PYFLUENT_DOC_SKIP_CHEATSHEET: 1
 
       - name: Zip HTML Documentation before upload
        run: |
diff --git a/.github/workflows/test-fluent-journals.yml b/.github/workflows/test-fluent-journals.yml
index 05acfd213a3..b96b5aa6b5a 100644
--- a/.github/workflows/test-fluent-journals.yml
+++ b/.github/workflows/test-fluent-journals.yml
@@ -82,4 +82,10 @@ jobs:
 
       - name: Run Fluent tests
        run: |
-          python .ci/fluent_test_runner.py tests/fluent
+          make write-and-run-fluent-tests
+        env:
+          MAX_WORKERS_FLUENT_TESTS: 1
+
+      - name: Cleanup previous docker containers
+        if: always()
+        run: make cleanup-previous-docker-containers
diff --git a/.github/workflows/test-run-dev-version-nightly.yml b/.github/workflows/test-run-dev-version-nightly.yml
index ea1569cc824..9a6f07d381b 100644
--- a/.github/workflows/test-run-dev-version-nightly.yml
+++ b/.github/workflows/test-run-dev-version-nightly.yml
@@ -13,15 +13,14 @@ env:
   PYFLUENT_WATCHDOG_DEBUG: 'OFF'
   PYFLUENT_HIDE_LOG_SECRETS: 1
   MAIN_PYTHON_VERSION: '3.10'
+  FLUENT_IMAGE_TAG: v25.2.0
+  FLUENT_VERSION: 252
 
 jobs:
   test:
     name: Unit Testing
     runs-on: [self-hosted, pyfluent]
     timeout-minutes: 120
-    env:
-      FLUENT_IMAGE_TAG: v25.2.0
-      FLUENT_VERSION: 252
 
     steps:
       - uses: actions/checkout@v4
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 2bc2a0b5ee9..fee3eadf280 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -13,7 +13,7 @@ repos:
   hooks:
   - id: black
     args: [
-      --target-version=py39
+      --target-version=py310
    ]
 
 - repo: https://github.com/pycqa/isort
diff --git a/CONTRIBUTORS.md b/CONTRIBUTORS.md
index 803bd936d15..4fc1b8a5037 100644
--- a/CONTRIBUTORS.md
+++ b/CONTRIBUTORS.md
@@ -13,4 +13,4 @@
 * [Harshal Pohekar](https://github.com/hpohekar)
 * [Mainak Kundu](https://github.com/mkundu1)
 * [Prithwish Mukherjee](https://github.com/prmukherj)
-* [Raphael Luciano](https://github.com/raph-luc)
\ No newline at end of file
+* [Raphael Luciano](https://github.com/raph-luc)
diff --git a/Makefile b/Makefile
index 7c2e9627696..ba9d5d99837 100644
--- a/Makefile
+++ b/Makefile
@@ -23,8 +23,8 @@ docker-pull:
 test-import:
 	@python -c "import ansys.fluent.core as pyfluent"
 
-PYTESTEXTRA = --cache-clear --cov=ansys.fluent --cov-report=xml:cov_xml.xml --cov-report=html
-PYTESTRERUN = --last-failed --last-failed-no-failures none
+PYTESTEXTRA = --cache-clear --cov=ansys.fluent --cov-report=xml:cov_xml.xml --cov-report=html -n 4
+PYTESTRERUN = --last-failed --last-failed-no-failures none -n 4
 
 unittest: unittest-dev-242
 
@@ -179,3 +179,9 @@ cleanup-previous-docker-containers:
 		docker stop $(docker ps -a -q); \
 	fi
 	@if [ -n "$(docker ps -a -q)" ]; then docker rm -vf $(docker ps -a -q); fi
+
+write-and-run-fluent-tests:
+	@pip install -r requirements/requirements_build.txt
+	@poetry install --with test -E reader
+	@poetry run python -m pytest --write-fluent-journals
+	@poetry run python .ci/fluent_test_runner.py tests
diff --git a/codegen/allapigen.py b/codegen/allapigen.py
index dfedf169521..74ef2cd4702 100644
--- a/codegen/allapigen.py
+++ b/codegen/allapigen.py
@@ -3,8 +3,9 @@
 from time import time
 
 from ansys.fluent.core import CODEGEN_OUTDIR, FluentMode, FluentVersion, launch_fluent
-from ansys.fluent.core.codegen import StaticInfoType, allapigen, print_fluent_version
-from ansys.fluent.core.search import _search
+from ansys.fluent.core.codegen import StaticInfoType, allapigen
+from ansys.fluent.core.codegen.print_fluent_version import print_fluent_version
+from ansys.fluent.core.search import _generate_api_data
 from ansys.fluent.core.utils.fluent_version import get_version_for_file_name
 
 if __name__ == "__main__":
@@ -60,10 +61,10 @@
     t1 = time()
     print(f"Time to fetch static info: {t1 - t0:.2f} seconds")
     CODEGEN_OUTDIR.mkdir(parents=True, exist_ok=True)
-    print_fluent_version.generate(version, solver.scheme_eval.scheme_eval)
+    print_fluent_version(solver._app_utilities)
     solver.exit()
     allapigen.generate(version, static_infos)
     t2 = time()
     print(f"Time to generate APIs: {t2 - t1:.2f} seconds")
-    _search("", version=version, write_api_tree_data=True)
+    _generate_api_data(version=version)
diff --git a/doc/api_rstgen.py b/doc/api_rstgen.py
index d65a1928910..daafaa8e178 100644
--- a/doc/api_rstgen.py
+++ b/doc/api_rstgen.py
@@ -173,8 +173,8 @@ def _generate_api_source_rst_files(folder: str, files: list):
                     f".. automodule:: ansys.fluent.core.{folder}.{file}\n"
                 )
             else:
-                rst.write(f"ansys.fluent.core.{file}\n")
-                rst.write(f'{"="*(len(f"ansys.fluent.core.{file}"))}\n\n')
+                rst.write(f"{file}\n")
+                rst.write(f'{"="*(len(f"{file}"))}\n\n')
                 rst.write(f".. automodule:: ansys.fluent.core.{file}\n")
             if "root" not in file:
                 _write_common_rst_members(rst_file=rst)
@@ -191,8 +191,8 @@ def _generate_api_index_rst_files():
         file = _get_file_path(folder, "index")
         with open(file, "w", encoding="utf8") as index:
             index.write(f".. _ref_{folder}:\n\n")
-            index.write(f"ansys.fluent.core.{folder}\n")
-            index.write(f'{"="*(len(f"ansys.fluent.core.{folder}"))}\n\n')
+            index.write(f"{folder}\n")
+            index.write(f'{"="*(len(f"{folder}"))}\n\n')
             index.write(f".. automodule:: ansys.fluent.core.{folder}\n")
             _write_common_rst_members(rst_file=index)
             index.write(".. toctree::\n")
diff --git a/doc/source/api/index.rst b/doc/source/api/index.rst
index 335561e1781..e788635c47b 100644
--- a/doc/source/api/index.rst
+++ b/doc/source/api/index.rst
@@ -10,6 +10,7 @@ full guidelines on their use.
 .. toctree::
    :maxdepth: 2
    :hidden:
+   :caption: ansys.fluent.core
 
    filereader/index
    launcher/index
diff --git a/doc/source/cheatsheet/cheat_sheet.qmd b/doc/source/cheatsheet/cheat_sheet.qmd
index 713e4c15621..8c6fbc9bdc7 100644
--- a/doc/source/cheatsheet/cheat_sheet.qmd
+++ b/doc/source/cheatsheet/cheat_sheet.qmd
@@ -36,7 +36,6 @@ jupyter:
 ### Launch and exit a meshing session
 ```{python}
-#| eval: false
 import ansys.fluent.core as pyfluent
 meshing = pyfluent.launch_fluent(
     mode=pyfluent.FluentMode.MESHING)
 meshing.exit()
 ```
@@ -46,7 +45,6 @@
 ### Launch and exit a solver session
 ```{python}
-#| eval: false
 solver = pyfluent.launch_fluent(
     mode=pyfluent.FluentMode.SOLVER)
 solver.exit()
 ```
@@ -55,7 +53,6 @@
 ### Dimension, Precision, Processor count, Product version
 ```{python}
-#| eval: false
 solver = pyfluent.launch_fluent(
     dimension=pyfluent.Dimension.THREE,
     precision=pyfluent.Precision.DOUBLE,
@@ -67,7 +64,6 @@ solver = pyfluent.launch_fluent(
 ### Connect to an existing instance of Fluent
 ```{python}
-#| eval: false
 fluent = pyfluent.connect_to_fluent(
     ip="127.0.0.1",
     port=50000,
@@ -77,7 +73,6 @@ fluent = pyfluent.connect_to_fluent(
 ### Watertight geometry meshing workflow
 ```{python}
-#| eval: false
 import ansys.fluent.core as pyfluent
 from ansys.fluent.core import examples
 import_file_name = examples.download_file('mixing_elbow.pmdb', 'pyfluent/mixing_elbow')
@@ -94,7 +89,6 @@ wt.import_geometry()
 ### Add local sizing
 ```{python}
-#| eval: false
 wt.add_local_sizing.add_child_to_task()
 wt.add_local_sizing()
 ```
@@ -102,7 +96,6 @@
 ### Generate surface mesh
 ```{python}
-#| eval: false
 csm = wt.create_surface_mesh
 csmc = csm.cfd_surface_mesh_controls
 csmc.max_size.set_state(0.3)
@@ -112,7 +105,6 @@ wt.create_surface_mesh()
 ### Describe geometry
 ```{python}
-#| eval: false
 wt.describe_geometry.update_child_tasks(
     setup_type_changed=False)
 wt.describe_geometry.setup_type.set_state("The geometry consists of only fluid regions with no voids")
@@ -124,7 +116,6 @@ wt.describe_geometry()
 ### Update boundaries
 ```{python}
-#| eval: false
 ub = wt.update_boundaries
 ub.boundary_label_list.set_state(["wall-inlet"])
 ub.boundary_label_type_list.set_state(["wall"])
@@ -138,14 +129,12 @@ ub()
 ### Update regions
 ```{python}
-#| eval: false
 wt.update_regions()
 ```
 
 ### Add boundary layers
 ```{python}
-#| eval: false
 wt.add_boundary_layer.add_child_to_task()
 wt.add_boundary_layer.insert_compound_child_task()
 wt.task("smooth-transition_1"
@@ -158,7 +147,6 @@ wt.task("smooth-transition_1")()
 ### Generate volume mesh
 ```{python}
-#| eval: false
 wt.create_volume_mesh.volume_fill.set_state(
     "poly-hexcore")
 vfc = wt.create_volume_mesh.volume_fill_controls
@@ -169,7 +157,6 @@ wt.create_volume_mesh()
 ### Switch to solution mode
 ```{python}
-#| eval: false
 solver = meshing.switch_to_solver()
 ```
@@ -182,8 +169,7 @@ file_name = examples.download_file("mixing_elbow.cas.h5", "pyfluent/mixing_elbow"
 solver = pyfluent.launch_fluent()
 solver.settings.file.read_case(
     file_name=file_name)
-bc = solver.settings.setup.boundary_conditions
-cold_inlet = bc.velocity_inlet["cold-inlet"]
+cold_inlet = pyfluent.VelocityInlet(solver, name="cold-inlet")
 cold_inlet.momentum.velocity.set_state(0.4)
 inlet_turbulence = cold_inlet.turbulence
 turbulence_specification = inlet_turbulence.turbulence_specification
@@ -199,21 +185,19 @@ cold_inlet.thermal.temperature.set_state(293.15)
 ### Cell zone conditions
 ```{python}
-#| eval: false
-cz = solver.settings.setup.cell_zone_conditions
-cz.fluid["elbow-fluid"].laminar.set_state(True)
+elbow_fluid = pyfluent.FluidCellZone(solver, name="elbow-fluid")
+elbow_fluid.laminar.set_state(True)
 ```
 
 ### Copy material from database
 ```{python}
-#| eval: false
 import ansys.fluent.core as pyfluent
 from ansys.fluent.core import examples
 file_name = examples.download_file("mixing_elbow.cas.h5", "pyfluent/mixing_elbow")
 solver = pyfluent.launch_fluent()
 solver.settings.file.read_case(file_name=file_name)
-materials = solver.settings.setup.materials
+materials = pyfluent.Materials(solver)
 fluids = materials.fluid
 fluids.make_a_copy(from_="air",to="air-2")
 air_copy = fluids["air-2"]
@@ -225,7 +209,6 @@ cz.fluid["elbow-fluid"].material.set_state("air-2")
 ### Access the object state using pprint
 ```{python}
-#| eval: false
 from pprint import pprint
 pprint(air_copy.get_state(), width=1)
 pprint(air_copy.viscosity.option.allowed_values(), width=1)
@@ -234,7 +217,6 @@
 ### Create new material
 ```{python}
-#| eval: false
 mysolid = materials.solid.create("mysolid")
 mysolid.chemical_formula.set_state("SiO2")
 mysolid.density.value.set_state(2650)
@@ -245,13 +227,12 @@ mysolid.thermal_conductivity.value.set_state(7.6)
 ### Energy model
 ```{python}
-#| eval: false
 import ansys.fluent.core as pyfluent
 from ansys.fluent.core import examples
 file_name = examples.download_file("mixing_elbow.cas.h5", "pyfluent/mixing_elbow")
 solver = pyfluent.launch_fluent()
 solver.settings.file.read_case(file_name=file_name)
-energy = solver.settings.setup.models.energy
+energy = pyfluent.Energy(solver)
 energy.enabled.get_state()
 from pprint import pprint
 pprint(energy.get_state(), width=1)
@@ -266,8 +247,7 @@ pprint(energy.get_state(), width=1)
 ### Viscous model
 ```{python}
-#| eval: false
-vs = solver.settings.setup.models.viscous
+vs = pyfluent.Viscous(solver)
 from pprint import pprint
 pprint(vs.get_state(), width=1)
 pprint(vs.model.allowed_values(), width=1)
@@ -287,8 +267,7 @@ vs_ops.production_kato_launder_enabled.is_active()
 ### Discrete phase model
 ```{python}
-#| eval: false
-dpm = solver.settings.setup.models.discrete_phase
+dpm = pyfluent.DiscretePhase(solver)
 dpm_models = dpm.physical_models
 dpm_vmf = dpm_models.virtual_mass_force
 dpm_vmf.enabled.get_state()
@@ -300,8 +279,7 @@ dpm_vmf.virtual_mass_factor.get_state()
 ### Radiation model
 ```{python}
-#| eval: false
-rn = solver.settings.setup.models.radiation
+rn = pyfluent.Radiation(solver)
 from pprint import pprint
 pprint(rn.get_state(), width=1)
 pprint(rn.model.allowed_values(), width=1)
@@ -324,9 +302,8 @@ pprint(rn.get_state(), width=1)
 ### Species model
 ```{python}
-#| eval: false
 solver.settings.file.read_case(file_name=file_name)
-species = solver.settings.setup.models.species
+species = pyfluent.Species(solver)
 species.get_state()
 from pprint import pprint
 pprint(species.model.option.allowed_values(), width=1)
@@ -339,8 +316,7 @@ species.model.material.allowed_values()
 ### Battery model
 ```{python}
-#| eval: false
-battery = solver.settings.setup.models.battery
+battery = pyfluent.Battery(solver)
 battery.enabled.set_state(True)
 battery.solution_method.allowed_values()
 ```
 
 ### Steady or transient solution model
 ```{python}
-#| eval: false
-solver_time = solver.settings.setup.general.solver.time
+setup = pyfluent.Setup(solver)
+solver_time = setup.general.solver.time
 solver_time.get_state()
 solver_time.allowed_values()
 solver_time.set_state("unsteady-1st-order")
@@ -358,8 +334,8 @@ solver_time.set_state("unsteady-1st-order")
 ### Pressure-based or density-based solver
 ```{python}
-#| eval: false
-solver_type = solver.settings.setup.general.solver.type
+setup = pyfluent.Setup(solver)
+solver_type = setup.general.solver.type
 solver_type.get_state()
 solver_type.allowed_values()
 solver_type.set_state("density-based-explicit")
@@ -369,8 +345,7 @@ solver_type.get_state()
 ### Velocity coupling scheme and gradient options
 ```{python}
-#| eval: false
-methods = solver.settings.solution.methods
+methods = pyfluent.Methods(solver)
 flow_scheme = methods.p_v_coupling.flow_scheme
 flow_scheme.allowed_values()
 flow_scheme.set_state("Coupled")
@@ -382,8 +357,8 @@ gradient_scheme.set_state("green-gauss-node-based")
 ### Solution controls
 ```{python}
-#| eval: false
-pvc = solver.settings.solution.controls.p_v_controls
+controls = pyfluent.Controls(solver)
+pvc = controls.p_v_controls
 emur = pvc.explicit_momentum_under_relaxation
 emur.min()
 emur.max()
@@ -397,8 +372,7 @@ flow_courant_number.set_state(0.3)
 ### Create a report definition
 ```{python}
-#| eval: false
-rep_defs = solver.settings.solution.report_definitions
+rep_defs = pyfluent.ReportDefinitions(solver)
 surface = rep_defs.surface
 defn_name = "outlet-temp-avg"
 surface[defn_name] = {}
@@ -410,7 +384,6 @@ out_temp.field.set_state("temperature")
 ### Initialize and solve
 ```{python}
-#| eval: false
 solution = solver.settings.solution
 solution.initialization.hybrid_initialize()
 solution.run_calculation.iterate(iter_count=100)
@@ -419,7 +392,6 @@
 ### CaseFile reader
 ```{python}
-#| eval: false
 from ansys.fluent.core import examples
 from ansys.fluent.core.filereader.case_file import CaseFile
 case_file_name = examples.download_file(
@@ -435,7 +407,6 @@ reader.num_dimensions()
 ### Additional features
 ```{python}
-#| eval: false
 reader = CaseFile(
     project_file_name="Dir1/Dir2/project.flprj")
 reader.rp_vars()
 reader.config_vars()
 ```
 
 ### Extract mesh data
 ```{python}
-#| eval: false
 from ansys.fluent.core import examples
 from ansys.fluent.core.filereader.case_file import CaseFile
 case_file_name = examples.download_file("elbow1.cas.h5", "pyfluent/file_session")
@@ -460,7 +430,6 @@ reader.get_mesh().get_vertices(3)
 ### DataFile reader
 ```{python}
-#| eval: false
 from ansys.fluent.core import examples
 from ansys.fluent.core.filereader.data_file import DataFile
 from ansys.fluent.core.filereader.case_file import CaseFile
@@ -477,7 +446,6 @@ reader.get_face_variables("phase-1")
 ### Single-phase FileSession
 ```{python}
-#| eval: false
 from ansys.fluent.core import examples
 from ansys.fluent.core.file_session import FileSession
 case_file_name = examples.download_file("elbow1.cas.h5", "pyfluent/file_session")
@@ -493,7 +461,6 @@ fs.fields.field_info.get_scalar_fields_info()
 ### Multiphase FileSession
 ```{python}
-#| eval: false
 from ansys.fluent.core import examples
 from ansys.fluent.core.file_session import FileSession
 case_file_name = examples.download_file(
@@ -513,7 +480,6 @@ fs.fields.field_info.get_scalar_fields_info()
 ### Post-processing using [ansys-fluent-visualization](https://visualization.fluent.docs.pyansys.com/version/stable/)
 ```{python}
-#| eval: false
 from ansys.fluent.visualization import set_config
 set_config(blocking=True, set_view_on_display="isometric")
 import ansys.fluent.core as pyfluent
@@ -529,7 +495,6 @@ graphics = Graphics(session=fileSession)
 ### Display mesh at wall
 ```{python}
-#| eval: false
 mesh1 = graphics.Meshes["mesh-1"]
 mesh1.show_edges = True
 mesh1.surfaces_list = [ "wall"]
@@ -539,7 +504,6 @@ mesh1.display("w1")
 ### Display temperature contour at symmetry
 ```{python}
-#| eval: false
 contour1 = graphics.Contours["mesh-1"]
 contour1.node_values = False
 contour1.field = "SV_T"
@@ -550,7 +514,6 @@ contour1.display('w2')
 ### Display velocity vector data at symmetry and wall
 ```{python}
-#| eval: false
 velocity_vector = graphics.Vectors["velocity-vector"]
 velocity_vector.field = "SV_T"
 velocity_vector.surfaces_list = ['symmetry', 'wall']
@@ -562,84 +525,88 @@ velocity_vector.display("w3")
 ### Accessing field data objects
 ```{python}
-#| eval: false
 import ansys.fluent.core as pyfluent
 from ansys.fluent.core import examples
-import_file_name = examples.download_file("mixing_elbow.msh.h5", "pyfluent/mixing_elbow")
+case_path = examples.download_file(
+    file_name="exhaust_system.cas.h5",
+    directory="pyfluent/exhaust_system"
+)
+data_path = examples.download_file(
+    file_name="exhaust_system.dat.h5",
+    directory="pyfluent/exhaust_system"
+)
 solver = pyfluent.launch_fluent(
     mode=pyfluent.FluentMode.SOLVER)
-solver.settings.file.read(file_type="case", file_name=import_file_name)
-init = solver.settings.solution.initialization
-init.hybrid_initialize()
+solver.settings.file.read_case_data(
+    file_name=case_path
+)
 field_data = solver.fields.field_data
 ```
 
 ### Get surface data
 ```{python}
-#| eval: false
 from ansys.fluent.core.services.field_data import SurfaceDataType
-vertices_data = field_data.get_surface_data(
-    surface_name="cold-inlet",
-    data_type=SurfaceDataType.Vertices)
-vertices_data.size
-vertices_data.surface_id
-vertices_data[5].x
-vertices_data[5].y
-vertices_data[5].z
-faces_normal_data = field_data.get_surface_data(
-    data_type=SurfaceDataType.FacesNormal, surface_name="cold-inlet"
+data = field_data.get_surface_data(
+    surfaces=["inlet"],
+    data_types=[SurfaceDataType.Vertices]
)
-faces_centroid_data = field_data.get_surface_data(
-    data_type=SurfaceDataType.FacesCentroid, surface_name="cold-inlet"
+data["inlet"][SurfaceDataType.Vertices].shape
+data["inlet"][SurfaceDataType.Vertices][5]
+faces_normal_and_centroid_data = field_data.get_surface_data(
+    data_types=[SurfaceDataType.FacesNormal,
+        SurfaceDataType.FacesCentroid],
+    surfaces=["inlet"]
 )
+inlet = faces_normal_and_centroid_data["inlet"]
+inlet[SurfaceDataType.FacesNormal].shape
+inlet[SurfaceDataType.FacesCentroid][15]
 faces_connectivity_data = field_data.get_surface_data(
-    data_type=SurfaceDataType.FacesConnectivity, surface_name="cold-inlet"
+    data_types=[SurfaceDataType.FacesConnectivity],
+    surfaces=["inlet"]
 )
-faces_connectivity_data[5].node_count
-faces_connectivity_data[5].node_indices
+inlet = faces_connectivity_data["inlet"]
+inlet[SurfaceDataType.FacesConnectivity][5]
 ```
 
 ### Get scalar field data
 ```{python}
-#| eval: false
 abs_press_data = field_data.get_scalar_field_data(
     field_name="absolute-pressure",
-    surface_name="cold-inlet")
-abs_press_data.size
-abs_press_data[120].scalar_data
+    surfaces=["inlet"]
+)
+abs_press_data["inlet"].shape
+abs_press_data["inlet"][120]
 ```
 
 ### Get vector field data
 ```{python}
-#| eval: false
 velocity_vector_data = field_data.get_vector_field_data(
     field_name="velocity",
-    surface_name="cold-inlet")
-velocity_vector_data.size
-velocity_vector_data.scale
+    surfaces=["inlet", "inlet1"]
+)
+velocity_vector_data["inlet"].shape
+velocity_vector_data["inlet1"].shape
 ```
 
 ### Get pathlines field data
 ```{python}
-#| eval: false
 path_lines_data = field_data.get_pathlines_field_data(
     field_name="velocity",
-    surface_name="cold-inlet")
-path_lines_data["vertices"].size
-path_lines_data["lines"].size
-path_lines_data["velocity"].size
-path_lines_data["lines"][100].node_count
-path_lines_data["lines"][100].node_indices
+    surfaces=["inlet"]
+)
+path_lines_data["inlet"]["vertices"].shape
+len(path_lines_data["inlet"]["lines"])
+path_lines_data["inlet"]["velocity"].shape
+path_lines_data["inlet"]["lines"][100]
 ```
 
 ### Accessing field info objects
 ```{python}
-#| eval: false
 import ansys.fluent.core as pyfluent
 solver = pyfluent.launch_fluent(
     mode=pyfluent.FluentMode.SOLVER)
@@ -652,7 +619,6 @@ field_info = solver.fields.field_info
 ### Get fields info and range
 ```{python}
-#| eval: false
 field_info.get_fields_info()
 field_info.get_range("velocity")
 field_info.get_range("cell-weight")
@@ -661,7 +627,6 @@
 ### Get vector fields and surfaces info
 ```{python}
-#| eval: false
 field_info.get_vector_fields_info()
 field_info.get_surfaces_info()
 ```
@@ -669,32 +634,50 @@
 ### Accessing reduction functions
 ```{python}
-#| eval: false
 import ansys.fluent.core as pyfluent
+from ansys.fluent.core.solver.function import reduction
 from ansys.fluent.core.examples import download_file
-solver = pyfluent.launch_fluent(
-    mode=pyfluent.FluentMode.SOLVER)
-case_path = download_file("Static_Mixer_main.cas.h5", "pyfluent/static_mixer")
-solver.settings.file.read(file_type="case", file_name=case_path)
-init = solver.settings.solution.initialization
-init.hybrid_initialize()
-bc = solver.settings.setup.boundary_conditions
-solver.fields.reduction.area_average(
-    expression="AbsolutePressure",
-    locations=
-    bc.velocity_inlet
+
+solver1 = pyfluent.launch_fluent(
+    mode=pyfluent.FluentMode.SOLVER
+)
+case_path = download_file(
+    file_name="exhaust_system.cas.h5",
+    directory="pyfluent/exhaust_system")
+data_path = download_file(
+    file_name="exhaust_system.dat.h5",
+    directory="pyfluent/exhaust_system")
+solver1.settings.file.read_case_data(
+    file_name=case_path
+)
+
+solver2 = pyfluent.launch_fluent(
+    mode=pyfluent.FluentMode.SOLVER
+)
+case_path = download_file(
+    "elbow1.cas.h5",
+    "pyfluent/file_session"
+)
+data_path = download_file(
+    "elbow1.dat.h5",
+    "pyfluent/file_session"
 )
+solver2.settings.file.read_case_data(
+    file_name=case_path
+)
+
+solver = solver1
 ```
 
 ### Usage of reduction context
 ```{python}
-#| eval: false
+import ansys.fluent.core as pyfluent
 init = solver.settings.solution.initialization
 init.hybrid_initialize()
-bc = solver.settings.setup.boundary_conditions
+inlet = pyfluent.VelocityInlets(solver)
 solver.fields.reduction.area(
-    locations=[bc.velocity_inlet["inlet1"]]
+    locations=[inlet["inlet1"]]
 )
 solver.fields.reduction.area(
     locations=["inlet1"],
@@ -704,7 +687,6 @@ solver.fields.reduction.area(
 ### Current reduction capabilities
 ```{python}
-#| eval: false
 reduction.area(locations)
 reduction.area_average(expression, locations)
 reduction.area_integral(expression, locations)
@@ -732,39 +714,35 @@ reduction.sum_if(expression, condition, locations, weight)
 ### Reduction example use cases
 ```{python}
-#| eval: false
-bc = solver.settings.setup.boundary_conditions
+import ansys.fluent.core as pyfluent
+inlet = pyfluent.VelocityInlets(solver)
 area_inlet_1 = solver.fields.reduction.area(
-    locations=[bc.velocity_inlet["inlet1"]])
+    locations=[inlet["inlet1"]])
 area_inlet = solver.fields.reduction.area(
-    locations=[bc.velocity_inlet])
-solver.fields.reduction.area_integral(
-    expression="AbsolutePressure",
-    locations=[bc.velocity_inlet["inlet1"]])
+    locations=[inlet])
 solver.fields.reduction.centroid(
-    locations=[bc.velocity_inlet["inlet2"]])
-solver.fields.reduction.moment(
-    expression="Force(['wall'])",
-    locations=[bc.velocity_inlet["inlet2"]])
-solver.fields.reduction.moment(
-    expression="['inlet1']",
-    locations=[bc.velocity_inlet["inlet2"]])
+    locations=[inlet["inlet2"]])
+po_1 = pyfluent.PressureOutlets(solver1)
+po_2 = pyfluent.PressureOutlets(solver2)
+solver.fields.reduction.minimum(
+    expression="AbsolutePressure",
+    locations=[po_1, po_2],
+)
 solver.fields.reduction.sum(
     expression="AbsolutePressure",
-    locations=[bc.velocity_inlet],
+    locations=[inlet],
     weight="Area")
 solver.fields.reduction.sum_if(
     expression="AbsolutePressure",
     condition="AbsolutePressure > 0[Pa]",
-    locations=[bc.velocity_inlet],
+    locations=[inlet],
     weight="Area")
 ```
 
 ### Accessing solution variable objects
 ```{python}
-#| eval: false
 import ansys.fluent.core as pyfluent
 from ansys.fluent.core import examples
 import_filename = examples.download_file("mixing_elbow.msh.h5", "pyfluent/mixing_elbow")
@@ -778,7 +756,6 @@ solution_variable_data = solver.fields.solution_variable_data
 ### Get zone information
 ```{python}
-#| eval: false
 zones_info = solution_variable_info.get_zones_info()
 zones_info.domains
 zones_info.zones
@@ -793,7 +770,6 @@ zone_info.zone_type
 ### Get solution variable information
 ```{python}
-#| eval: false
 wall_fluid_info = solution_variable_info.get_variables_info(
     zone_names=['wall' , "fluid"],
     domain_name="mixture")
@@ -808,7 +784,6 @@ solution_variable_info_centroid.field_type
 ### Get solution variable data
 ```{python}
-#| eval: false
 sv_t_wall_fluid= solution_variable_data.get_data(
     solution_variable_name="SV_T",
     zone_names=["fluid", "wall"],
@@ -824,7 +799,6 @@ fluid_temp
 ### Set solution variable data
 ```{python}
-#| eval: false
 wall_temp_array = solution_variable_data.create_empty_array(
     "SV_T", "wall", "mixture")
 fluid_temp_array = solution_variable_data.create_empty_array(
@@ -842,24 +816,22 @@ solution_variable_data.set_data(
 ### Multiple requests in a single transaction
 ```{python}
-#| eval: false
 transaction = solver.fields.field_data.new_transaction()
 transaction.add_surfaces_request(
-    surface_ids=[1], provide_vertices=True, provide_faces=False, provide_faces_centroid=True
-)
-transaction.add_surfaces_request(
-    surface_ids=[2], provide_vertices=True, provide_faces=True
+    surfaces=[1],
+    data_types=[SurfaceDataType.Vertices,
+        SurfaceDataType.FacesCentroid]
 )
 transaction.add_scalar_fields_request(
-    surface_ids=[1,2], field_name="temperature", node_value=True, boundary_value=True
+    surfaces=[1, 2],
+    field_name="pressure",
+    node_value=True,
+    boundary_value=True
 )
 transaction.add_vector_fields_request(
-    surface_ids=[1,2],
-    field_name="velocity")
-transaction.add_pathlines_fields_request(
-    surface_ids=[1,2],
-    field_name="temperature")
+    surfaces=[1, 2], field_name="velocity"
+)
 payload_data = transaction.get_fields()
 ```
@@ -883,7 +855,6 @@ sd.surface_ids.allowed_values()
 ### Monitor convergence of a solution
 ```{python}
-#| eval: false
 # get started with case and data loaded
 import ansys.fluent.core as pyfluent
 from ansys.fluent.core import examples
@@ -937,7 +908,6 @@ solver.solution.run_calculation.iterate(
 ### Observing events
 ```{python}
-#| eval: false
 from ansys.fluent.core import MeshingEvent, SolverEvent
 def on_case_loaded(session, event_info):
     print("Case loaded. Index = ", event_info.index)
@@ -952,7 +922,6 @@ callback_id = solver.events.register_callback(
 ### Transfer a case or mesh file between PyFluent sessions
 ```{python}
-#| eval: false
 import ansys.fluent.core as pyfluent
 from ansys.fluent.core.examples import download_file
 from ansys.fluent.core.utils.data_transfer import transfer_case
@@ -970,12 +939,9 @@ solver_session = pyfluent.launch_fluent(
     mode=pyfluent.FluentMode.SOLVER
 )
 transfer_case(
-    source_instance=meshing,
-    solvers=[solver],
-    file_type="mesh",
-    file_name_stem='',
-    num_files_to_try=1,
-    clean_up_temp_file=True,
+    source_instance=meshing, solvers=[solver],
+    file_type="mesh", file_name_stem='',
+    num_files_to_try=1, clean_up_temp_file=True,
     overwrite_previous=True
 )
 ```
 
 ### [PyAnsys Units](https://units.docs.pyansys.com/version/stable/) to work in arbitrary physical examples
 ```{python}
-#| eval: false
 from ansys.units import Quantity
 bc = solver.settings.setup.boundary_conditions
 vi = bc.velocity_inlet
@@ -1004,7 +969,6 @@ hyd_dia.as_quantity()
 ### Local file transfer service
 ```{python}
-#| eval: false
 import ansys.fluent.core as pyfluent
 from ansys.fluent.core import examples
 from ansys.fluent.core.utils.file_transfer_service import LocalFileTransferStrategy
@@ -1031,7 +995,6 @@ meshing_session.download(
 ### Remote file transfer service
 ```{python}
-#| eval: false
 import ansys.fluent.core as pyfluent
 from ansys.fluent.core import examples
 from ansys.fluent.core.utils.file_transfer_service import RemoteFileTransferStrategy
@@ -1058,7 +1021,6 @@ solver_session.download(
 ### Record Fluent interactions as Python scripts (journals)
 ```{python}
-#| eval: false
 solver.journal.start(
     file_name="pyfluent_journal.py")
 solver.journal.stop()
@@ -1067,7 +1029,6 @@
 ### PyFluent logging functionality
 ```{python}
-#| eval: false
 import ansys.fluent.core as pyfluent
 config_dict = pyfluent.logging.get_default_config()
 config_dict['handlers']['pyfluent_file'][
@@ -1083,7 +1044,6 @@ pyfluent.logging.set_global_level('DEBUG')
 ### API search
 ```{python}
-#| eval: false
 # Semantic search
 import ansys.fluent.core as pyfluent
 pyfluent.search("font")
@@ -1121,7 +1081,6 @@ sudo docker run -it --name ansys-inc -e ANSYSLMD_LICENSE_FILE=`.
   * - FLUENT_IMAGE_TAG
     - Specifies the Docker image tag while starting a Fluent container in :func:`launch_fluent() `.
+  * - PYFLUENT_CODEGEN_OUTDIR
+    - Specifies the directory where API files are written out during codegen.
   * - PYFLUENT_CODEGEN_SKIP_BUILTIN_SETTINGS
     - Skips the generation of built-in settings during codegen.
   * - PYFLUENT_CONTAINER_MOUNT_SOURCE
@@ -29,6 +31,10 @@ Following is a list of environment variables that can be set to control various
     - Specifies the path inside the container where the host path is mounted while starting a Fluent container in :func:`launch_fluent() `.
   * - PYFLUENT_FLUENT_DEBUG
     - Starts Fluent in debug mode while launching Fluent in :func:`launch_fluent() `.
+  * - PYFLUENT_DOC_SKIP_CHEATSHEET
+    - Skips the generation of the cheatsheet.
+  * - PYFLUENT_DOC_SKIP_EXAMPLES
+    - Skips the generation of examples documentation.
   * - PYFLUENT_FLUENT_IP
     - Specifies the IP address of the Fluent server in :func:`connect_to_fluent() `.
   * - PYFLUENT_FLUENT_PORT
@@ -47,8 +53,6 @@ Following is a list of environment variables that can be set to control various
     - Shows the Fluent GUI while launching Fluent in :func:`launch_fluent() `.
   * - PYFLUENT_SKIP_API_UPGRADE_ADVICE
     - Disables printing of TUI to settings API upgrade advice.
-  * - PYFLUENT_SKIP_EXAMPLES_DOC
-    - Skips the generation of examples documentation.
   * - PYFLUENT_TIMEOUT_FORCE_EXIT
     - Enables force exit while exiting a Fluent session and specifies the timeout in seconds.
   * - PYFLUENT_WATCHDOG_DEBUG
diff --git a/doc/source/user_guide/events.rst b/doc/source/user_guide/events.rst
index d38879b01c1..1bd18e53a0b 100644
--- a/doc/source/user_guide/events.rst
+++ b/doc/source/user_guide/events.rst
@@ -18,13 +18,19 @@ The following code triggers a callback at the end of every iteration.
 
 .. code-block:: python
 
-    >>> from ansys.fluent.core import SolverEvent
+    >>> from ansys.fluent.core import SolverEvent, IterationEndedEventInfo
     >>>
-    >>> def on_iteration_ended(session, event_info):
+    >>> def on_iteration_ended(session, event_info: IterationEndedEventInfo):
     >>>    print("Iteration ended. Index = ", event_info.index)
     >>>
     >>> callback_id = solver.events.register_callback(SolverEvent.ITERATION_ENDED, on_iteration_ended)
-    >>>
+
+The general signature of the callback function is ``cb(session, event_info, <additional arguments>)``, where ``session`` is the session instance
+and the ``event_info`` instance holds information about the event. The event information classes for each event are documented in the
+API reference of the :obj:`~ansys.fluent.core.streaming_services.events_streaming` module. See the callback function
+``on_case_loaded_with_args()`` in the examples below for how to pass additional arguments to the callback
+function.
+
 Examples
 --------
 
 .. code-block:: python
 
     >>> from ansys.fluent.core import MeshingEvent, SolverEvent
+    >>> from ansys.fluent.core import CaseLoadedEventInfo, DataLoadedEventInfo, SolutionInitializedEventInfo, IterationEndedEventInfo
     >>> from ansys.fluent.core.utils.event_loop import execute_in_event_loop_threadsafe
     >>> from ansys.fluent.visualization.matplotlib import matplot_windows_manager
     >>> from ansys.fluent.visualization.pyvista import pyvista_windows_manager
@@ -48,7 +55,7 @@ Examples
     >>> contour2.surfaces_list = ["symmetry"]
     >>>
     >>> @execute_in_event_loop_threadsafe
-    >>> def auto_refersh_call_back_iteration(session, event_info):
+    >>> def auto_refersh_call_back_iteration(session, event_info: IterationEndedEventInfo):
     >>>    if event_info.index % 5 == 0:
     >>>        pyvista_windows_manager.refresh_windows(session.id, ["contour-1", "contour-2"])
     >>>        matplot_windows_manager.refresh_windows("", ["residual"])
     >>>
     >>> callback_itr_id = solver.events.register_callback(SolverEvent.ITERATION_ENDED, auto_refersh_call_back_iteration)
     >>>
     >>> @execute_in_event_loop_threadsafe
-    >>> def initialize_call_back(session, event_info):
+    >>> def initialize_call_back(session, event_info: SolutionInitializedEventInfo | DataLoadedEventInfo):
     >>>    pyvista_windows_manager.refresh_windows(session.id, ["contour-1", "contour-2"])
     >>>    matplot_windows_manager.refresh_windows("", ["residual"])
     >>>
@@ -64,10 +71,10 @@ Examples
     >>>
     >>> callback_data_read_id = solver.events.register_callback(SolverEvent.DATA_LOADED, initialize_call_back)
     >>>
-    >>> def on_case_loaded(session, event_info):
+    >>> def on_case_loaded(session, event_info: CaseLoadedEventInfo):
     >>>    print("Case loaded. Index = ", event_info.index)
     >>>
-    >>> def on_case_loaded_with_args(x, y, session, event_info):
+    >>> def on_case_loaded_with_args(session, event_info: CaseLoadedEventInfo, x, y):
     >>>    print(f"Case loaded with {x}, {y}. Index = ", event_info.index)
     >>>
     >>> callback = meshing.events.register_callback(MeshingEvent.CASE_LOADED, on_case_loaded)
diff --git a/doc/source/user_guide/fields/field_data.rst b/doc/source/user_guide/fields/field_data.rst
index e16c0b42be5..de5c1935eda 100644
--- a/doc/source/user_guide/fields/field_data.rst
+++ b/doc/source/user_guide/fields/field_data.rst
@@ -5,31 +5,24 @@
 Field data
 ==========
 
-You can use field data objects to access Fluent surface, scalar, vector, and
-pathlines data.
+You can use field data objects to access Fluent surface, scalar, vector, and pathlines data.
 
 Accessing field data objects
 ----------------------------
 
-In order to access field data, launch the fluent solver, and make field data
-available (for example, either by reading a case file and then initializing as in the following code, or
-by reading case and data files).
+To work with field data, ensure the Fluent solver is launched and the relevant data is made available.
+You can do this either by loading both case and data files or by reading a case file and initializing.
 
 .. code-block:: python
 
     >>> import ansys.fluent.core as pyfluent
-    >>> from ansys.fluent.core import examples
-    >>> import_file_name = examples.download_file("mixing_elbow.msh.h5", "pyfluent/mixing_elbow")
+    >>> from ansys.fluent.core.examples import download_file
    >>> solver = pyfluent.launch_fluent(mode=pyfluent.FluentMode.SOLVER)
-    >>> solver.settings.file.read(file_type="case", file_name=import_file_name)
-    >>> solver.settings.solution.initialization.hybrid_initialize()
-
-The field data object is an attribute of the :obj:`~ansys.fluent.core.session_solver.Solver` object:
-
-.. code-block:: python
-
-    >>> field_data = solver.fields.field_data
+    >>> case_path = download_file(file_name="exhaust_system.cas.h5", directory="pyfluent/exhaust_system")
+    >>> data_path = download_file(file_name="exhaust_system.dat.h5", directory="pyfluent/exhaust_system")
+    >>> solver.settings.file.read_case_data(file_name=case_path)
+    >>> field_data = solver.fields.field_data  # This creates an instance of the FieldData class.
 
 Simple requests
 ---------------
@@ -38,8 +31,8 @@ Here are the methods for requesting each type of field:
 
 - ``get_surface_data`` for surface data.
 - ``get_scalar_field_data`` for scalar field data.
-- ``get_vector_field_data`` for vector field data
-- ``get_pathlines_field_data`` for vector field data
+- ``get_vector_field_data`` for vector field data.
+- ``get_pathlines_field_data`` for pathlines field data.
 
 Get surface data
 ~~~~~~~~~~~~~~~~
@@ -50,25 +43,33 @@
 the ``get_surface_data`` method and specifying ``Vertices`` for ``data_types``.
 
 .. code-block:: python
 
     >>> from ansys.fluent.core.services.field_data import SurfaceDataType
 
-    >>> vertices_data = field_data.get_surface_data(surfaces=["cold-inlet"], data_types=[SurfaceDataType.Vertices])
-    >>> vertices_data["cold-inlet"][SurfaceDataType.Vertices].shape
-    (241, 3)
-    >>> vertices_data["cold-inlet"][SurfaceDataType.Vertices][5]
-    array([-0.2       , -0.10167995,  0.00362008], dtype=float32)
+    >>> vertices_data = field_data.get_surface_data(
+    >>>     surfaces=["inlet"], data_types=[SurfaceDataType.Vertices]
+    >>> )
+    # The method retrieves surface vertex coordinates as a NumPy array.
+    # Shape: (389, 3) - This means 389 vertices, each defined by 3 coordinates (x, y, z).
+    >>> vertices_data["inlet"][SurfaceDataType.Vertices].shape
+    (389, 3)
+    >>> vertices_data["inlet"][SurfaceDataType.Vertices][5]
+    # Example: The 6th vertex (zero-based indexing) has coordinates [-0.3469, 0.0, -0.0386].
+    array([-0.34689394,  0.        , -0.03863097], dtype=float32)
 
-You can call the same method to get the corresponding surface face normals and centroids.
-For ``data_types``, specifying ``FacesNormal`` and ``FacesCentroid`` respectively.
+You can call the same method to get the corresponding surface face normals and centroids
+by specifying ``FacesNormal`` and ``FacesCentroid`` for ``data_types`` respectively.
 
 .. code-block:: python
 
     >>> faces_normal_and_centroid_data = field_data.get_surface_data(
-    >>>     data_types=[SurfaceDataType.FacesNormal, SurfaceDataType.FacesCentroid], surfaces=["cold-inlet"]
+    >>>     data_types=[SurfaceDataType.FacesNormal, SurfaceDataType.FacesCentroid],
+    >>>     surfaces=["inlet"]
    >>> )
-
-    >>> faces_normal_and_centroid_data["cold-inlet"][SurfaceDataType.FacesNormal].shape
-    (152, 3)
-    >>> faces_normal_and_centroid_data["cold-inlet"][SurfaceDataType.FacesCentroid][15]
-    array([-0.2       , -0.11418786,  0.03345207], dtype=float32)
+    # FacesNormal shape: (262, 3) - 262 face normals, each with 3 components (x, y, z).
+    # FacesCentroid shape: (262, 3) - Centroids for each of the 262 faces, given as (x, y, z).
+    >>> faces_normal_and_centroid_data["inlet"][SurfaceDataType.FacesNormal].shape
+    (262, 3)
+    >>> faces_normal_and_centroid_data["inlet"][SurfaceDataType.FacesCentroid][15]
+    # Example: The centroid of the 16th face has coordinates [-0.3463, 0.0, -0.0328].
+    array([-0.34634298,  0.        , -0.03276413], dtype=float32)
 
 You can request face connectivity data for given ``surfaces`` by calling
 the ``get_surface_data`` method and specifying ``FacesConnectivity`` for ``data_types``.
@@ -76,15 +77,12 @@ the ``get_surface_data`` method and specifying ``FacesConnectivity`` for ``data_ .. code-block:: python >>> faces_connectivity_data = field_data.get_surface_data( - >>> data_types=[SurfaceDataType.FacesConnectivity], surfaces=["cold-inlet"] + >>> data_types=[SurfaceDataType.FacesConnectivity], surfaces=["inlet"] >>> ) - >>> faces_connectivity_data["cold-inlet"][SurfaceDataType.FacesConnectivity][5] - array([12, 13, 17, 16]) - - -If a single surface is provided as input, the response contains face vertices, connectivity data, and normal or centroid data. -If multiple surfaces are provided as input, the response is a dictionary containing a map of surface IDs to face -vertices, connectivity data, and normal or centroid data. + # FacesConnectivity provides indices of vertices for each face. For example: + # Face 6 is connected to vertices 4, 5, 12, and 11. + >>> faces_connectivity_data["inlet"][SurfaceDataType.FacesConnectivity][5] + array([ 4, 5, 12, 11]) Get scalar field data ~~~~~~~~~~~~~~~~~~~~~ @@ -92,14 +90,15 @@ You can call the ``get_scalar_field_data`` method to get scalar field data, such .. code-block:: python - >>> abs_press_data = field_data.get_scalar_field_data(field_name="absolute-pressure", surfaces=["cold-inlet"]) - >>> abs_press_data["cold-inlet"].shape - (241,) - >>> abs_press_data["cold-inlet"][120] - 101325.0 - -If a single surface is provided as input, scalar field data is returned. -If multiple surfaces are provided as input, a dictionary containing a map of surface IDs to scalar field data is returned. + >>> abs_press_data = field_data.get_scalar_field_data( + >>> field_name="absolute-pressure", surfaces=["inlet"] + >>> ) + # Shape: (389,) - A single scalar value (e.g., pressure) for each of the 389 vertices. + >>> abs_press_data["inlet"].shape + (389,) + >>> abs_press_data["inlet"][120] + # Example: The absolute pressure at the 121st vertex is 102031.4 Pascals. + 102031.4 Get vector field data ~~~~~~~~~~~~~~~~~~~~~ @@ -107,12 +106,13 @@ You can call the ``get_vector_field_data`` method to get vector field data. .. code-block:: python - >>> velocity_vector_data = field_data.get_vector_field_data(field_name="velocity", surfaces=["cold-inlet"]) - >>> velocity_vector_data["cold-inlet"].shape - (152, 3) - -If a single surface is provided as input, vector field data is returned. -If multiple surfaces are provided as input, a dictionary containing a map of surface IDs to vector field data is returned. + >>> velocity_vector_data = field_data.get_vector_field_data(field_name="velocity", surfaces=["inlet", "inlet1"]) + # Shape: (262, 3) - Velocity vectors for 262 faces, each with components (vx, vy, vz) for 'inlet'. + >>> velocity_vector_data["inlet"].shape + (262, 3) + # Shape: (265, 3) - Velocity vectors for 265 faces, each with components (vx, vy, vz) for 'inlet1'. + >>> velocity_vector_data["inlet1"].shape + (265, 3) Get pathlines field data ~~~~~~~~~~~~~~~~~~~~~~~~ @@ -120,34 +120,32 @@ You can call the ``get_pathlines_field_data`` method to get pathlines field data .. 
code-block:: python - >>> path_lines_data = field_data.get_pathlines_field_data(field_name="velocity", surfaces=["cold-inlet"]) - >>> path_lines_data["cold-inlet"]["vertices"].shape - (76152, 3) - >>> len(path_lines_data["cold-inlet"]["lines"]) - 76000 - >>> path_lines_data["cold-inlet"]["velocity"].shape - (76152, ) - >>> path_lines_data["cold-inlet"]["lines"][100] + >>> path_lines_data = field_data.get_pathlines_field_data( + >>> field_name="velocity", surfaces=["inlet"] + >>> ) + # Vertices shape: (29565, 3) - 29565 pathline points, each with coordinates (x, y, z). + # Lines: A list where each entry contains indices of vertices forming a pathline. + # Velocity shape: (29565,) - Scalar velocity values at each pathline point. + >>> path_lines_data["inlet"]["vertices"].shape + (29565, 3) + >>> len(path_lines_data["inlet"]["lines"]) + 29303 + >>> path_lines_data["inlet"]["velocity"].shape + (29565,) + >>> path_lines_data["inlet"]["lines"][100] + # Example: Pathline 101 connects vertices 100 and 101. array([100, 101]) -Dictionary containing a map of surface IDs to the path-line data is returned. -For example, pathlines connectivity, vertices, and field. - - -.. note:: - In Fluent, a surface name can be associated with multiple surface IDs. - Thus, a response contains a surface ID as a key of the returned dictionary. - - Making multiple requests in a single transaction ------------------------------------------------ You can get data for multiple fields in a single transaction. -First create transaction object for field data. +First, create a transaction object for field data. .. code-block:: python >>> transaction = solver.fields.field_data.new_transaction() + # This creates a new transaction object for batching multiple requests. Then combine requests for multiple fields using ``add__request`` methods in a single transaction: @@ -156,55 +154,39 @@ Then combine requests for multiple fields using ``add__request`` methods - ``add_vector_fields_request`` adds a vector fields request. - ``add_pathlines_fields_request`` adds a pathlines fields request. -Following code demonstrate adding multiple requests to a single transaction. - -.. code-block:: +.. code-block:: python >>> transaction.add_surfaces_request( - >>> surfaces=[1], data_types = [SurfaceDataType.Vertices, SurfaceDataType.FacesCentroid], - >>> ) - >>> transaction.add_surfaces_request( - >>> surfaces=[2], data_types = [SurfaceDataType.Vertices, SurfaceDataType.FacesConnectivity], + >>> surfaces=[1], data_types=[SurfaceDataType.Vertices, SurfaceDataType.FacesCentroid] >>> ) + # Adds a request for surface data such as vertices and centroids. >>> transaction.add_scalar_fields_request( - >>> surfaces=[1,2], field_name="pressure", node_value=True, boundary_value=True + >>> surfaces=[1, 2], field_name="pressure", node_value=True, boundary_value=True >>> ) - >>> transaction.add_vector_fields_request(surfaces=[1,2], field_name="velocity") - >>> transaction.add_pathlines_fields_request(surfaces=[1,2], field_name="temperature") - + # Adds a request for scalar field data like pressure. + >>> transaction.add_vector_fields_request( + >>> surfaces=[1, 2], field_name="velocity" + >>> ) + # Adds a request for vector field data like velocity. -You can call the ``get_fields`` method to get the data for all these requests. This call also -clears all requests, so that subsequent calls to the ``get_fields`` method yield nothing until -more requests are added. +You can call the ``get_fields`` method to execute the transaction and retrieve the data: -.. 
code-block:: +.. code-block:: python >>> payload_data = transaction.get_fields() + # Executes all requests and returns the combined field data. ``payload_data`` is a dictionary containing the requested fields as a numpy array in the following order: ``tag -> surface_id [int] -> field_name [str] -> field_data[np.array]`` +.. note:: + ``get_fields`` call also clears all requests, so that subsequent calls to this method + yield nothing until more requests are added. + Tag --- - -Fluent versions earlier than 2023 R1 -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -A tag is int, generated by applying ``bitwise or`` on all tags for a request. Here is a list -of supported tags and their values: - -* OVERSET_MESH: 1, -* ELEMENT_LOCATION: 2, -* NODE_LOCATION: 4, -* BOUNDARY_VALUES: 8, - -For example, if you request the scalar field data for element location[2], in the -dictionary, ``tag`` is ``2``. Similarly, if you request the boundary values[8] for -node location[4], ``tag`` is ``(4|8)`` or 12. - -Fluent versions 2023 R1 and later -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ A tag is tuple of input, value pairs for which field data is generated. For example, if you request the scalar field data for element location, in the @@ -231,7 +213,6 @@ depending on the request arguments: - ``centroid``, which contains face centroids - ``face-normal``, which contains face normals - Scalar field request ~~~~~~~~~~~~~~~~~~~~ The response to a scalar field request contains a single field with the same @@ -271,17 +252,76 @@ Some sample use cases are demonstrated below: >>> field_data.get_scalar_field_data.field_name.allowed_values() ['abs-angular-coordinate', 'absolute-pressure', 'angular-coordinate', - 'anisotropic-adaption-cells', 'axial-coordinate', 'axial-velocity', + 'anisotropic-adaption-cells', 'aspect-ratio', 'axial-coordinate', 'axial-velocity', 'boundary-cell-dist', 'boundary-layer-cells', 'boundary-normal-dist', ...] >>> transaction = field_data.new_transaction() >>> transaction.add_scalar_fields_request.field_name.allowed_values() ['abs-angular-coordinate', 'absolute-pressure', 'angular-coordinate', - 'anisotropic-adaption-cells', 'axial-coordinate', 'axial-velocity', + 'anisotropic-adaption-cells', 'aspect-ratio', 'axial-coordinate', 'axial-velocity', 'boundary-cell-dist', 'boundary-layer-cells', 'boundary-normal-dist', ...] >>> field_data.get_scalar_field_data.surface_name.allowed_values() - ['cold-inlet', 'hot-inlet', 'outlet', 'symmetry-xyplane', 'wall-elbow', 'wall-inlet'] + ['in1', 'in2', 'in3', 'inlet', 'inlet1', 'inlet2', 'out1', 'outlet', 'solid_up:1', 'solid_up:1:830', 'solid_up:1:830-shadow'] >>> field_data.get_surface_data.surface_ids.allowed_values() - [0, 1, 2, 3, 4, 5] + [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10] + + +Field data streaming +-------------------- + +PyFluent's field data streaming service allows you to dynamically observe changes +in field data by tracking its values in real time. You can integrate PyFluent's +field data streaming callback mechanism with visualization +tools from the Python ecosystem, making it easy to visualize the data of interest. + +.. note:: + In **Meshing mode**, only 'field_data_streaming' provides a valid interface as of now. + Other methods currently return an empty array when used in Meshing mode. + + The 'field_data_streaming' is available only for the **Meshing mode**. + +The following example demonstrates how to update mesh data in **Meshing mode** +using the field data streaming mechanism: + +.. 
code-block:: python + + >>> import ansys.fluent.core as pyfluent + >>> from ansys.fluent.core import examples + + >>> # Download example geometry file + >>> import_file_name = examples.download_file( + >>> "mixing_elbow.pmdb", "pyfluent/mixing_elbow" + >>> ) + + >>> # Launch Fluent in Meshing mode + >>> meshing = pyfluent.launch_fluent(mode="meshing") + + >>> # Dictionary to store mesh data + >>> mesh_data = {} + + >>> # Define a callback function to process streamed field data + >>> def plot_mesh(index, field_name, data): + >>> if data is not None: + >>> if index in mesh_data: + >>> mesh_data[index].update({field_name: data}) + >>> else: + >>> mesh_data[index] = {field_name: data} + + >>> # Register the callback function + >>> meshing.fields.field_data_streaming.register_callback(plot_mesh) + + >>> # Start field data streaming with byte stream and chunk size + >>> meshing.fields.field_data_streaming.start(provideBytesStream=True, chunkSize=1024) + + >>> # Initialize the Meshing workflow + >>> meshing.workflow.InitializeWorkflow(WorkflowType="Watertight Geometry") + + >>> # Import the geometry into the workflow + >>> meshing.workflow.TaskObject["Import Geometry"].Arguments = { + >>> "FileName": import_file_name, + >>> "LengthUnit": "in", + >>> } + + >>> meshing.workflow.TaskObject["Import Geometry"].Execute() diff --git a/doc/source/user_guide/fields/reduction.rst b/doc/source/user_guide/fields/reduction.rst index 606c7fb3531..e2aaa974667 100644 --- a/doc/source/user_guide/fields/reduction.rst +++ b/doc/source/user_guide/fields/reduction.rst @@ -3,107 +3,109 @@ Reduction ========= -You can use reduction functions on Fluent data from one -or across multiple remote Fluent sessions. +You can use reduction functions on Fluent data from one or across multiple remote Fluent sessions. +PyFluent provides both **functional** and **object-oriented** approaches to applying reduction functions. +While both are supported, the **functional approach** is emphasized for its flexibility, +particularly when working with multiple solver sessions. -Accessing reduction functions ------------------------------ +Introduction to Reduction Functions +----------------------------------- -In order to access reduction function, import it and launch the Fluent solver. -Then, make boundary conditions data, etc. available (for example, by reading case files): +Reduction functions perform operations like computing averages, integrals, and sums over specified data locations, +such as areas or volumes. + +To demonstrate the following examples, first initialize two separate solver sessions +with two separate examples case files as follows: .. 
code-block:: python - >>> import ansys.fluent.core as pyfluent - >>> from ansys.fluent.core.examples import download_file - >>> solver = pyfluent.launch_fluent(mode=pyfluent.FluentMode.SOLVER) - >>> case_path = download_file("Static_Mixer_main.cas.h5", "pyfluent/static_mixer") - >>> solver.settings.file.read(file_type="case", file_name=case_path) + >>> import ansys.fluent.core as pyfluent + >>> from ansys.fluent.core.solver.function import reduction + >>> from ansys.fluent.core.examples import download_file + >>> solver1 = pyfluent.launch_fluent(mode=pyfluent.FluentMode.SOLVER) + >>> case_path = download_file(file_name="exhaust_system.cas.h5", directory="pyfluent/exhaust_system") + >>> data_path = download_file(file_name="exhaust_system.dat.h5", directory="pyfluent/exhaust_system") + >>> solver1.settings.file.read_case_data(file_name=case_path) -Simple usage ------------- + >>> solver2 = pyfluent.launch_fluent(mode=pyfluent.FluentMode.SOLVER) + >>> case_path = download_file("elbow1.cas.h5", "pyfluent/file_session") + >>> data_path = download_file("elbow1.dat.h5", "pyfluent/file_session") + >>> solver2.settings.file.read_case_data(file_name=case_path) -You can use the reduction functions from PyFluent simply by initializing the solution -and accessing the select functions with the required parameters. + >>> solver = solver1 -For example, in the below case, do hybrid initialization of the solution and perform -an area-average of absolute pressure over the velocity inlet. -.. code-block:: python +Functional Usage +---------------- - >>> solver.settings.solution.initialization.hybrid_initialize() - >>> solver.fields.reduction.area_average( - >>> expression="AbsolutePressure", - >>> locations=solver.settings.setup.boundary_conditions.velocity_inlet, - >>> ) - 101325.0000000001 +The **functional approach** is preferred for its: -Similarly one can use the other functions available currently with PyFluent. +1. **Conciseness**: Avoids deeply nested paths in code. +2. **Flexibility**: Supports reductions over multiple solver sessions or complex data sources. -.. note:: - The fluxes are evaluated on boundaries and face zones. So, for 'volume', 'mass_flow', - 'mass_average' and 'mass_integrated_average' the chosen location cannot be a - user-defined surface. +Reduction functions can be accessed directly via the `reduction` module. +Here's how to set up a simple example: -Usage of context ----------------- +.. code-block:: python -You can also use the context argument available with all the reduction functions -to mention the context instead of listing down the entire path of the locations, -and the path to the location is identified automatically. + >>> from ansys.fluent.core import VelocityInlets + >>> # Compute the minimum of absolute pressure across multiple solvers + >>> reduction.minimum( + ... expression="AbsolutePressure", + ... locations=[VelocityInlets(settings_source=solver) for solver in [solver1, solver2]], + ... ) + 101343.2241809384 -For example, to calculate area of a location one has to do: + +Object-Oriented Usage +--------------------- +The **object-oriented approach** leverages solver instance attributes +like `solver.fields.reduction` to perform reductions. While this approach +is intuitive for single-solver scenarios, it may be less suited to multi-solver or functional-style workflows. + +To use reduction functions within a specific solver instance, initialize the solver and access the functions via `solver.fields.reduction`: .. 
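code-block:: python

   >>> # A minimal sketch (output omitted): total area of all velocity inlets,
   >>> # accessed through the solver-instance attribute.
   >>> solver.fields.reduction.area(
   ...     locations=solver.settings.setup.boundary_conditions.velocity_inlet,
   ... )

An area average through the same attribute looks like this:

..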
code-block:: python

-   >>> solver.settings.solution.initialization.hybrid_initialize()
-   >>> solver.fields.reduction.area(
-   >>>     locations=[solver.settings.setup.boundary_conditions.velocity_inlet["inlet1"]]
-   >>> )
-   7.565427133371293e-07
+   >>> solver.fields.reduction.area_average(
+   ...     expression="AbsolutePressure",
+   ...     locations=solver.settings.setup.boundary_conditions.velocity_inlet,
+   ... )
+   101957.2452989816

-Instead, one can use the context argument:
+For convenience, context-aware reductions are also supported:

 .. code-block:: python

-   >>> solver.settings.solution.initialization.hybrid_initialize()
-   >>> solver.fields.reduction.area(locations=["inlet1"], ctxt=solver)
-   7.565427133371293e-07
+   >>> solver.fields.reduction.area(locations=["inlet1"])
+   0.002555675491754098
+
+   >>> reduction.area(locations=["inlet1"], ctxt=solver)
+   0.002555675491754098

-Current capabilities
---------------------
-At present, PyFluent allows the usage of the following reduction functions:
+Reduction Functions: Capabilities
+----------------------------------

-Area
-~~~~
-Compute the total area of the specified locations.
+The following reduction functions are available in PyFluent:

+- **Area**: Compute the total area.
 .. code-block:: python

   >>> reduction.area(locations)

-Area average
-~~~~~~~~~~~~
-Compute the area averaged value of the specified expression over the specified locations.
-
+- **Area Average**: Compute the area-averaged value of an expression.
 .. code-block:: python

   >>> reduction.area_average(expression, locations)

-Area integral
-~~~~~~~~~~~~~
-Compute the area integrated averaged of the specified expression over the specified locations.
-
+- **Area Integral**: Compute the area integral of an expression.
 .. code-block:: python

   >>> reduction.area_integral(expression, locations)

-Volume
-~~~~~~
-Compute the total volume of the specified locations.
-
+- **Volume**: Compute the total volume.
 .. code-block:: python

   >>> reduction.volume(locations)

@@ -111,99 +113,62 @@ Compute the total volume of the specified locations.

 .. note:: Only boundaries and face zones are allowed locations. It cannot be a user-defined surface.

-Volume average
-~~~~~~~~~~~~~~
-Compute the volume averaged value of the specified expression over the specified locations.
-
+- **Volume Average**: Compute the volume-averaged value of an expression.
 .. code-block:: python

   >>> reduction.volume_average(expression, locations)

-Volume integral
-~~~~~~~~~~~~~~~
-Compute the volume integrated averaged of the specified expression over the specified locations.
-
+- **Volume Integral**: Compute the volume integral of an expression.
 .. code-block:: python

   >>> reduction.volume_integral(expression, locations)

-Centroid
-~~~~~~~~
-Compute the geometric centroid of the specified locations as a vector.
-
+- **Centroid**: Compute the geometric centroid.
 .. code-block:: python

   >>> reduction.centroid(locations)

-Force
-~~~~~
-Compute the force acting on the locations specified (should be walls) as a vector.
-
+- **Force**: Compute the total force vector on specified walls.
 .. code-block:: python

   >>> reduction.force(locations)

-Pressure force
-~~~~~~~~~~~~~~
-Compute the pressure force acting on the locations specified (should be walls) as a vector.
-
+- **Pressure Force**: Compute the pressure force vector on specified walls.
 .. code-block:: python

   >>> reduction.pressure_force(locations)

-Viscous force
-~~~~~~~~~~~~~
-Compute the viscous force acting on the locations specified (should be walls) as a vector.
-
+- **Viscous Force**: Compute the viscous force vector on specified walls.
 .. code-block:: python

   >>> reduction.viscous_force(locations)

-Moment
-~~~~~~
-Compute the moment vector about the specified point (which can be single-valued expression)
-for the specified locations.
-
+- **Moment**: Compute the moment vector about the specified point (which can be a single-valued expression).
 .. code-block:: python

   >>> reduction.moment(expression, locations)

-Count
-~~~~~
-Compute the total number of cells included in the specified locations.
-
+- **Count**: Compute the total number of cells in specified locations.
 .. code-block:: python

   >>> reduction.count(locations)

-Count if
-~~~~~~~~
-Compute the total number of cells included in the specified locations if a condition is satisfied.
-
+- **Count If**: Compute the conditional count.
 .. code-block:: python

   >>> reduction.count_if(condition, locations)

-Minimum
-~~~~~~~
-Compute the minimum of the specified expression over the specified locations.
-
+- **Minimum**: Compute the minimum value of an expression.
 .. code-block:: python

   >>> reduction.minimum(expression, locations)

-Maximum
-~~~~~~~
-Compute the maximum of the specified expression over the specified locations.
-
+- **Maximum**: Compute the maximum value of an expression.
 .. code-block:: python

   >>> reduction.maximum(expression, locations)

-Mass average
-~~~~~~~~~~~~
-Compute the mass-weighted average value of the specified expression over the specified locations.
-
+- **Mass Average**: Compute the mass-weighted average of an expression.
 .. code-block:: python

   >>> reduction.mass_average(expression, locations)

@@ -211,10 +176,7 @@ Compute the mass-weighted average value of the specified expression over the spe

 .. note:: Only boundaries and face zones are allowed locations. It cannot be a user-defined surface.

-Mass integral
-~~~~~~~~~~~~~
-Compute the total mass-weighted value of the specified expression over the specified locations.
-
+- **Mass Integral**: Compute the mass-weighted integral of an expression.
 .. code-block:: python

   >>> reduction.mass_integral(expression, locations)

@@ -222,138 +184,130 @@ Compute the total mass-weighted value of the specified expressi

 .. note:: Only boundaries and face zones are allowed locations. It cannot be a user-defined surface.

-Mass flow average absolute
-~~~~~~~~~~~~~~~~~~~~~~~~~~
-Compute the mass-flow-weighted absolute average value of the specified expression over the specified locations.
-
+- **Mass Flow Average Absolute**: Compute the mass-flow-weighted absolute average of an expression.
 .. code-block:: python

   >>> reduction.mass_flow_average_absolute(expression, locations)
-
-Mass flow average
-~~~~~~~~~~~~~~~~~
-Compute the mass-flow-weighted average value of the specified expression over the specified locations.
-
+- **Mass Flow Average**: Compute the mass-flow-weighted average of an expression.
 .. code-block:: python

   >>> reduction.mass_flow_average(expression, locations)

-Mass flow integral
-~~~~~~~~~~~~~~~~~~
-Compute the total mass-flow-weighted value of the specified expression over the specified locations.
-
+- **Mass Flow Integral**: Compute the mass-flow-weighted integral of an expression.
 .. code-block:: python

   >>> reduction.mass_flow_integral(expression, locations)

-Sum
-~~~
-Compute the sum of the specified expression over the specified locations.
-
+- **Sum**: Compute the sum of an expression over locations.
 ..
code-block:: python >>> reduction.sum(expression, locations, weight) -Sum if -~~~~~~ -Compute the sum of the specified expression over the specified locations if a condition is satisfied. - +- **Sum If**: Compute the conditional sum of an expression. .. code-block:: python >>> reduction.sum_if(expression, condition, locations, weight) -Example use cases ------------------ -You can either calculate the area of one inlet or the combine area of all -the velocity inlets with the below examples: +.. note:: + The fluxes are evaluated on boundaries and face zones. So, for 'volume', 'mass_flow', + 'mass_average' and 'mass_integrated_average' the chosen location cannot be a + user-defined surface. -.. code-block:: python +Each function supports both the functional and object-oriented formats. See the following examples for typical use cases. - >>> area_inlet_1 = solver.fields.reduction.area( - >>> locations=[solver.settings.setup.boundary_conditions.velocity_inlet["inlet1"]], - >>> ) - 7.565427133371293e-07 +Examples +-------- - >>> area_inlet = solver.fields.reduction.area( - >>> locations=[solver.settings.setup.boundary_conditions.velocity_inlet], - >>> ) - 1.513085401926681e-06 +**Example: Area Average** + +Functional: + +.. code-block:: python + + >>> reduction.area_average( + ... expression="AbsolutePressure", + ... locations=solver.setup.boundary_conditions.velocity_inlet, + ... ) + 101957.2452989816 -You can calculate the area average of "Absolute Pressure" over the entire set of velocity -inlets as shown: +Object-Oriented: .. code-block:: python >>> solver.fields.reduction.area_average( - >>> expression="AbsolutePressure", - >>> locations=solver.settings.setup.boundary_conditions.velocity_inlet, - >>> ) - 101325.0000000001 + ... expression="AbsolutePressure", + ... locations=solver.settings.setup.boundary_conditions.velocity_inlet, + ... ) + 101957.2452989816 -You can calculate the area integrated average of "Absolute Pressure" over the velocity inlet 1 -as shown: +**Example: Minimum Across Multiple Solvers** .. code-block:: python - >>> solver.fields.reduction.area_integral( - >>> expression="AbsolutePressure", - >>> locations=[solver.settings.setup.boundary_conditions.velocity_inlet["inlet1"]], - >>> ) - 0.07665669042888468 + >>> reduction.minimum( + ... expression="AbsolutePressure", + ... locations=[ + ... solver1.setup.boundary_conditions.pressure_outlet, + ... solver2.setup.boundary_conditions.pressure_outlet, + ... ], + ... ) + 101325.0 -You can calculate the geometric centroid of the velocity inlet 2 as shown: +**Example: Using Boundary Abstractions** .. code-block:: python - >>> solver.fields.reduction.centroid( - >>> locations=[solver.settings.setup.boundary_conditions.velocity_inlet["inlet2"]] - >>> ) - x: -0.001000006193379666 - y: -0.002999999999999999 - z: 0.001500047988232209 + >>> reduction.minimum( + ... expression="AbsolutePressure", + ... locations=[ + ... VelocityInlets(settings_source=solver) for solver in [solver1, solver2] + ... ], + ... ) + 101343.2241809384 -You can calculate the moment vector about a single-valued expression -for the specified locations as shown: +**Example: Geometric centroid of the velocity inlet 2** .. 
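code-block:: python

   >>> # Hypothetical variant (output omitted): the centroid located via a
   >>> # surface-name string and the ctxt argument, as in the context-aware
   >>> # examples above.
   >>> reduction.centroid(locations=["inlet2"], ctxt=solver)

The same reduction, with the location given as an explicit settings path:

..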
code-block:: python - >>> solver.fields.reduction.moment( - >>> expression="Force(['wall'])", + >>> cent = reduction.centroid( >>> locations=[solver.settings.setup.boundary_conditions.velocity_inlet["inlet2"]] >>> ) - [ 1.15005117e-24, 1.15218653e-24, -6.60723735e-20] + >>> cent.array + array([-2.85751176e-02, -7.92555538e-20, -4.41951790e-02]) -You can calculate the moment vector about the specified point for the -specified locations as shown: +**Example: Geometric centroid of the velocity inlets over multiple solvers** .. code-block:: python - >>> solver.fields.reduction.moment( - >>> expression="['inlet1']", - >>> locations=[solver.settings.setup.boundary_conditions.velocity_inlet["inlet2"]] + >>> cent = reduction.centroid( + >>> locations=[VelocityInlets(settings_source=solver) for solver in [solver1, solver2]] >>> ) - [ 1.15005117e-24, 1.15218653e-24, -6.60723735e-20] + >>> cent.array + array([-0.35755706, -0.15706201, -0.02360788]) + -One can calculate sum of Absolute Pressure over all nodes of velocity inlet with area as weight. +**Example: Sum with area as weight** .. code-block:: python - >>> solver.fields.reduction.sum( + >>> reduction.sum( >>> expression="AbsolutePressure", >>> locations=[solver.settings.setup.boundary_conditions.velocity_inlet], >>> weight="Area" >>> ) - 20670300.0 + 80349034.56621933 -You can also calculate the sum with a condition: +**Example: Conditional sum** .. code-block:: python - >>> solver.fields.reduction.sum_if( + >>> reduction.sum_if( >>> expression="AbsolutePressure", >>> condition="AbsolutePressure > 0[Pa]", >>> locations=[solver.settings.setup.boundary_conditions.velocity_inlet], >>> weight="Area" >>> ) - 20670300.0 + 80349034.56621933 + +.. note:: Boundary abstractions such as `PressureOutlets` and `VelocityInlets` simplify workflows by removing the need to specify complex paths. diff --git a/doc/source/user_guide/index.rst b/doc/source/user_guide/index.rst index 92d62c6c85e..64fd8dc4f6a 100644 --- a/doc/source/user_guide/index.rst +++ b/doc/source/user_guide/index.rst @@ -47,7 +47,7 @@ A simple example >>> setup.cell_zone_conditions.set_zone_type(zone_list="elbow-fluid", new_type="fluid") >>> solution.initialization.hybrid_initialize() >>> solution.run_calculation.iterate(iter_count=100) - >>> velocity_data = solver.fields.field_data.get_vector_field_data(field_name="velocity", surface_name="cold-inlet") + >>> velocity_data = solver.fields.field_data.get_vector_field_data(field_name="velocity", surfaces=["cold-inlet"]) Key features diff --git a/doc/source/user_guide/offline/file_session.rst b/doc/source/user_guide/offline/file_session.rst index 6e0f7b60282..d9bed1612b6 100644 --- a/doc/source/user_guide/offline/file_session.rst +++ b/doc/source/user_guide/offline/file_session.rst @@ -86,17 +86,17 @@ Single-phase 'vertices': array([ 0. , -0.1016 , 0. , ..., 0.06435075, -0.08779959, 0. 
])}} >>> from ansys.fluent.core.services.field_data import SurfaceDataType - >>> file_session.fields.field_data.get_surface_data(SurfaceDataType.Vertices, [3, 4])[3].size - 3810 - >>> file_session.fields.field_data.get_surface_data(SurfaceDataType.Vertices, [3, 4])[3][1500].x + >>> file_session.fields.field_data.get_surface_data([SurfaceDataType.Vertices], [3, 4])[3].shape + (3810, 3) + >>> file_session.fields.field_data.get_surface_data(data_types=[SurfaceDataType.Vertices], surfaces=[3, 4])[3][1500][0] 0.12405861914157867 - >>> file_session.fields.field_data.get_scalar_field_data("SV_T", surface_name="wall").size - 3630 - >>> file_session.fields.field_data.get_scalar_field_data("SV_T", surface_name="wall")[1500].scalar_data + >>> file_session.fields.field_data.get_scalar_field_data("SV_T", surfaces=["wall"])["wall"].shape + (3630,) + >>> file_session.fields.field_data.get_scalar_field_data("SV_T", surfaces=["wall"])["wall"][1500] 293.18071329432047 - >>> file_session.fields.field_data.get_vector_field_data("velocity", surface_name="symmetry").size - 2018 - >>> file_session.fields.field_data.get_vector_field_data("velocity", surface_name="symmetry")[1000].x + >>> file_session.fields.field_data.get_vector_field_data("velocity", surfaces=["symmetry"])["symmetry"].shape + (2018, 3) + >>> file_session.fields.field_data.get_vector_field_data("velocity", surfaces=["symmetry"])["symmetry"][1000][0] 0.001690600193527586 @@ -147,17 +147,17 @@ Multiphase 'vector-field'),): {30: {'phase-1:velocity': array([0., ..... 0.]), 'vector-scale': array([0.1])}}} >>> from ansys.fluent.core.services.field_data import SurfaceDataType - >>> file_session.fields.field_data.get_surface_data(SurfaceDataType.Vertices, [30])[30].size - 79 - >>> ffile_session.fields.field_data.get_surface_data(SurfaceDataType.Vertices, [30])[30][50].x + >>> file_session.fields.field_data.get_surface_data([SurfaceDataType.Vertices], [30])[30].shape + (79, 3) + >>> file_session.fields.field_data.get_surface_data([SurfaceDataType.Vertices], [30])[30][50][0] 0.14896461503555408 - >>> file_session.fields.field_data.get_scalar_field_data("phase-1:SV_P", surface_name="wall-elbow").size - 2168 - >>> file_session.fields.field_data.get_scalar_field_data("phase-1:SV_P", surface_name="wall-elbow")[1100].scalar_data + >>> file_session.fields.field_data.get_scalar_field_data("phase-1:SV_P", surfaces=["wall-elbow"])["wall-elbow"].shape + (2168,) + >>> file_session.fields.field_data.get_scalar_field_data("phase-1:SV_P", surfaces=["wall-elbow"])["wall-elbow"][1100] 1.4444035696104466e-11 - >>> file_session.fields.field_data.get_vector_field_data("phase-2:velocity", surface_name="wall-elbow").size - 2168 - >>> file_session.fields.field_data.get_vector_field_data("phase-2:velocity", surface_name="wall-elbow")[1000].x + >>> file_session.fields.field_data.get_vector_field_data("phase-2:velocity", surfaces=["wall-elbow"])["wall-elbow"].shape + (2168, 3) + >>> file_session.fields.field_data.get_vector_field_data("phase-2:velocity", surfaces=["wall-elbow"])["wall-elbow"][1000][0] 0.0 diff --git a/doc/source/user_guide/session/session.rst b/doc/source/user_guide/session/session.rst index d9599d52a49..e34c45a8522 100644 --- a/doc/source/user_guide/session/session.rst +++ b/doc/source/user_guide/session/session.rst @@ -119,7 +119,7 @@ that of the ``settings`` objects: 'aspect-ratio'] >>> add_scalar_fields.surface_names.allowed_values() ['cold-inlet', 'hot-inlet', 'outlet', 'symmetry-xyplane', 'wall-elbow', 'wall-inlet'] - >>> 
add_scalar_fields(field_name='absolute-pressure', surface_names=['cold-inlet', 'hot-inlet', 'outlet', 'symmetry-xyplane', 'wall-elbow', 'wall-inlet']) + >>> add_scalar_fields(field_name='absolute-pressure', surfaces=['cold-inlet', 'hot-inlet', 'outlet', 'symmetry-xyplane', 'wall-elbow', 'wall-inlet']) >>> pressure_fields = transaction.get_fields() >>> solver.fields.reduction.sum_if( >>> expression="AbsolutePressure", diff --git a/doc/source/user_guide/solver_settings/index.rst b/doc/source/user_guide/solver_settings/index.rst index 18ea6b1a9df..47f73783e19 100644 --- a/doc/source/user_guide/solver_settings/index.rst +++ b/doc/source/user_guide/solver_settings/index.rst @@ -100,7 +100,7 @@ as a dictionary for ``Group`` and ``NamedObject`` types or as a list for ``ListO .. code-block:: >>> import ansys.fluent.core as pyfluent - >>> viscous = pyfluent.Viscous(settings_source=solver) + >>> viscous = pyfluent.solver.Viscous(settings_source=solver) >>> viscous.model() 'k-epsilon-standard' @@ -108,7 +108,7 @@ as a dictionary for ``Group`` and ``NamedObject`` types or as a list for ``ListO .. code-block:: >>> import ansys.fluent.core as pyfluent - >>> energy = pyfluent.Energy(settings_source=solver) + >>> energy = pyfluent.solver.Energy(settings_source=solver) >>> from pprint import pprint >>> pprint (energy(), width=1) {'enabled': True, @@ -116,7 +116,7 @@ as a dictionary for ``Group`` and ``NamedObject`` types or as a list for ``ListO 'kinetic_energy': False, 'pressure_work': False, 'viscous_dissipation': False} - >>> inlet1 = pyfluent.VelocityInlet(settings_source=solver, name="inlet1") + >>> inlet1 = pyfluent.solver.VelocityInlet(settings_source=solver, name="inlet1") >>> inlet1.vmag.constant() 10.0 @@ -129,11 +129,11 @@ and ``NamedObject`` types, the state value is a dictionary. For the .. code-block:: >>> import ansys.fluent.core as pyfluent - >>> viscous = pyfluent.Viscous(settings_source=solver) + >>> viscous = pyfluent.solver.Viscous(settings_source=solver) >>> viscous.model = 'laminar' - >>> energy = pyfluent.Energy(settings_source=solver) + >>> energy = pyfluent.solver.Energy(settings_source=solver) >>> energy = { 'enabled' : False } - >>> inlet1 = pyfluent.VelocityInlet(settings_source=solver, name="inlet1") + >>> inlet1 = pyfluent.solver.VelocityInlet(settings_source=solver, name="inlet1") >>> inlet1.vmag.constant = 14 @@ -165,7 +165,7 @@ You can print the current state in a simple text format with the .. code-block:: >>> import ansys.fluent.core as pyfluent - >>> models = pyfluent.Models(settings_source=solver) + >>> models = pyfluent.solver.Models(settings_source=solver) >>> models.print_state() @@ -219,7 +219,7 @@ for that object or returns ``None`` otherwise. .. code-block:: >>> import ansys.fluent.core as pyfluent - >>> viscous = pyfluent.Viscous(settings_source=solver) + >>> viscous = pyfluent.solver.Viscous(settings_source=solver) >>> viscous.model.allowed_values() ['inviscid', 'laminar', 'k-epsilon-standard', 'k-omega-standard', 'mixing-length', 'spalart-allmaras', 'k-kl-w', 'transition-sst', 'reynolds-stress', 'scale-adaptive-simulation', 'detached-eddy-simulation', 'large-eddy-simulation'] @@ -227,7 +227,7 @@ for that object or returns ``None`` otherwise. .. 
code-block:: >>> import ansys.fluent.core as pyfluent - >>> viscous = pyfluent.Viscous(settings_source=solver) + >>> viscous = pyfluent.solver.Viscous(settings_source=solver) >>> viscous.model.get_attr('allowed-values') ['inviscid', 'laminar', 'k-epsilon-standard', 'k-omega-standard', 'mixing-length', 'spalart-allmaras', 'k-kl-w', 'transition-sst', 'reynolds-stress', 'scale-adaptive-simulation', 'detached-eddy-simulation', 'large-eddy-simulation'] @@ -235,7 +235,7 @@ for that object or returns ``None`` otherwise. .. code-block:: >>> import ansys.fluent.core as pyfluent - >>> viscous = pyfluent.Viscous(settings_source=solver) + >>> viscous = pyfluent.solver.Viscous(settings_source=solver) >>> viscous.model.get_attrs(['allowed-values']) {'allowed-values': ['inviscid', 'laminar', 'k-epsilon', 'k-omega', 'mixing-length', 'spalart-allmaras', 'k-kl-w', 'transition-sst', 'reynolds-stress', 'scale-adaptive-simulation', 'detached-eddy-simulation', 'large-eddy-simulation']} @@ -244,28 +244,22 @@ These examples accesses the list of zone surfaces: .. code-block:: - >>> root.solution.report_definitions.flux["mass_flow_rate"] = {} - >>> root.solution.report_definitions.flux[ - "mass_flow_rate" - ].zone_names.allowed_values() + >>> solver.settings.solution.report_definitions.flux["mass_flow_rate"] = {} + >>> solver.settings.solution.report_definitions.flux["mass_flow_rate"].boundaries.allowed_values() ['symmetry-xyplane', 'hot-inlet', 'cold-inlet', 'outlet', 'wall-inlet', 'wall-elbow', 'interior--elbow-fluid'] .. code-block:: - >>> root.solution.report_definitions.flux["mass_flow_rate"] = {} - >>> root.solution.report_definitions.flux[ - "mass_flow_rate" - ].zone_names.get_attr("allowed-values") + >>> solver.settings.solution.report_definitions.flux["mass_flow_rate"] = {} + >>> solver.settings.solution.report_definitions.flux["mass_flow_rate"].boundaries.get_attr("allowed-values") ['symmetry-xyplane', 'hot-inlet', 'cold-inlet', 'outlet', 'wall-inlet', 'wall-elbow', 'interior--elbow-fluid'] .. code-block:: - >>> root.solution.report_definitions.flux["mass_flow_rate"] = {} - >>> root.solution.report_definitions.flux[ - "mass_flow_rate" - ].zone_names.get_attrs(["allowed-values"]) + >>> solver.settings.solution.report_definitions.flux["mass_flow_rate"] = {} + >>> solver.settings.solution.report_definitions.flux["mass_flow_rate"].boundaries.get_attrs(["allowed-values"]) {'allowed-values': ['symmetry-xyplane', 'hot-inlet', 'cold-inlet', 'outlet', 'wall-inlet', 'wall-elbow', 'interior--elbow-fluid']} @@ -303,7 +297,7 @@ in a single solver session: >>> solver.settings.file.read(file_type="case", file_name=import_file_name) Fast-loading... 
...Done - >>> viscous = pyfluent.Viscous(settings_source=solver) + >>> viscous = pyfluent.solver.Viscous(settings_source=solver) >>> viscous.is_active() True >>> viscous.model.is_read_only() @@ -322,7 +316,7 @@ in a single solver session: 'scale-adaptive-simulation', 'detached-eddy-simulation', 'large-eddy-simulation'] - >>> cold_inlet = pyfluent.VelocityInlet(settings_source=solver, name="cold-inlet") + >>> cold_inlet = pyfluent.solver.VelocityInlet(settings_source=solver, name="cold-inlet") >>> cold_inlet.turb_intensity.min() 0 >>> cold_inlet.turb_intensity.max() @@ -339,7 +333,7 @@ The ``get_active_child_names()`` method returns a list of active children:: >>> import ansys.fluent.core as pyfluent - >>> models = pyfluent.Models(settings_source=solver) + >>> models = pyfluent.solver.Models(settings_source=solver) >>> models.get_active_child_names() ['energy', 'multiphase', 'viscous'] @@ -355,14 +349,14 @@ You can use wildcards when using named objects, list objects, and string list se For named objects and list objects, for instance:: >>> import ansys.fluent.core as pyfluent - >>> fluid = pyfluent.FluidCellZone(settings_source=solver, name="*") + >>> fluid = pyfluent.solver.FluidCellZone(settings_source=solver, name="*") >>> fluid.source_terms["*mom*"]() {'fluid': {'source_terms': {'x-momentum': [], 'y-momentum': [], 'z-momentum': []}}} Also, when you have one or more velocity inlets with "inlet" in their names:: >>> import ansys.fluent.core as pyfluent - >>> inlet = pyfluent.VelocityInlet(settings_source=solver, name="*inlet*") + >>> inlet = pyfluent.solver.VelocityInlet(settings_source=solver, name="*inlet*") >>> inlet.vmag() {'velo-inlet_2': {'vmag': {'option': 'value', 'value': 50}}, 'velo-inlet_1': {'vmag': {'option': 'value', 'value': 35}} diff --git a/doc/source/user_guide/solver_settings/solution.rst b/doc/source/user_guide/solver_settings/solution.rst index 1b7a32b8a41..60d99daf19a 100644 --- a/doc/source/user_guide/solver_settings/solution.rst +++ b/doc/source/user_guide/solver_settings/solution.rst @@ -9,7 +9,8 @@ Steady or transient solution model .. code:: python - >>> solver_time = solver.settings.setup.general.solver.time + >>> setup = pyfluent.Setup(settings_source=solver) + >>> solver_time = setup.general.solver.time >>> solver_time.get_state() 'steady' >>> solver_time.allowed_values() @@ -22,7 +23,8 @@ Pressure-based or density-based solver .. code:: python - >>> solver_type = solver.settings.setup.general.solver.type + >>> setup = pyfluent.Setup(settings_source=solver) + >>> solver_type = setup.general.solver.type >>> solver_type.get_state() 'pressure-based' >>> solver_type.allowed_values() @@ -37,7 +39,7 @@ Velocity coupling scheme and gradient options .. code:: python - >>> methods = solver.settings.solution.methods + >>> methods = pyfluent.Methods(settings_source=solver) >>> flow_scheme = methods.p_v_coupling.flow_scheme >>> flow_scheme.allowed_values() ['SIMPLE', 'SIMPLEC', 'PISO', 'Coupled'] @@ -53,7 +55,8 @@ Solution controls .. code:: python - >>> p_v_controls = solver.settings.solution.controls.p_v_controls + >>> controls = pyfluent.Controls(settings_source=solver) + >>> p_v_controls = controls.p_v_controls >>> explicit_momentum_under_relaxation = p_v_controls.explicit_momentum_under_relaxation >>> explicit_momentum_under_relaxation.min() 0 @@ -72,7 +75,8 @@ Create a report definition .. 
code:: python - >>> surface_report_definitions = solver.settings.solution.report_definitions.surface + >>> rep_defs = pyfluent.ReportDefinitions(settings_source=solver) + >>> surface_report_definitions = rep_defs.surface >>> defn_name = "outlet-temp-avg" >>> surface_report_definitions[defn_name] = {} >>> outlet_temperature = surface_report_definitions[defn_name] diff --git a/examples/00-fluent/brake.py b/examples/00-fluent/brake.py index 7d9dd28295e..5a1ba0b7c47 100644 --- a/examples/00-fluent/brake.py +++ b/examples/00-fluent/brake.py @@ -211,7 +211,6 @@ "filled": True, } -session.settings.results.graphics.contour["temperature"] = {} session.settings.results.graphics.contour["temperature"] = { "field": "temperature", "surfaces_list": "wall*", @@ -233,19 +232,21 @@ "bground_color": "#CCD3E2", "title_elements": "Variable and Object Name", }, - "range_option": { - "option": "auto-range-off", - "auto_range_off": {"maximum": 400.0, "minimum": 300, "clip_to_range": False}, - }, } +session.settings.results.graphics.contour["temperature"].range_option.option = ( + "auto-range-off" +) +session.settings.results.graphics.contour["temperature"].range_option.set_state( + { + "auto_range_off": {"maximum": 400.0, "minimum": 300, "clip_to_range": False}, + } +) + session.settings.results.graphics.views.restore_view(view_name="top") session.settings.results.graphics.views.camera.zoom(factor=2) session.settings.results.graphics.views.save_view(view_name="animation-view") -session.settings.solution.calculation_activity.solution_animations[ - "animate-temperature" -] = {} session.settings.solution.calculation_activity.solution_animations[ "animate-temperature" ] = { diff --git a/examples/00-fluent/parametric_static_mixer_1.py b/examples/00-fluent/parametric_static_mixer_1.py index 85e976f422c..3193ae879b3 100644 --- a/examples/00-fluent/parametric_static_mixer_1.py +++ b/examples/00-fluent/parametric_static_mixer_1.py @@ -218,7 +218,7 @@ solver_session.settings.parametric_studies[ "Static_Mixer_main-Solve" -].design_points.create_1(write_data=False, capture_simulation_report_data=True) +].design_points.create(write_data=False, capture_simulation_report_data=True) solver_session.settings.parametric_studies["Static_Mixer_main-Solve"].design_points[ "DP1" @@ -319,7 +319,9 @@ precision="double", processor_count=2, mode="solver" ) -project_filepath_read = str(Path(pyfluent.EXAMPLES_PATH) / "static_mixer_study.flprj") +project_filepath_read = str( + Path(pyfluent.EXAMPLES_PATH) / "static_mixer_study_save.flprj" +) solver_session.settings.file.parametric_project.open( project_filename=project_filepath_read, load_case=True diff --git a/examples/00-fluent/radiation_headlamp.py b/examples/00-fluent/radiation_headlamp.py index c84ed078e5c..9f11211219d 100644 --- a/examples/00-fluent/radiation_headlamp.py +++ b/examples/00-fluent/radiation_headlamp.py @@ -366,7 +366,7 @@ bezel_enc_bc.thermal.material = "plastic" bezel_enc_bc.radiation.radiation_bc = "Opaque" bezel_enc_bc.radiation.internal_emissivity = 1 -bezel_enc_bc.radiation.diffuse_fraction_band = {"s-": 1} +bezel_enc_bc.radiation.diffuse_irradiation_settings.diffuse_fraction_band = {"s-": 1} # Get list of wall zones bc_state = solver.settings.setup.boundary_conditions.get_state() @@ -389,7 +389,7 @@ enc_lens_bc = solver.settings.setup.boundary_conditions.wall["enclosure-lens"] enc_lens_bc.thermal.material = "glass" enc_lens_bc.radiation.radiation_bc = "Semi Transparent" -enc_lens_bc.radiation.diffuse_fraction_band = {"s-": 0} 
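+# The diffuse fraction band is now set through the diffuse_irradiation_settings
+# sub-object of the wall's radiation settings.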
+enc_lens_bc.radiation.diffuse_irradiation_settings.diffuse_fraction_band = {"s-": 0} # Copy enclosure-lens BC to other lens boundary solver.settings.setup.boundary_conditions.copy( @@ -407,7 +407,9 @@ enc_rim_bezel_bc.thermal.material = "plastic" enc_rim_bezel_bc.radiation.radiation_bc = "Opaque" enc_rim_bezel_bc.radiation.internal_emissivity = 0.16 -enc_rim_bezel_bc.radiation.diffuse_fraction_band = {"s-": 0.1} +enc_rim_bezel_bc.radiation.diffuse_irradiation_settings.diffuse_fraction_band = { + "s-": 0.1 +} # Copy enclosure-rim-bezel BC to other rim bezel boundaries solver.settings.setup.boundary_conditions.copy( @@ -440,8 +442,12 @@ rad_input_bc.thermal.thermal_condition = "Temperature" rad_input_bc.thermal.temperature.value = 298.15 rad_input_bc.radiation.boundary_source = True -rad_input_bc.radiation.direct_irradiation = {"s-": 1200} -rad_input_bc.radiation.reference_direction = [-0.848, 0, -0.53] +rad_input_bc.radiation.direct_irradiation_settings.direct_irradiation = {"s-": 1200} +rad_input_bc.radiation.direct_irradiation_settings.reference_direction = [ + -0.848, + 0, + -0.53, +] ########################################################################################################### # Set convergence criteria diff --git a/examples/00-fluent/species_transport.py b/examples/00-fluent/species_transport.py index c8ba88dbb34..5663282ccac 100644 --- a/examples/00-fluent/species_transport.py +++ b/examples/00-fluent/species_transport.py @@ -70,7 +70,8 @@ from pathlib import Path # noqa: E402 -from ansys.fluent.core import ( # noqa: E402 +from ansys.fluent.core.examples import download_file # noqa: E402 +from ansys.fluent.core.solver import ( # noqa: E402 Contour, Energy, Mesh, @@ -82,7 +83,6 @@ Viscous, WallBoundary, ) -from ansys.fluent.core.examples import download_file # noqa: E402 # %% # Mesh diff --git a/pyproject.toml b/pyproject.toml index e4cff4f9743..e31f5f0b487 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -5,7 +5,7 @@ build-backend = "poetry.core.masonry.api" [tool.poetry] # Check https://python-poetry.org/docs/pyproject/ for all available sections name = "ansys-fluent-core" -version = "0.27.dev1" +version = "0.28.dev0" description = "PyFluent provides Pythonic access to Ansys Fluent" license = "MIT" authors = ["ANSYS, Inc. 
"] @@ -40,10 +40,10 @@ packages = [ [tool.poetry.dependencies] python = ">=3.10,<4.0" -ansys-api-fluent = "^0.3.28" +ansys-api-fluent = "^0.3.30" ansys-platform-instancemanagement = "~=1.0" ansys-tools-filetransfer = ">=0.1,<0.3" -ansys-units = "^0.3.2" +ansys-units = "^0.3.3" docker = ">=7.1.0" grpcio = "^1.30.0" grpcio-health-checking = "^1.30.0" @@ -108,7 +108,6 @@ addopts = """ -v --durations=0 --show-capture=all --n 4 """ markers = [ "settings_only: Read and modify the case settings only, without loading the mesh, initializing, or solving the case", diff --git a/src/ansys/fluent/core/__init__.py b/src/ansys/fluent/core/__init__.py index 5ee31d5a345..c2050d8ff0b 100644 --- a/src/ansys/fluent/core/__init__.py +++ b/src/ansys/fluent/core/__init__.py @@ -31,11 +31,7 @@ from ansys.fluent.core.search import search # noqa: F401 from ansys.fluent.core.services.batch_ops import BatchOps # noqa: F401 from ansys.fluent.core.session import BaseSession as Fluent # noqa: F401 -from ansys.fluent.core.streaming_services.events_streaming import ( # noqa: F401 - Event, - MeshingEvent, - SolverEvent, -) +from ansys.fluent.core.streaming_services.events_streaming import * # noqa: F401, F403 from ansys.fluent.core.utils import fldoc, get_examples_download_dir from ansys.fluent.core.utils.fluent_version import FluentVersion # noqa: F401 from ansys.fluent.core.utils.setup_for_fluent import setup_for_fluent # noqa: F401 @@ -45,11 +41,6 @@ warning, ) -try: - from ansys.fluent.core.generated.solver.settings_builtin import * # noqa: F401, F403 -except (ImportError, AttributeError, SyntaxError): - pass - _VERSION_INFO = None """Global variable indicating the version of the PyFluent package - Empty by default""" @@ -107,11 +98,10 @@ def version_info() -> str: # Whether to use remote gRPC file transfer service USE_FILE_TRANSFER_SERVICE = False -# Directory where API files are writes out during codegen -CODEGEN_OUTDIR = (Path(__file__) / ".." / "generated").resolve() - -# Whether to zip settings API files during codegen -CODEGEN_ZIP_SETTINGS = os.getenv("PYFLUENT_CODEGEN_ZIP_SETTINGS", False) +# Directory where API files are written out during codegen +CODEGEN_OUTDIR = os.getenv( + "PYFLUENT_CODEGEN_OUTDIR", (Path(__file__) / ".." 
/ "generated").resolve() +) # Whether to show mesh in Fluent after case read FLUENT_SHOW_MESH_AFTER_CASE_READ = False @@ -128,5 +118,25 @@ def version_info() -> str: # Whether to skip health check CHECK_HEALTH = True +# Whether to print search results +PRINT_SEARCH_RESULTS = True + # Whether to clear environment variables related to Fluent parallel mode CLEAR_FLUENT_PARA_ENVS = False + +# Set stdout of the launched Fluent process +# Valid values are same as subprocess.Popen's stdout argument +LAUNCH_FLUENT_STDOUT = None + +# Set stderr of the launched Fluent process +# Valid values are same as subprocess.Popen's stderr argument +LAUNCH_FLUENT_STDERR = None + +# Set the IP address of the Fluent server while launching Fluent +LAUNCH_FLUENT_IP = None + +# Set the port of the Fluent server while launching Fluent +LAUNCH_FLUENT_PORT = None + +# Skip password check during rpc execution when Fluent is launched from PyFluent +LAUNCH_FLUENT_SKIP_PASSWORD_CHECK = False diff --git a/src/ansys/fluent/core/_version.py b/src/ansys/fluent/core/_version.py index 0240e55ca02..7042815c134 100644 --- a/src/ansys/fluent/core/_version.py +++ b/src/ansys/fluent/core/_version.py @@ -6,7 +6,7 @@ """ # major, minor, patch -version_info = 0, 27, "dev1" +version_info = 0, 28, "dev0" # Nice string for the version __version__ = ".".join(map(str, version_info)) diff --git a/src/ansys/fluent/core/codegen/allapigen.py b/src/ansys/fluent/core/codegen/allapigen.py index cb26b444bf8..1c9f620845b 100644 --- a/src/ansys/fluent/core/codegen/allapigen.py +++ b/src/ansys/fluent/core/codegen/allapigen.py @@ -23,9 +23,6 @@ def generate(version: str, static_infos: dict): api_tree = {"": {}, "": {}} _update_first_level(api_tree, tuigen.generate(version, static_infos)) _update_first_level(api_tree, datamodelgen.generate(version, static_infos)) - if os.getenv("PYFLUENT_USE_OLD_SETTINGSGEN") == "1": - global settingsgen - from ansys.fluent.core.codegen import settingsgen_old as settingsgen _update_first_level(api_tree, settingsgen.generate(version, static_infos)) api_tree_file = get_api_tree_file_name(version) Path(api_tree_file).parent.mkdir(parents=True, exist_ok=True) diff --git a/src/ansys/fluent/core/codegen/builtin_settingsgen.py b/src/ansys/fluent/core/codegen/builtin_settingsgen.py index d7ca37a8841..acc32494f1e 100644 --- a/src/ansys/fluent/core/codegen/builtin_settingsgen.py +++ b/src/ansys/fluent/core/codegen/builtin_settingsgen.py @@ -1,8 +1,5 @@ """Generate builtin setting classes.""" -import os -from zipimport import zipimporter - from ansys.fluent.core import CODEGEN_OUTDIR, FluentVersion from ansys.fluent.core.solver.flobject import CreatableNamedObjectMixin, NamedObject from ansys.fluent.core.solver.settings_builtin_data import DATA @@ -12,24 +9,12 @@ def _get_settings_root(version: str): - from ansys.fluent.core import CODEGEN_OUTDIR, CODEGEN_ZIP_SETTINGS, utils + from ansys.fluent.core import CODEGEN_OUTDIR, utils - if os.getenv("PYFLUENT_USE_OLD_SETTINGSGEN") != "1": - settings = utils.load_module( - f"settings_{version}", - CODEGEN_OUTDIR / "solver" / f"settings_{version}.py", - ) - else: - if CODEGEN_ZIP_SETTINGS: - importer = zipimporter( - str(CODEGEN_OUTDIR / "solver" / f"settings_{version}.zip") - ) - settings = importer.load_module("settings") - else: - settings = utils.load_module( - f"settings_{version}", - CODEGEN_OUTDIR / "solver" / f"settings_{version}" / "__init__.py", - ) + settings = utils.load_module( + f"settings_{version}", + CODEGEN_OUTDIR / "solver" / f"settings_{version}.py", + ) return 
settings.root diff --git a/src/ansys/fluent/core/codegen/print_fluent_version.py b/src/ansys/fluent/core/codegen/print_fluent_version.py index 100762ca926..3e6b5aac7ae 100644 --- a/src/ansys/fluent/core/codegen/print_fluent_version.py +++ b/src/ansys/fluent/core/codegen/print_fluent_version.py @@ -1,26 +1,21 @@ """Module to write Fluent version information.""" -from ansys.fluent.core import CODEGEN_OUTDIR, launch_fluent -from ansys.fluent.core.utils.fluent_version import get_version_for_file_name +from ansys.fluent.core import CODEGEN_OUTDIR, FluentVersion, launch_fluent -def print_fluent_version(version: str, scheme_eval): +def print_fluent_version(app_utilities): """Write Fluent version information to file.""" + version = FluentVersion(app_utilities.get_product_version()).number + build_info = app_utilities.get_build_info() version_file = (CODEGEN_OUTDIR / f"fluent_version_{version}.py").resolve() with open(version_file, "w", encoding="utf8") as f: f.write(f'FLUENT_VERSION = "{version}"\n') - f.write(f'FLUENT_BUILD_TIME = "{scheme_eval("(inquire-build-time)")}"\n') - f.write(f'FLUENT_BUILD_ID = "{scheme_eval("(inquire-build-id)")}"\n') - f.write(f'FLUENT_REVISION = "{scheme_eval("(inquire-src-vcs-id)")}"\n') - f.write(f'FLUENT_BRANCH = "{scheme_eval("(inquire-src-vcs-branch)")}"\n') - - -def generate(version: str, scheme_eval): - """Write Fluent version information.""" - print_fluent_version(version, scheme_eval) + f.write(f'FLUENT_BUILD_TIME = "{build_info["build_time"]}"\n') + f.write(f'FLUENT_BUILD_ID = "{build_info["build_id"]}"\n') + f.write(f'FLUENT_REVISION = "{build_info["vcs_revision"]}"\n') + f.write(f'FLUENT_BRANCH = "{build_info["vcs_branch"]}"\n') if __name__ == "__main__": solver = launch_fluent() - version = get_version_for_file_name(session=solver) - generate(version, solver.scheme_eval.scheme_eval) + print_fluent_version(solver._app_utilities) diff --git a/src/ansys/fluent/core/codegen/settingsgen_old.py b/src/ansys/fluent/core/codegen/settingsgen_old.py deleted file mode 100644 index 333c4c8dc6b..00000000000 --- a/src/ansys/fluent/core/codegen/settingsgen_old.py +++ /dev/null @@ -1,535 +0,0 @@ -"""Provide a module to generate the Fluent settings tree. - -Running this module generates a python module with the definition of the Fluent -settings classes. The out is placed at: - -- src/ansys/fluent/core/solver/settings.py - -Running this module requires Fluent to be installed. - -Process -------- - - Launch fluent and get static info. Parse the class with flobject.get_cls() - - Generate a dictionary of unique classes with their hash as a key and a tuple of cls, children hash, commands hash, arguments hash, child object type hash as value. - - - This eliminates reduandancy and only unique classes are written. - - Generate .py files for the classes in hash dictionary. Resolve named conflicts with integer suffix. - - - Populate files dictionary with hash as key and file name as value. - - - child_object_type handled specially to avoid a lot of files with same name and to provide more insight of the child. - - Populate the classes. - - - For writing the import statements, get the hash of the child/command/argument/named object stored in the hash dict tuple value. - - - Use that hash to locate the corresponding children file name in the hash dict. 
- -Usage ------ -python -""" - -import hashlib -import io -import os -from pathlib import Path -import pickle -import pprint -import shutil - -import ansys.fluent.core as pyfluent -from ansys.fluent.core import launch_fluent -from ansys.fluent.core.codegen import StaticInfoType -from ansys.fluent.core.solver import flobject -from ansys.fluent.core.utils.fix_doc import fix_settings_doc -from ansys.fluent.core.utils.fluent_version import get_version_for_file_name - -hash_dict = {} -files_dict = {} -root_class_path = "" - - -def _gethash(obj_info): - dhash = hashlib.sha256() - dhash.update(pickle.dumps(obj_info)) - return dhash.hexdigest() - - -def _get_indent_str(indent): - return f"{' '*indent*4}" - - -def _populate_hash_dict(name, info, cls, api_tree): - children = info.get("children") - if children: - children_hash = [] - for cname, cinfo in children.items(): - for child in getattr(cls, "child_names", None): - child_cls = cls._child_classes[child] - if cname == child_cls.fluent_name: - api_tree[child] = {} - children_hash.append( - _populate_hash_dict(cname, cinfo, child_cls, api_tree[child]) - ) - okey = f"{child}:" - if okey in api_tree[child]: - api_tree[child].update(api_tree[child][okey]) - del api_tree[child][okey] - api_tree[okey] = api_tree.pop(child) - else: - api_tree[child] = api_tree[child] or "Parameter" - break - else: - children_hash = None - - commands = info.get("commands") - if commands: - commands_hash = [] - for cname, cinfo in commands.items(): - for command in getattr(cls, "command_names", None): - command_cls = cls._child_classes[command] - if cname == command_cls.fluent_name: - api_tree[command] = "Command" - commands_hash.append( - _populate_hash_dict(cname, cinfo, command_cls, {}) - ) - break - else: - commands_hash = None - - queries = info.get("queries") - if queries: - queries_hash = [] - for qname, qinfo in queries.items(): - for query in getattr(cls, "query_names", None): - query_cls = cls._child_classes[query] - if qname == query_cls.fluent_name: - api_tree[query] = "Query" - queries_hash.append( - _populate_hash_dict(qname, qinfo, query_cls, {}) - ) - break - else: - queries_hash = None - - arguments = info.get("arguments") - if arguments: - arguments_hash = [] - for aname, ainfo in arguments.items(): - for argument in getattr(cls, "argument_names", None): - argument_cls = cls._child_classes[argument] - if aname == argument_cls.fluent_name: - arguments_hash.append( - _populate_hash_dict(aname, ainfo, argument_cls, {}) - ) - break - else: - arguments_hash = None - - object_type = info.get("object-type") - if object_type: - key = f"{cls.__name__}:" - api_tree[key] = {} - object_hash = _populate_hash_dict( - "child-object-type", - object_type, - getattr(cls, "child_object_type", None), - api_tree[key], - ) - else: - object_hash = None - - cls_tuple = ( - name, - cls.__name__, - cls.__bases__, - info["type"], - info.get("help"), - children_hash, - commands_hash, - queries_hash, - arguments_hash, - object_hash, - ) - hash = _gethash(cls_tuple) - if not hash_dict.get(hash): - hash_dict[hash] = ( - cls, - children_hash, - commands_hash, - queries_hash, - arguments_hash, - object_hash, - ) - return hash - - -class _CommandInfo: - def __init__(self, doc, args_info): - self.doc = doc - self.args_info = args_info - - -_arg_type_strings = { - flobject.Boolean: "bool", - flobject.Integer: "int", - flobject.Real: "float | str", - flobject.String: "str", - flobject.Filename: "str", - flobject.BooleanList: "List[bool]", - flobject.IntegerList: "List[int]", - 
flobject.RealVector: "Tuple[float | str, float | str, float | str", - flobject.RealList: "List[float | str]", - flobject.StringList: "List[str]", - flobject.FilenameList: "List[str]", -} - - -def _get_commands_info(commands_hash): - commands_info = {} - for command_hash in commands_hash: - command_hash_info = hash_dict.get(command_hash) - command_cls = command_hash_info[0] - command_name = command_cls.__name__ - command_info = _CommandInfo(command_cls.__doc__, []) - if command_hash_info[4]: - for arg_hash in command_hash_info[4]: - arg_hash_info = hash_dict.get(arg_hash) - arg_cls = arg_hash_info[0] - arg_name = arg_cls.__name__ - arg_type = _arg_type_strings[arg_cls.__bases__[0]] - command_info.args_info.append(f"{arg_name}: {arg_type}") - commands_info[command_name] = command_info - return commands_info - - -def _write_doc_string(doc, indent, writer): - doc = ("\n" + indent).join(doc.split("\n")) - writer.write(f'{indent}"""\n') - writer.write(f"{indent}{doc}") - writer.write(f'\n{indent}"""\n\n') - - -def _populate_classes(parent_dir): - istr = _get_indent_str(0) - istr1 = _get_indent_str(1) - istr2 = _get_indent_str(2) - files = [] - # generate files - for key, ( - cls, - children_hash, - commands_hash, - queries_hash, - arguments_hash, - object_hash, - ) in hash_dict.items(): - cls_name = file_name = cls.__name__ - if cls_name == "child_object_type": - # Get the first parent for this class. - for ( - cls1, - children_hash1, - commands_hash1, - queries_hash1, - arguments_hash1, - object_hash1, - ) in hash_dict.values(): - if key == object_hash1: - cls.__name__ = file_name = cls1.__name__ + "_child" - break - i = 0 - while file_name in files: - if i > 0: - file_name = file_name[: file_name.rfind("_")] - i += 1 - file_name += "_" + str(i) - files.append(file_name) - files_dict[key] = file_name - - # Store root class path for __init__.py - if cls_name == "root": - global root_class_path - root_class_path = file_name - - file_name += ".py" - file_name = os.path.normpath(os.path.join(parent_dir, file_name)) - with open(file_name, "w") as f: - f.write(f"name: {cls_name}") - - # populate files - for key, ( - cls, - children_hash, - commands_hash, - queries_hash, - arguments_hash, - object_hash, - ) in hash_dict.items(): - file_name = files_dict.get(key) - cls_name = cls.__name__ - file_name = os.path.normpath(os.path.join(parent_dir, file_name + ".py")) - stub_f = None - if not pyfluent.CODEGEN_ZIP_SETTINGS: - stub_file_name = file_name + "i" - stub_f = open(stub_file_name, "w") - with open(file_name, "w") as f: - # disclaimer to py file - f.write("#\n") - f.write("# This is an auto-generated file. DO NOT EDIT!\n") - f.write("#\n") - f.write("\n") - if stub_f: - stub_f.write("#\n") - stub_f.write("# This is an auto-generated file. 
DO NOT EDIT!\n") - stub_f.write("#\n") - stub_f.write("\n\n") - - # write imports to py file - import_str = ( - "from ansys.fluent.core.solver.flobject import *\n\n" - "from ansys.fluent.core.solver.flobject import (\n" - f"{istr1}_ChildNamedObjectAccessorMixin,\n" - f"{istr1}CreatableNamedObjectMixin,\n" - f"{istr1}_NonCreatableNamedObjectMixin,\n" - f"{istr1}AllowedValuesMixin,\n" - f"{istr1}_InputFile,\n" - f"{istr1}_OutputFile,\n" - f"{istr1}_InOutFile,\n" - ")\n\n" - ) - f.write(import_str) - if stub_f: - stub_f.write(import_str) - stub_f.write("from typing import Union, List, Tuple\n\n") - - if children_hash: - for child in children_hash: - pchild_name = hash_dict.get(child)[0].__name__ - import_str = f"from .{files_dict.get(child)} import {pchild_name} as {pchild_name}_cls\n" - f.write(import_str) - if stub_f: - stub_f.write(import_str) - - if commands_hash: - for child in commands_hash: - pchild_name = hash_dict.get(child)[0].__name__ - import_str = f"from .{files_dict.get(child)} import {pchild_name} as {pchild_name}_cls\n" - f.write(import_str) - if stub_f: - stub_f.write(import_str) - - if queries_hash: - for child in queries_hash: - pchild_name = hash_dict.get(child)[0].__name__ - import_str = f"from .{files_dict.get(child)} import {pchild_name} as {pchild_name}_cls\n" - f.write(import_str) - if stub_f: - stub_f.write(import_str) - - if arguments_hash: - for child in arguments_hash: - pchild_name = hash_dict.get(child)[0].__name__ - import_str = f"from .{files_dict.get(child)} import {pchild_name} as {pchild_name}_cls\n" - f.write(import_str) - if stub_f: - stub_f.write(import_str) - - if object_hash: - pchild_name = hash_dict.get(object_hash)[0].__name__ - import_str = ( - f"from .{files_dict.get(object_hash)} import {pchild_name}\n\n" - ) - f.write(import_str) - if stub_f: - stub_f.write(import_str) - - # class name - class_def_str = ( - f"\n{istr}class {cls_name}" - f'({", ".join(f"{c.__name__}[{hash_dict.get(object_hash)[0].__name__}]" if object_hash else c.__name__ for c in cls.__bases__)}):\n' - ) - f.write(class_def_str) - if stub_f: - stub_f.write(class_def_str) - - doc = fix_settings_doc(cls.__doc__) - # Custom doc for child object type - if cls.fluent_name == "child-object-type": - parent_name = Path(file_name).stem[ - 0 : Path(file_name).stem.find("_child") - ] - doc = f"'child_object_type' of {parent_name}." 
- - _write_doc_string(doc, istr1, f) - f.write(f'{istr1}fluent_name = "{cls.fluent_name}"\n') - f.write(f'{istr1}version = "{cls.version}"\n\n') - if stub_f: - stub_f.write(f"{istr1}fluent_name = ...\n") - stub_f.write(f"{istr1}version = ...\n\n") - - child_class_strings = [] - - # write children objects - child_names = getattr(cls, "child_names", None) - if child_names: - f.write(f"{istr1}child_names = \\\n") - strout = io.StringIO() - pprint.pprint(child_names, stream=strout, compact=True, width=70) - mn = ("\n" + istr2).join(strout.getvalue().strip().split("\n")) - f.write(f"{istr2}{mn}\n\n") - if stub_f: - stub_f.write(f"{istr1}child_names = ...\n") - - for child in child_names: - child_cls = cls._child_classes[child] - child_class_strings.append(f"{child}={child_cls.__name__}_cls") - if stub_f: - stub_f.write( - f"{istr1}{child}: {child_cls.__name__}_cls = ...\n" - ) - - # write command objects - command_names = getattr(cls, "command_names", None) - if command_names: - f.write(f"{istr1}command_names = \\\n") - strout = io.StringIO() - pprint.pprint(command_names, stream=strout, compact=True, width=70) - mn = ("\n" + istr2).join(strout.getvalue().strip().split("\n")) - f.write(f"{istr2}{mn}\n\n") - if stub_f: - stub_f.write(f"{istr1}command_names = ...\n\n") - - commands_info = _get_commands_info(commands_hash) - for command in command_names: - command_cls = cls._child_classes[command] - child_class_strings.append(f"{command}={command_cls.__name__}_cls") - # function annotation for commands - command_info = commands_info[command] - if stub_f: - stub_f.write(f"{istr1}def {command}(self, ") - stub_f.write(", ".join(command_info.args_info)) - stub_f.write("):\n") - _write_doc_string(command_info.doc, istr2, stub_f) - - # write query objects - query_names = getattr(cls, "query_names", None) - if query_names: - f.write(f"{istr1}query_names = \\\n") - strout = io.StringIO() - pprint.pprint(query_names, stream=strout, compact=True, width=70) - mn = ("\n" + istr2).join(strout.getvalue().strip().split("\n")) - f.write(f"{istr2}{mn}\n\n") - if stub_f: - stub_f.write(f"{istr1}query_names = ...\n\n") - - queries_info = _get_commands_info(queries_hash) - for query in query_names: - query_cls = cls._child_classes[query] - child_class_strings.append(f"{query}={query_cls.__name__}_cls") - # function annotation for queries - query_info = queries_info[query] - if stub_f: - stub_f.write(f"{istr1}def {query}(self, ") - stub_f.write(", ".join(query_info.args_info)) - stub_f.write("):\n") - _write_doc_string(query_info.doc, istr2, stub_f) - - # write arguments - arguments = getattr(cls, "argument_names", None) - if arguments: - f.write(f"{istr1}argument_names = \\\n") - strout = io.StringIO() - pprint.pprint(arguments, stream=strout, compact=True, width=70) - mn = ("\n" + istr2).join(strout.getvalue().strip().split("\n")) - f.write(f"{istr2}{mn}\n\n") - if stub_f: - stub_f.write(f"{istr1}argument_names = ...\n") - - for argument in arguments: - argument_cls = cls._child_classes[argument] - child_class_strings.append( - f"{argument}={argument_cls.__name__}_cls" - ) - if stub_f: - stub_f.write( - f"{istr1}{argument}: {argument_cls.__name__}_cls = ...\n" - ) - - if child_class_strings: - f.write(f"{istr1}_child_classes = dict(\n") - f.writelines( - [f"{istr2}{cls_str},\n" for cls_str in child_class_strings] - ) - f.write(f"{istr1})\n\n") - - child_aliases = getattr(cls, "_child_aliases", None) - if child_aliases: - f.write(f"{istr1}_child_aliases = dict(\n") - f.writelines([f'{istr2}{k}="{v}",\n' for k, v in 
child_aliases.items()]) - f.write(f"{istr1})\n\n") - - # write object type - child_object_type = getattr(cls, "child_object_type", None) - if child_object_type: - f.write(f"{istr1}child_object_type: {pchild_name} = {pchild_name}\n") - f.write(f'{istr1}"""\n') - f.write(f"{istr1}child_object_type of {cls_name}.") - f.write(f'\n{istr1}"""\n') - if stub_f: - stub_f.write(f"{istr1}child_object_type: {pchild_name} = ...\n") - - return_type = getattr(cls, "return_type", None) - if return_type: - f.write(f'{istr1}return_type = "{return_type}"\n') - if stub_f: - stub_f.write(f"{istr1}return_type = ...\n") - if stub_f: - stub_f.close() - - -def _populate_init(parent_dir, hash): - file_name = os.path.normpath(os.path.join(parent_dir, "__init__.py")) - with open(file_name, "w") as f: - f.write("#\n") - f.write("# This is an auto-generated file. DO NOT EDIT!\n") - f.write("#\n") - f.write("\n") - f.write('"""A package providing Fluent\'s Settings Objects in Python."""') - f.write("\n") - f.write("from ansys.fluent.core.solver.flobject import *\n\n") - f.write(f'SHASH = "{hash}"\n') - f.write(f"from .{root_class_path} import root") - - -def generate(version, static_infos: dict): - """Generate settings API classes.""" - parent_dir = (pyfluent.CODEGEN_OUTDIR / "solver" / f"settings_{version}").resolve() - api_tree = {} - sinfo = static_infos.get(StaticInfoType.SETTINGS) - - # Clear previously generated data - if os.path.exists(parent_dir): - shutil.rmtree(parent_dir) - - if sinfo: - hash = _gethash(sinfo) - os.makedirs(parent_dir) - - if pyfluent.CODEGEN_ZIP_SETTINGS: - parent_dir = parent_dir / "settings" - os.makedirs(parent_dir) - - cls, _ = flobject.get_cls("", sinfo, version=version) - - _populate_hash_dict("", sinfo, cls, api_tree) - _populate_classes(parent_dir) - _populate_init(parent_dir, hash) - - if pyfluent.CODEGEN_ZIP_SETTINGS: - shutil.make_archive(parent_dir.parent, "zip", parent_dir.parent) - shutil.rmtree(parent_dir.parent) - - return {"": api_tree} - - -if __name__ == "__main__": - solver = launch_fluent() - version = get_version_for_file_name(session=solver) - static_infos = {StaticInfoType.SETTINGS: solver._settings_service.get_static_info()} - generate(version, static_infos) diff --git a/src/ansys/fluent/core/codegen/walk_api.py b/src/ansys/fluent/core/codegen/walk_api.py new file mode 100644 index 00000000000..c21983fb364 --- /dev/null +++ b/src/ansys/fluent/core/codegen/walk_api.py @@ -0,0 +1,57 @@ +"""Module containing tool for walking (generated) API class hierarchy. + +Example +------- + +.. code-block:: python + + >>> from ansys.fluent.core.codegen import walk_api + >>> from ansys.fluent.core.generated.solver import settings_252 + >>> walk_api.walk_api(settings_252.root, lambda p: print(p), current_path=[]) + +""" + +from typing import List + + +def walk_api(api_root_cls, on_each_path, current_path: str | List[str] = ""): + """ + Recursively traverse the API hierarchy, calling `on_each_path` for each item. + + Parameters: + - api_root_cls: The root class of the API hierarchy. + - on_each_path: A callback function to call for each path. + - current_path: The current path in the hierarchy (default: empty string). + Paths can be either dot-separated strings or string lists. The type is + determined by the client. 
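+
+    Example
+    -------
+    A minimal sketch, collecting every API path into a list:
+
+    >>> paths = []
+    >>> walk_api(settings_252.root, paths.append, current_path="")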
+    """
+    # Skip the root path
+    if current_path:
+        on_each_path(current_path)
+
+    # Get child names and their respective classes
+    all_names = [
+        name
+        for attr in ("child_names", "argument_names", "command_names", "query_names")
+        for name in getattr(api_root_cls, attr, [])
+    ]
+    child_classes = getattr(api_root_cls, "_child_classes", {})
+
+    # Traverse each child
+    for child_name in all_names:
+        if child_name in child_classes:
+            child_cls = child_classes[child_name]
+            # Construct the new path
+            if isinstance(current_path, list):
+                new_path = current_path + [child_name]
+            else:
+                new_path = (
+                    f"{current_path}.{child_name}" if current_path else child_name
+                )
+            # Recursively walk the child
+            walk_api(child_cls, on_each_path, new_path)
+
+    # Delegate directly to any child_object_type (relevant for named objects),
+    # keeping the current path unchanged
+    child_object_type = getattr(api_root_cls, "child_object_type", None)
+    if child_object_type:
+        walk_api(child_object_type, on_each_path, current_path)
diff --git a/src/ansys/fluent/core/data_model_cache.py b/src/ansys/fluent/core/data_model_cache.py
index 79980f1daf0..43c3b96a98d 100644
--- a/src/ansys/fluent/core/data_model_cache.py
+++ b/src/ansys/fluent/core/data_model_cache.py
@@ -5,7 +5,7 @@
 import copy
 from enum import Enum
 from threading import RLock
-from typing import Any, Dict, List
+from typing import Any, Dict, List, Optional
 
 from ansys.api.fluent.v0.variant_pb2 import Variant
 from ansys.fluent.core.utils.fluent_version import FluentVersion
@@ -206,28 +206,38 @@ def _update_cache_from_variant_state(
         source: Dict[str, StateType],
         key: str,
         state: Variant,
-        updaterFn,
+        updater_fn,
         rules_str: str,
         version,
     ):
-        if state.HasField("bool_state"):
-            updaterFn(source, key, state.bool_state)
-        elif state.HasField("int64_state"):
-            updaterFn(source, key, state.int64_state)
-        elif state.HasField("double_state"):
-            updaterFn(source, key, state.double_state)
-        elif state.HasField("string_state"):
-            updaterFn(source, key, state.string_state)
-        elif state.HasField("bool_vector_state"):
-            updaterFn(source, key, state.bool_vector_state.item)
-        elif state.HasField("int64_vector_state"):
-            updaterFn(source, key, state.int64_vector_state.item)
-        elif state.HasField("double_vector_state"):
-            updaterFn(source, key, state.double_vector_state.item)
-        elif state.HasField("string_vector_state"):
-            updaterFn(source, key, state.string_vector_state.item)
-        elif state.HasField("variant_vector_state"):
-            updaterFn(source, key, [])
+        # Helper function to update the source with the state value
+        def update_source_with_state(state_field):
+            if state.HasField(state_field):
+                value = getattr(state, state_field)
+                if state_field.endswith("_vector_state"):
+                    # Vector states wrap their values in a repeated ``item`` field
+                    value = value.item
+                updater_fn(source, key, value)
+                return True
+            return False
+
+        # Check for basic state types
+        for state_type in [
+            "bool_state",
+            "int64_state",
+            "double_state",
+            "string_state",
+            "bool_vector_state",
+            "int64_vector_state",
+            "double_vector_state",
+            "string_vector_state",
+        ]:
+            if update_source_with_state(state_type):
+                return
+
+        # Handle variant vector state
+        if state.HasField("variant_vector_state"):
+            updater_fn(source, key, [])
             for item in state.variant_vector_state.item:
                 self._update_cache_from_variant_state(
                     rules,
@@ -238,34 +244,28 @@
                     rules_str + "/" + key.split(":", maxsplit=1)[0],
                     version,
                 )
-        elif state.HasField("variant_map_state"):
+            return
+
+        # Handle variant map state
+        if state.HasField("variant_map_state"):
             internal_names_as_keys = (
                 self.get_config(rules, "name_key") == NameKey.INTERNAL
             )
+
+            # Determine the appropriate key
             if ":" in key:
                 type_, 
iname = key.split(":", maxsplit=1) - for k1, v1 in source.items(): - if (internal_names_as_keys and k1 == key) or ( - (not internal_names_as_keys) - and isinstance(v1, dict) - and v1.get(NameKey.INTERNAL.value) == iname - ): - key = k1 - break - else: # new named object - if internal_names_as_keys: - source[key] = {} - else: - name = state.variant_map_state.item[ - NameKey.DISPLAY.value - ].string_state - key = f"{type_}:{name}" - source[key] = {NameKey.INTERNAL.value: iname} + key = self._determine_key( + source, internal_names_as_keys, key, state, type_, iname + ) else: if key not in source: source[key] = {} + if version and _is_dict_parameter_type(version, rules, rules_str): source[key] = {} + + # Update the source with items from the variant map state if state.variant_map_state.item: source = source[key] for k, v in state.variant_map_state.item.items(): @@ -280,8 +280,40 @@ def _update_cache_from_variant_state( ) else: source[key] = {} + + # Default case when no fields are matched else: - updaterFn(source, key, None) + updater_fn(source, key, None) + + def _determine_key( + self, + source: Dict[str, StateType], + internal_names_as_keys: bool, + key: str, + state: Variant, + type_: str, + iname: str, + ) -> str: + """Determine the appropriate key based on internal naming conventions.""" + for k1, v1 in source.items(): + if (internal_names_as_keys and k1 == key) or ( + (not internal_names_as_keys) + and isinstance(v1, dict) + and v1.get(NameKey.INTERNAL.value) == iname + ): + return k1 # Found a matching key + + # If no match found and internal naming is used + if internal_names_as_keys: + source[key] = {} + return key + + # If no match found and external naming is used + name = state.variant_map_state.item[NameKey.DISPLAY.value].string_state + new_key = f"{type_}:{name}" + source[new_key] = {NameKey.INTERNAL.value: iname} + + return new_key def update_cache( self, rules: str, state: Variant, deleted_paths: List[str], version=None @@ -300,37 +332,16 @@ def update_cache( Fluent version """ cache = self.rules_str_to_cache[rules] + with self._with_lock(rules): internal_names_as_keys = ( self.get_config(rules, "name_key") == NameKey.INTERNAL ) - for deleted_path in deleted_paths: - comps = [x for x in deleted_path.split("/") if x] - sub_cache = cache - for i, comp in enumerate(comps): - if ":" in comp: - _, iname = comp.split(":", maxsplit=1) - key_to_del = None - for k, v in sub_cache.items(): - if (internal_names_as_keys and k == comp) or ( - (not internal_names_as_keys) - and isinstance(v, dict) - and v.get(NameKey.INTERNAL.value) == iname - ): - if i == len(comps) - 1: - key_to_del = k - else: - sub_cache = v - break - else: - break - if key_to_del: - del sub_cache[key_to_del] - else: - if comp in sub_cache: - sub_cache = sub_cache[comp] - else: - break + + # Process deleted paths + self._process_deleted_paths(cache, deleted_paths, internal_names_as_keys) + + # Update cache with new state items for k, v in state.variant_map_state.item.items(): self._update_cache_from_variant_state( rules, @@ -342,6 +353,56 @@ def update_cache( version, ) + def _process_deleted_paths( + self, + cache: Dict[str, Any], + deleted_paths: List[str], + internal_names_as_keys: bool, + ): + """Process and delete paths from the cache based on the deleted paths list.""" + for deleted_path in deleted_paths: + comps = [x for x in deleted_path.split("/") if x] + self._delete_from_cache(cache, comps, internal_names_as_keys) + + def _delete_from_cache( + self, sub_cache: Dict[str, Any], comps: List[str], 
internal_names_as_keys: bool
+    ):
+        """Recursively delete components from the cache."""
+        for i, comp in enumerate(comps):
+            if ":" in comp:
+                _, iname = comp.split(":", maxsplit=1)
+                key = self._find_matching_key(
+                    sub_cache, comp, iname, internal_names_as_keys
+                )
+                if key is None:
+                    break  # No matching named object; nothing to delete
+                if i == len(comps) - 1:
+                    del sub_cache[key]
+                    return  # Exit after deletion
+                sub_cache = sub_cache[key]  # Descend into the matched named object
+            else:
+                if comp in sub_cache:
+                    sub_cache = sub_cache[comp]
+                else:
+                    break
+
+    def _find_matching_key(
+        self,
+        sub_cache: Dict[str, Any],
+        comp: str,
+        iname: str,
+        internal_names_as_keys: bool,
+    ) -> Optional[str]:
+        """Find the sub-cache key matching a named-object path component."""
+        for k, v in sub_cache.items():
+            if (internal_names_as_keys and k == comp) or (
+                (not internal_names_as_keys)
+                and isinstance(v, dict)
+                and v.get(NameKey.INTERNAL.value) == iname
+            ):
+                return k  # Found a matching key
+        return None  # No matching key found
+
     @staticmethod
     def _dm_path_comp(comp):
         return ":".join(comp) if comp[1] else comp[0]
diff --git a/src/ansys/fluent/core/fluent_connection.py b/src/ansys/fluent/core/fluent_connection.py
index 3f32b293acd..3759c2ec91c 100644
--- a/src/ansys/fluent/core/fluent_connection.py
+++ b/src/ansys/fluent/core/fluent_connection.py
@@ -21,6 +21,10 @@
 
 import ansys.fluent.core as pyfluent
 from ansys.fluent.core.services import service_creator
+from ansys.fluent.core.services.app_utilities import (
+    AppUtilitiesOld,
+    AppUtilitiesService,
+)
 from ansys.fluent.core.services.scheme_eval import SchemeEvalService
 from ansys.fluent.core.utils.execution import timeout_exec, timeout_loop
 from ansys.fluent.core.utils.file_transfer_service import RemoteFileTransferStrategy
@@ -246,15 +250,24 @@ def __init__(self, create_grpc_service, error_state):
         self.scheme_eval = service_creator("scheme_eval").create(
             self._scheme_eval_service
         )
+        if (
+            pyfluent.FluentVersion(self.scheme_eval.version)
+            < pyfluent.FluentVersion.v252
+        ):
+            self._app_utilities = AppUtilitiesOld(self.scheme_eval)
+        else:
+            self._app_utilities_service = create_grpc_service(
+                AppUtilitiesService, error_state
+            )
+            self._app_utilities = service_creator("app_utilities").create(
+                self._app_utilities_service
+            )
 
     @property
     def product_build_info(self) -> str:
         """Get Fluent build information."""
-        build_time = self.scheme_eval.scheme_eval("(inquire-build-time)")
-        build_id = self.scheme_eval.scheme_eval("(inquire-build-id)")
-        rev = self.scheme_eval.scheme_eval("(inquire-src-vcs-id)")
-        branch = self.scheme_eval.scheme_eval("(inquire-src-vcs-branch)")
-        return f"Build Time: {build_time} Build Id: {build_id} Revision: {rev} Branch: {branch}"
+        build_info = self._app_utilities.get_build_info()
+        return f'Build Time: {build_info["build_time"]} Build Id: {build_info["build_id"]} Revision: {build_info["vcs_revision"]} Branch: {build_info["vcs_branch"]}'
 
     def get_cortex_connection_properties(self):
         """Get connection properties of Fluent."""
@@ -263,10 +276,12 @@ def get_cortex_connection_properties(self):
         try:
             logger.info(self.product_build_info)
             logger.debug("Obtaining Cortex connection properties...")
-            fluent_host_pid = self.scheme_eval.scheme_eval("(cx-client-id)")
-            cortex_host = self.scheme_eval.scheme_eval("(cx-cortex-host)")
-            cortex_pid = self.scheme_eval.scheme_eval("(cx-cortex-id)")
-            cortex_pwd = self.scheme_eval.scheme_eval("(cortex-pwd)")
+            cortex_info = self._app_utilities.get_controller_process_info()
+            solver_info = self._app_utilities.get_solver_process_info()
+ 
fluent_host_pid = solver_info["process_id"] + cortex_host = cortex_info["hostname"] + cortex_pid = cortex_info["process_id"] + cortex_pwd = cortex_info["working_directory"] logger.debug("Cortex connection properties successfully obtained.") except _InactiveRpcError: logger.warning( @@ -282,22 +297,11 @@ def get_cortex_connection_properties(self): def get_mode(self): """Get the mode of a running fluent session.""" - from ansys.fluent.core import FluentMode - - if self.scheme_eval.scheme_eval("(cx-solver-mode?)"): - mode_str = self.scheme_eval.scheme_eval('(getenv "PRJAPP_APP")') - if mode_str == "flaero_server": - return FluentMode.SOLVER_AERO - elif mode_str == "flicing": - return FluentMode.SOLVER_ICING - else: - return FluentMode.SOLVER - else: - return FluentMode.MESHING + return self._app_utilities.get_app_mode() def exit_server(self): """Exits the server.""" - self.scheme_eval.exec(("(exit-server)",)) + self._app_utilities.exit() def _pid_exists(pid): diff --git a/src/ansys/fluent/core/journaling.py b/src/ansys/fluent/core/journaling.py index 6d39bbb7529..c4f86918d2b 100644 --- a/src/ansys/fluent/core/journaling.py +++ b/src/ansys/fluent/core/journaling.py @@ -4,14 +4,14 @@ class Journal: """Control the writing of Fluent Python journals.""" - def __init__(self, scheme_eval): + def __init__(self, app_utilities): """__init__ method of Journal class.""" - self.scheme_eval = scheme_eval + self._app_utilities = app_utilities def start(self, file_name: str): """Start writing a Fluent Python journal at the specified file_name.""" - self.scheme_eval.exec([f'(api-start-python-journal "{file_name}")']) + self._app_utilities.start_python_journal(journal_name=file_name) def stop(self): """Stop writing the Fluent Python journal.""" - self.scheme_eval.exec(["(api-stop-python-journal)"]) + self._app_utilities.stop_python_journal() diff --git a/src/ansys/fluent/core/launcher/fluent_container.py b/src/ansys/fluent/core/launcher/fluent_container.py index f69c4cb05ca..38259d894ba 100644 --- a/src/ansys/fluent/core/launcher/fluent_container.py +++ b/src/ansys/fluent/core/launcher/fluent_container.py @@ -240,14 +240,15 @@ def configure_container_dict( f"Starting Fluent container mounted to {mount_source}, with this path available as {mount_target} for the Fluent session running inside the container." 
    )
 
-    if "ports" not in container_dict:
-        if not port:
-            port = get_free_port()
-        container_dict.update(ports={str(port): port})  # container port : host port
-    else:
+    if not port and "ports" in container_dict:
         # take the specified 'port', OR the first port value from the specified 'ports', for Fluent to use
-        if not port:
-            port = next(iter(container_dict["ports"].values()))
+        port = next(iter(container_dict["ports"].values()))
+    if not port and pyfluent.LAUNCH_FLUENT_PORT:
+        port = pyfluent.LAUNCH_FLUENT_PORT
+    if not port:
+        port = get_free_port()
+
+    container_dict.update(ports={str(port): port})  # container port : host port
 
     if "environment" not in container_dict:
         if not license_server:
@@ -318,6 +319,29 @@ def configure_container_dict(
             container_dict["environment"] = {}
         container_dict["environment"]["FLUENT_NO_AUTOMATIC_TRANSCRIPT"] = "1"
 
+    if (
+        os.getenv("REMOTING_NEW_DM_API") == "1"
+        or os.getenv("REMOTING_MAPPED_NEW_DM_API") == "1"
+    ):
+        if "environment" not in container_dict:
+            container_dict["environment"] = {}
+        if os.getenv("REMOTING_NEW_DM_API") == "1":
+            container_dict["environment"]["REMOTING_NEW_DM_API"] = "1"
+        if os.getenv("REMOTING_MAPPED_NEW_DM_API") == "1":
+            container_dict["environment"]["REMOTING_MAPPED_NEW_DM_API"] = "1"
+
+    if pyfluent.LAUNCH_FLUENT_IP or os.getenv("REMOTING_SERVER_ADDRESS"):
+        if "environment" not in container_dict:
+            container_dict["environment"] = {}
+        container_dict["environment"]["REMOTING_SERVER_ADDRESS"] = (
+            pyfluent.LAUNCH_FLUENT_IP or os.getenv("REMOTING_SERVER_ADDRESS")
+        )
+
+    if pyfluent.LAUNCH_FLUENT_SKIP_PASSWORD_CHECK:
+        if "environment" not in container_dict:
+            container_dict["environment"] = {}
+        container_dict["environment"]["FLUENT_LAUNCHED_FROM_PYFLUENT"] = "1"
+
     fluent_commands = ["-gu", f"-sifile={container_server_info_file}"] + args
 
     container_dict_default = {}
diff --git a/src/ansys/fluent/core/launcher/launcher.py b/src/ansys/fluent/core/launcher/launcher.py
index 7e1be2ab755..2b5f830635b 100644
--- a/src/ansys/fluent/core/launcher/launcher.py
+++ b/src/ansys/fluent/core/launcher/launcher.py
@@ -168,8 +168,9 @@ def launch_fluent(
         See also :mod:`~ansys.fluent.core.launcher.fluent_container`.
     dry_run : bool, optional
         Defaults to False. If True, will not launch Fluent, and will instead print configuration information
-        that would be used as if Fluent was being launched. If dry running a container start,
-        ``launch_fluent()`` will return the configured ``container_dict``.
+        that would be used as if Fluent was being launched. If dry running a standalone start,
+        ``launch_fluent()`` will return a tuple containing the Fluent launch string and the server-info file name.
+        If dry running a container start, ``launch_fluent()`` will return the configured ``container_dict``.
cleanup_on_exit : bool, optional
         Whether to shut down the connected Fluent session when PyFluent is
         exited, or the ``exit()`` method is called on the session instance,
diff --git a/src/ansys/fluent/core/launcher/launcher_utils.py b/src/ansys/fluent/core/launcher/launcher_utils.py
index 16451d7283e..8164041d990 100644
--- a/src/ansys/fluent/core/launcher/launcher_utils.py
+++ b/src/ansys/fluent/core/launcher/launcher_utils.py
@@ -28,6 +28,10 @@ def _get_subprocess_kwargs_for_fluent(env: Dict[str, Any], argvals) -> Dict[str,
     kwargs: Dict[str, Any] = {}
     if is_slurm:
         kwargs.update(stdout=subprocess.PIPE)
+    else:
+        kwargs.update(
+            stdout=pyfluent.LAUNCH_FLUENT_STDOUT, stderr=pyfluent.LAUNCH_FLUENT_STDERR
+        )
     if is_windows():
         kwargs.update(shell=True, creationflags=subprocess.CREATE_NEW_PROCESS_GROUP)
     else:
@@ -39,6 +43,15 @@ def _get_subprocess_kwargs_for_fluent(env: Dict[str, Any], argvals) -> Dict[str,
         del fluent_env["PARA_NPROCS"]
         del fluent_env["PARA_MESH_NPROCS"]
 
+    if pyfluent.LAUNCH_FLUENT_IP:
+        fluent_env["REMOTING_SERVER_ADDRESS"] = pyfluent.LAUNCH_FLUENT_IP
+
+    if pyfluent.LAUNCH_FLUENT_PORT:
+        fluent_env["REMOTING_PORTS"] = f"{pyfluent.LAUNCH_FLUENT_PORT}/portspan=2"
+
+    if pyfluent.LAUNCH_FLUENT_SKIP_PASSWORD_CHECK:
+        fluent_env["FLUENT_LAUNCHED_FROM_PYFLUENT"] = "1"
+
     if not is_slurm:
         if pyfluent.INFER_REMOTING_IP and "REMOTING_SERVER_ADDRESS" not in fluent_env:
             remoting_ip = find_remoting_ip()
diff --git a/src/ansys/fluent/core/launcher/process_launch_string.py b/src/ansys/fluent/core/launcher/process_launch_string.py
index 91dcc1744aa..2b1ed4cfad8 100644
--- a/src/ansys/fluent/core/launcher/process_launch_string.py
+++ b/src/ansys/fluent/core/launcher/process_launch_string.py
@@ -137,7 +137,14 @@ def get_exe_path(fluent_root: Path) -> Path:
         return fluent_root / "bin" / "fluent"
 
     # Look for Fluent exe path in the following order:
-    # 1. product_version parameter passed with launch_fluent
+    # 1. Custom path provided by the user in launch_fluent
+    fluent_path = launch_argvals.get("fluent_path")
+    if fluent_path:
+        # Return the fluent_path string verbatim. The path may not even exist
+        # on the current machine if the user wants to launch Fluent externally
+        # (dry_run use case).
+        return fluent_path
+
+    # 2. product_version parameter passed with launch_fluent
     product_version = launch_argvals.get("product_version")
     if product_version:
         return get_exe_path(get_fluent_root(FluentVersion(product_version)))
@@ -147,10 +154,5 @@ def get_exe_path(fluent_root: Path) -> Path:
     if fluent_root:
         return get_exe_path(Path(fluent_root))
 
-    # 2. Custom Path provided by the user in launch_fluent
-    fluent_path = launch_argvals.get("fluent_path")
-    if fluent_path:
-        return Path(fluent_path)
-
     # 3. the latest ANSYS version from AWP_ROOT environment variables
     return get_exe_path(get_fluent_root(FluentVersion.get_latest_installed()))
diff --git a/src/ansys/fluent/core/launcher/pyfluent_enums.py b/src/ansys/fluent/core/launcher/pyfluent_enums.py
index cfff8b63c2b..278b055061e 100644
--- a/src/ansys/fluent/core/launcher/pyfluent_enums.py
+++ b/src/ansys/fluent/core/launcher/pyfluent_enums.py
@@ -276,9 +276,7 @@ def _get_running_session_mode(
     return session_mode.get_fluent_value()
 
 
-def _get_standalone_launch_fluent_version(
-    product_version: FluentVersion | str | float | int | None,
-) -> FluentVersion | None:
+def _get_standalone_launch_fluent_version(argvals) -> FluentVersion | None:
     """Determine the Fluent version during the execution of the ``launch_fluent()``
     method in standalone mode.
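Taken together, the launcher changes above enable an "external launch" workflow: with ``dry_run=True``, PyFluent now hands back the launch command and the server-info file name instead of only printing them. A minimal usage sketch, assuming a standalone start (the two-element return shape follows the updated ``dry_run`` docstrings; the external-launch step is illustrative):

```python
import subprocess

import ansys.fluent.core as pyfluent

# Dry run: nothing is launched; PyFluent returns the command it would have
# run plus the server-info file it will watch for connection details.
launch_string, server_info_file = pyfluent.launch_fluent(dry_run=True)

# Start Fluent ourselves (e.g., via a custom scheduler wrapper)...
subprocess.Popen(launch_string, shell=True)

# ...and connect once Fluent has written the server-info file.
session = pyfluent.connect_to_fluent(server_info_file_name=server_info_file)
```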
@@ -295,9 +293,14 @@ def _get_standalone_launch_fluent_version(
 
     # Look for Fluent version in the following order:
     # 1. product_version parameter passed with launch_fluent
+    product_version = argvals.get("product_version")
     if product_version:
         return FluentVersion(product_version)
 
+    # If fluent_path is provided, we cannot determine the Fluent version, so returning None.
+    if argvals.get("fluent_path"):
+        return None
+
     # (DEV) if "PYFLUENT_FLUENT_ROOT" environment variable is defined, we cannot
     # determine the Fluent version, so returning None.
     if os.getenv("PYFLUENT_FLUENT_ROOT"):
diff --git a/src/ansys/fluent/core/launcher/server_info.py b/src/ansys/fluent/core/launcher/server_info.py
index 662d76e8d8c..44c3f5c3081 100644
--- a/src/ansys/fluent/core/launcher/server_info.py
+++ b/src/ansys/fluent/core/launcher/server_info.py
@@ -10,7 +10,27 @@
 
 from ansys.fluent.core.session import _parse_server_info_file
 
 
-def _get_server_info_file_name(use_tmpdir=True):
+def _get_server_info_file_names(use_tmpdir=True) -> tuple[str, str]:
+    """Returns a tuple containing the server-side and client-side names of the file
+    holding the server connection information.
+
+    When the server and client are on different machines, the environment variable SERVER_INFO_DIR
+    can be set to a directory shared between the two machines and the server-info file will be
+    created in that directory. The value of the environment variable SERVER_INFO_DIR can be
+    different for the server and client machines. The relative path of the server-side server-info
+    file is passed to the Fluent launcher and PyFluent connects to the server using the absolute path
+    of the client-side server-info file. A typical use case of the environment variable
+    SERVER_INFO_DIR is as follows:
+    - Server machine environment variable: SERVER_INFO_DIR=/mnt/shared
+    - Client machine environment variable: SERVER_INFO_DIR=\\\\server\\shared
+    - Server-side server-info file: /mnt/shared/serverinfo-xyz.txt
+    - Client-side server-info file: \\\\server\\shared\\serverinfo-xyz.txt
+    - Fluent launcher command: fluent ... -sifile=serverinfo-xyz.txt ...
+    - From PyFluent: connect_to_fluent(server_info_file_name="\\\\server\\shared\\serverinfo-xyz.txt")
+
+    When the environment variable SERVER_INFO_DIR is not set, the server-side and client-side
+    file paths for the server-info file are identical. The server-info file is created in the
+    temporary directory if ``use_tmpdir`` is True, otherwise it is created in the current working
+    directory.
+    """
     server_info_dir = os.getenv("SERVER_INFO_DIR")
     dir_ = (
         Path(server_info_dir)
@@ -19,7 +39,10 @@ def _get_server_info_file_name(use_tmpdir=True):
     fd, file_name = tempfile.mkstemp(suffix=".txt", prefix="serverinfo-", dir=str(dir_))
     os.close(fd)
-    return file_name
+    if server_info_dir:
+        return Path(file_name).name, file_name
+    else:
+        return file_name, file_name
 
 
 def _get_server_info(
diff --git a/src/ansys/fluent/core/launcher/slurm_launcher.py b/src/ansys/fluent/core/launcher/slurm_launcher.py
index 9323fc51040..eb0397ba0f4 100644
--- a/src/ansys/fluent/core/launcher/slurm_launcher.py
+++ b/src/ansys/fluent/core/launcher/slurm_launcher.py
@@ -64,7 +64,7 @@
     UIMode,
     _get_argvals_and_session,
 )
-from ansys.fluent.core.launcher.server_info import _get_server_info_file_name
+from ansys.fluent.core.launcher.server_info import _get_server_info_file_names
 from ansys.fluent.core.session_meshing import Meshing
 from ansys.fluent.core.session_pure_meshing import PureMeshing
 from ansys.fluent.core.session_solver import Solver
@@ -416,11 +416,14 @@ def __init__(
         )
 
     def _prepare(self):
-        self._server_info_file_name = _get_server_info_file_name(use_tmpdir=False)
+        server_info_file_name_for_server, server_info_file_name_for_client = (
+            _get_server_info_file_names(use_tmpdir=False)
+        )
+        self._server_info_file_name = server_info_file_name_for_client
         self._argvals.update(self._argvals["scheduler_options"])
         launch_cmd = _generate_launch_string(
             self._argvals,
-            self._server_info_file_name,
+            server_info_file_name_for_server,
         )
 
         self._sifile_last_mtime = Path(self._server_info_file_name).stat().st_mtime
diff --git a/src/ansys/fluent/core/launcher/standalone_launcher.py b/src/ansys/fluent/core/launcher/standalone_launcher.py
index 1e807bab7dc..edf5bb062e6 100644
--- a/src/ansys/fluent/core/launcher/standalone_launcher.py
+++ b/src/ansys/fluent/core/launcher/standalone_launcher.py
@@ -43,7 +43,7 @@
 )
 from ansys.fluent.core.launcher.server_info import (
     _get_server_info,
-    _get_server_info_file_name,
+    _get_server_info_file_names,
 )
 import ansys.fluent.core.launcher.watchdog as watchdog
 from ansys.fluent.core.utils.fluent_version import FluentVersion
@@ -128,8 +128,9 @@ def __init__(
         exited, or the ``exit()`` method is called on the session instance, or if
         the session instance becomes unreferenced. The default is ``True``.
     dry_run : bool, optional
-        Defaults to False. If True, will not launch Fluent, and will instead print configuration information
-        that would be used as if Fluent was being launched.
+        Defaults to False. If True, will not launch Fluent, and will print configuration information
+        that would be used as if Fluent was being launched. If True, the ``__call__()`` method will return
+        a tuple containing the Fluent launch string and the server-info file name.
     start_transcript : bool, optional
         Whether to start streaming the Fluent transcript in the client.
         The default is ``True``. You can stop and start the streaming of the
@@ -177,6 +178,8 @@ def __init__(
         The allocated machines and core counts are queried from the scheduler
         environment and passed to Fluent.
""" + import ansys.fluent.core as pyfluent + self.argvals, self.new_session = _get_argvals_and_session(locals().copy()) self.file_transfer_service = file_transfer_service if os.getenv("PYFLUENT_SHOW_SERVER_GUI") == "1": @@ -186,9 +189,7 @@ def __init__( self.argvals["start_timeout"] = 60 if self.argvals["lightweight_mode"] is None: self.argvals["lightweight_mode"] = False - fluent_version = _get_standalone_launch_fluent_version( - self.argvals["product_version"] - ) + fluent_version = _get_standalone_launch_fluent_version(self.argvals) if fluent_version: _raise_non_gui_exception_in_windows(self.argvals["ui_mode"], fluent_version) @@ -198,10 +199,13 @@ def __init__( if os.getenv("PYFLUENT_FLUENT_DEBUG") == "1": self.argvals["fluent_debug"] = True - self._server_info_file_name = _get_server_info_file_name() + server_info_file_name_for_server, server_info_file_name_for_client = ( + _get_server_info_file_names() + ) + self._server_info_file_name = server_info_file_name_for_client self._launch_string = _generate_launch_string( self.argvals, - self._server_info_file_name, + server_info_file_name_for_server, ) self._sifile_last_mtime = Path(self._server_info_file_name).stat().st_mtime @@ -215,8 +219,11 @@ def __init__( ) if is_windows(): - # Using 'start.exe' is better; otherwise Fluent is more susceptible to bad termination attempts. - self._launch_cmd = 'start "" ' + self._launch_string + if pyfluent.LAUNCH_FLUENT_STDOUT or pyfluent.LAUNCH_FLUENT_STDERR: + self._launch_cmd = self._launch_string + else: + # Using 'start.exe' is better; otherwise Fluent is more susceptible to bad termination attempts. + self._launch_cmd = 'start "" ' + self._launch_string else: if self.argvals["ui_mode"] not in [UIMode.GUI, UIMode.HIDDEN_GUI]: # Using nohup to hide Fluent output from the current terminal @@ -227,11 +234,11 @@ def __init__( def __call__(self): if self.argvals["dry_run"]: print(f"Fluent launch string: {self._launch_string}") - return + return self._launch_string, self._server_info_file_name try: logger.debug(f"Launching Fluent with command: {self._launch_cmd}") - subprocess.Popen(self._launch_cmd, **self._kwargs) + process = subprocess.Popen(self._launch_cmd, **self._kwargs) try: _await_fluent_launch( @@ -247,7 +254,7 @@ def __call__(self): logger.warning( f"Retrying Fluent launch with less robust command: {launch_cmd}" ) - subprocess.Popen(launch_cmd, **self._kwargs) + process = subprocess.Popen(launch_cmd, **self._kwargs) _await_fluent_launch( self._server_info_file_name, self.argvals["start_timeout"], @@ -264,6 +271,7 @@ def __call__(self): launcher_args=self.argvals, inside_container=False, ) + session._process = process start_watchdog = _confirm_watchdog_start( self.argvals["start_watchdog"], self.argvals["cleanup_on_exit"], diff --git a/src/ansys/fluent/core/post_objects/post_helper.py b/src/ansys/fluent/core/post_objects/post_helper.py index ef3c6e2609d..776e50ec1b4 100644 --- a/src/ansys/fluent/core/post_objects/post_helper.py +++ b/src/ansys/fluent/core/post_objects/post_helper.py @@ -2,6 +2,9 @@ import re +from ansys.fluent.core.solver.flunits import get_si_unit_for_fluent_quantity +from ansys.fluent.core.utils.fluent_version import FluentVersion + class IncompleteISOSurfaceDefinition(RuntimeError): """Raised when iso-surface definition is incomplete.""" @@ -94,11 +97,9 @@ def create_surface_on_server(self): def delete_surface_on_server(self): """Deletes the surface on server.""" if self.obj.definition.type() == "iso-surface": - 
self._get_api_handle().iso_surface.delete(self._surface_name_on_server) + del self._get_api_handle().iso_surface[self._surface_name_on_server] elif self.obj.definition.type() == "plane-surface": - self._get_api_handle().plane_surface.delete( - self._surface_name_on_server - ) + del self._get_api_handle().plane_surface[self._surface_name_on_server] def __init__(self, obj): """__init__ method of PostAPIHelper class.""" @@ -127,16 +128,22 @@ def remote_surface_name(self, local_surface_name): # Following functions will be deprecated in future. def get_vector_fields(self): """Returns vector field.""" - scheme_eval_str = "(map car (apply append (map client-inquire-cell-vector-functions (inquire-domain-for-cell-functions))))" - return self._scheme_str_to_py_list(scheme_eval_str) + return self.field_info.get_vector_fields_info() def get_field_unit(self, field): """Returns the unit of the field.""" - quantity = self._field_unit_quantity(field) - if quantity == "*null*": - return "" - scheme_eval_str = f"(units/get-pretty-wb-units-from-dimension (units/inquire-dimension '{quantity}))" - return " ".join(self._scheme_str_to_py_list(scheme_eval_str)) + session = self.obj.get_root().session + if FluentVersion(session.scheme_eval.version) < FluentVersion.v252: + quantity = self._field_unit_quantity(field) + if quantity == "*null*": + return "" + scheme_eval_str = f"(units/get-pretty-wb-units-from-dimension (units/inquire-dimension '{quantity}))" + return " ".join(self._scheme_str_to_py_list(scheme_eval_str)) + else: + fields_info = self.field_info.get_fields_info() + for field_info in fields_info: + if field_info["solverName"] == field: + return get_si_unit_for_fluent_quantity(field_info["quantity_name"]) def _field_unit_quantity(self, field): scheme_eval_str = f"(cdr (assq 'units (%fill-render-info '{field})))" diff --git a/src/ansys/fluent/core/post_objects/post_object_definitions.py b/src/ansys/fluent/core/post_objects/post_object_definitions.py index 0698fb001aa..3468c7eaaba 100644 --- a/src/ansys/fluent/core/post_objects/post_object_definitions.py +++ b/src/ansys/fluent/core/post_objects/post_object_definitions.py @@ -19,7 +19,7 @@ class BasePostObjectDefn: def _pre_display(self): local_surfaces_provider = self.get_root()._local_surfaces_provider() - for surf_name in self.surfaces_list(): + for surf_name in self.surfaces(): if surf_name in list(local_surfaces_provider): surf_obj = local_surfaces_provider[surf_name] surf_api = surf_obj._api_helper.surface_api @@ -27,7 +27,7 @@ def _pre_display(self): def _post_display(self): local_surfaces_provider = self.get_root()._local_surfaces_provider() - for surf_name in self.surfaces_list(): + for surf_name in self.surfaces(): if surf_name in list(local_surfaces_provider): surf_obj = local_surfaces_provider[surf_name] surf_api = surf_obj._api_helper.surface_api @@ -128,7 +128,7 @@ def allowed_values(self): """X axis function allowed values.""" return ["direction-vector"] - class surfaces_list(metaclass=PyLocalPropertyMeta): + class surfaces(metaclass=PyLocalPropertyMeta): """List of surfaces for plotting.""" value: List[str] = [] @@ -146,7 +146,7 @@ class MeshDefn(GraphicsDefn): PLURAL = "Meshes" - class surfaces_list(metaclass=PyLocalPropertyMeta): + class surfaces(metaclass=PyLocalPropertyMeta): """List of surfaces for mesh graphics.""" value: List[str] = [] @@ -189,7 +189,7 @@ def allowed_values(self): """Field allowed values.""" return list(self._api_helper.field_info().get_scalar_fields_info()) - class surfaces_list(metaclass=PyLocalPropertyMeta): + 
class surfaces(metaclass=PyLocalPropertyMeta): """List of surfaces for pathlines.""" value: List[str] = [] @@ -207,6 +207,11 @@ class SurfaceDefn(GraphicsDefn): PLURAL = "Surfaces" + @property + def name(self) -> str: + """Return name of the surface.""" + return self._name + class show_edges(metaclass=PyLocalPropertyMeta): """Show edges for surface.""" @@ -376,7 +381,7 @@ def allowed_values(self): """Field allowed values.""" return list(self._api_helper.field_info().get_scalar_fields_info()) - class surfaces_list(metaclass=PyLocalPropertyMeta): + class surfaces(metaclass=PyLocalPropertyMeta): """Contour surfaces.""" value: List[str] = [] @@ -557,7 +562,7 @@ def allowed_values(self): """Field allowed values.""" return list(self._api_helper.field_info().get_scalar_fields_info()) - class surfaces_list(metaclass=PyLocalPropertyMeta): + class surfaces(metaclass=PyLocalPropertyMeta): """List of surfaces for vector graphics.""" value: List[str] = [] diff --git a/src/ansys/fluent/core/post_objects/post_objects_container.py b/src/ansys/fluent/core/post_objects/post_objects_container.py index d2bd171102b..a7bb53b5cb8 100644 --- a/src/ansys/fluent/core/post_objects/post_objects_container.py +++ b/src/ansys/fluent/core/post_objects/post_objects_container.py @@ -80,14 +80,51 @@ def __call__(self, show_attributes=False): return state def _init_module(self, obj, mod, post_api_helper): + """ + Dynamically initializes and attaches containers for classes in a module. + + Args: + obj: The parent object to which containers are attached. + mod: The module containing class definitions to process. + post_api_helper: Helper object for post-processing API interactions. + + This method identifies classes in the module that match certain criteria, + creates a container for managing instances of these classes, and attaches + the container to the parent object (`obj`). A `create()` method is also + dynamically added to each container for creating and initializing new objects. + """ + # Iterate through all attributes in the module's dictionary for name, cls in mod.__dict__.items(): if cls.__class__.__name__ in ( "PyLocalNamedObjectMetaAbstract", ) and not inspect.isabstract(cls): + cont = PyLocalContainer(self, cls, post_api_helper, cls.PLURAL) + + # Define a method to add a "create" function to the container + def _add_create(py_cont): + def _create(**kwargs): + new_object = py_cont.__getitem__( + py_cont._get_unique_chid_name() + ) + # Validate that all kwargs are valid attributes for the object + unexpected_args = set(kwargs) - set(new_object()) + if unexpected_args: + raise TypeError( + f"create() got an unexpected keyword argument '{next(iter(unexpected_args))}'." 
+ ) + for key, value in kwargs.items(): + setattr(new_object, key, value) + return new_object + + return _create + + # Attach the create method to the container + setattr(cont, "create", _add_create(cont)) + # Attach the container to the parent object setattr( obj, cls.PLURAL, - PyLocalContainer(self, cls, post_api_helper, cls.PLURAL), + cont, ) @@ -179,7 +216,7 @@ def add_outline_mesh(self): if meshes is not None: outline_mesh_id = "mesh-outline" outline_mesh = meshes[outline_mesh_id] - outline_mesh.surfaces_list = [ + outline_mesh.surfaces = [ k for k, v in outline_mesh._api_helper.field_info() .get_surfaces_info() diff --git a/src/ansys/fluent/core/search.py b/src/ansys/fluent/core/search.py index 32411f4b358..e5527fd9e85 100644 --- a/src/ansys/fluent/core/search.py +++ b/src/ansys/fluent/core/search.py @@ -8,25 +8,17 @@ from pathlib import Path import pickle import re -import sys -from typing import Any import warnings -from ansys.fluent.core.solver import flobject +import ansys.fluent.core as pyfluent from ansys.fluent.core.solver.error_message import closest_allowed_names from ansys.fluent.core.utils.fluent_version import ( FluentVersion, get_version_for_file_name, ) -from ansys.fluent.core.workflow import ( - BaseTask, - ClassicWorkflow, - TaskContainer, - Workflow, -) -def _get_api_tree_data_file(): +def _get_api_tree_data_file_path(): """Get API tree data file.""" from ansys.fluent.core import CODEGEN_OUTDIR @@ -62,142 +54,24 @@ def _remove_suffix(input: str, suffix): _meshing_rules = ["workflow", "meshing", "PartManagement", "PMFileManagement"] -def _get_version_path_prefix_from_obj(obj: Any): - from ansys.fluent.core.services.datamodel_se import PyMenu, PyNamedObjectContainer - from ansys.fluent.core.services.datamodel_tui import TUIMenu - from ansys.fluent.core.session_pure_meshing import PureMeshing - from ansys.fluent.core.session_solver import Solver - - path = None - version = None - prefix = None - if isinstance(obj, PureMeshing): - path = [""] - version = get_version_for_file_name(obj.get_fluent_version().value) - prefix = "" - elif isinstance(obj, Solver): - path = [""] - version = get_version_for_file_name(obj.get_fluent_version().value) - prefix = "" - elif isinstance(obj, TUIMenu): - module = obj.__class__.__module__ - path = [ - ( - "" - if module.startswith("meshing") - else "" - ), - "tui", - ] - path.extend(obj._path) - version = module.rsplit("_", 1)[-1] - prefix = "" - elif isinstance(obj, (ClassicWorkflow, Workflow)): - path = ["", obj.rules] - module = obj._workflow.__class__.__module__ - version = module.rsplit("_", 1)[-1] - prefix = "" - elif isinstance(obj, BaseTask): - path = ["", obj.rules] - path.extend([f"{k[0]}:" if k[1] else k[0] for k in obj.path]) - module = obj._workflow.__class__.__module__ - version = module.rsplit("_", 1)[-1] - prefix = "" - elif isinstance(obj, TaskContainer): - path = ["", obj.rules] - path.extend([f"{k[0]}:" if k[1] else k[0] for k in obj.path]) - path[-1] = f"{path[-1]}:" - module = obj._container._workflow.__class__.__module__ - version = module.rsplit("_", 1)[-1] - prefix = '[""]' - elif isinstance(obj, PyMenu): - rules = obj.rules - path = ["" if rules in _meshing_rules else ""] - path.append(rules) - path.extend([f"{k[0]}:" if k[1] else k[0] for k in obj.path]) - module = obj.__class__.__module__ - version = module.rsplit("_", 1)[-1] - prefix = "" - elif isinstance(obj, PyNamedObjectContainer): - rules = obj.rules - path = ["" if rules in _meshing_rules else ""] - path.append(rules) - path.extend([f"{k[0]}:" if k[1] else 
k[0] for k in obj.path]) - path[-1] = f"{path[-1]}:" - module = obj.__class__.__module__ - version = module.rsplit("_", 1)[-1] - prefix = '[""]' - elif isinstance(obj, flobject.Group): - module = obj.__class__.__module__ - version = module.rsplit("_", 1)[-1] - prefix = "" - path = [""] - # Cannot deduce the whole path without api_tree - elif isinstance(obj, flobject.NamedObject): - module = obj.__class__.__module__ - version = module.rsplit("_", 1)[-1] - prefix = '[""]' - path = [""] - # Cannot deduce the whole path without api_tree - return version, path, prefix - - -def _search( - word: str, - match_whole_word: bool = False, - match_case: bool = False, +def _generate_api_data( version: str | None = None, - search_root: Any | None = None, - write_api_tree_data: bool | None = False, ): - """Search for a word through the Fluent's object hierarchy. + """Generate API tree data. Parameters ---------- - word : str - Word to search for. - match_whole_word : bool, optional - Whether to match whole word, by default False - match_case : bool, optional - Whether to match case, by default False version : str, optional Fluent version to search in. The default is ``None``. If ``None``, it searches in the latest version for which codegen was run. - search_root : Any, optional - The root object within which the search is performed. - It can be a session object or any API object within a session. - The default is ``None``. If ``None``, it searches everything. write_api_tree_data: bool, optional Whether to write the API tree data. - - Examples - -------- - >>> import ansys.fluent.core as pyfluent - >>> pyfluent.search("geometry") - .tui.file.import_.cad_geometry (Command) - .tui.display.update_scene.select_geometry (Command) - .meshing.ImportGeometry (Command) - .meshing.LoadCADGeometry (Command) - .tui.solve.initialize.compute_defaults.geometry (Command) - .tui.report.reference_values.compute.geometry (Command) - .tui.define.geometry (Command) - .tui.mesh.geometry (Object) - .setup.boundary_conditions.geometry[""] (Object) - .setup.geometry (Object) - .solution.report_definitions.surface[""].geometry (Parameter) - .solution.report_definitions.volume[""].geometry (Parameter) - .results.graphics.mesh[""].geometry (Parameter) - .results.graphics.contour[""].geometry (Parameter) """ - api_objects = [] - api_tui_objects = [] - api_object_names = [] - results = [] + api_objects = set() + api_tui_objects = set() + api_object_names = set() if version: version = get_version_for_file_name(version) - root_version, root_path, prefix = _get_version_path_prefix_from_obj(search_root) - if search_root and not prefix: - return if not version: for fluent_version in FluentVersion: version = get_version_for_file_name(fluent_version.value) @@ -207,35 +81,7 @@ def _search( with open(api_tree_file, "rb") as f: api_tree = pickle.load(f) - if isinstance(search_root, (flobject.Group, flobject.NamedObject)): - path = root_path + [ - flobject.to_python_name(x) for x in search_root.path.split("/") - ] - root_path = [] - tree = api_tree - while path: - p = path.pop(0) - if p in tree: - tree = tree[p] - root_path.append(p) - elif f"{p}:" in tree: - tree = tree[f"{p}:"] - root_path.append(f"{p}:") - if path: - path.pop(0) - else: - return - - def inner(tree, path, root_path): - if root_path: - path = prefix - while root_path: - p = root_path.pop(0) - if p in tree: - tree = tree[p] - else: - return - + def inner(tree, path): for k, v in tree.items(): if k in ("", ""): next_path = k @@ -249,21 +95,20 @@ def inner(tree, path, root_path): 
else: next_path = f"{path}.{k}" type_ = "Object" if isinstance(v, Mapping) else v - api_object_names.append(k) + api_object_names.add(k) if "tui" in next_path: - api_tui_objects.append(f"{next_path} ({type_})") + api_tui_objects.add(f"{next_path} ({type_})") else: - api_objects.append(f"{next_path} ({type_})") - if _match(k, word, match_whole_word, match_case): - results.append(f"{next_path} ({type_})") + api_objects.add(f"{next_path} ({type_})") if isinstance(v, Mapping): - inner(v, next_path, root_path) + inner(v, next_path) - inner(api_tree, "", root_path) + inner(api_tree, "") api_tree_data = dict() - api_tree_data["api_objects"] = sorted(api_objects) - api_tree_data["api_tui_objects"] = sorted(api_tui_objects) + api_tree_data["api_objects"] = sorted(list(api_objects)) + api_tree_data["api_tui_objects"] = sorted(list(api_tui_objects)) + api_tree_data["all_api_object_names"] = sorted(list(api_object_names)) def _write_api_tree_file(api_tree_data: dict, api_object_names: list): from nltk.corpus import wordnet as wn @@ -276,35 +121,31 @@ def _write_api_tree_file(api_tree_data: dict, api_object_names: list): all_api_object_name_synsets = dict() for name in api_object_names: - api_object_name_synsets = ( - wn.synsets(name.decode("utf-8"), lang="eng") - if sys.version_info[0] < 3 - else wn.synsets(name, lang="eng") - ) - synset_names = [] + api_object_name_synsets = wn.synsets(name, lang="eng") + synset_names = set() for api_object_name_synset in api_object_name_synsets: - synset_names.append(api_object_name_synset.name().split(".")[0]) - all_api_object_name_synsets[name] = synset_names + synset_names.add(api_object_name_synset.name()) + if synset_names: + all_api_object_name_synsets[name] = sorted(list(synset_names)) api_tree_data["all_api_object_name_synsets"] = all_api_object_name_synsets - api_tree_file = _get_api_tree_data_file() - api_tree_file.touch() - with open(api_tree_file, "w") as json_file: + api_tree_file_path = _get_api_tree_data_file_path() + api_tree_file_path.touch() + with open(api_tree_file_path, "w") as json_file: json.dump(api_tree_data, json_file) - if write_api_tree_data: - _write_api_tree_file( - api_tree_data=api_tree_data, api_object_names=list(api_object_names) - ) - return results + _write_api_tree_file( + api_tree_data=api_tree_data, api_object_names=list(api_object_names) + ) + api_tree_file.unlink() @functools.cache def _get_api_tree_data(): """Get API tree data.""" - api_tree_data_file = _get_api_tree_data_file() - if api_tree_data_file.exists(): - json_file = open(api_tree_data_file, "r") + api_tree_data_file_path = _get_api_tree_data_file_path() + if api_tree_data_file_path.exists(): + json_file = open(api_tree_data_file_path, "r") api_tree_data = json.load(json_file) return api_tree_data @@ -319,13 +160,32 @@ def _print_search_results(queries: list, api_tree_data: dict): api_tree_data: dict All API object data. 
""" + results = [] api_tree_data = api_tree_data if api_tree_data else _get_api_tree_data() api_tree_datas = [api_tree_data["api_objects"], api_tree_data["api_tui_objects"]] - for api_tree_data in api_tree_datas: + + def _get_results(api_tree_data): + results = [] for query in queries: for api_object in api_tree_data: - if query in api_object: - print(api_object) + if api_object.split()[0].endswith(query): + results.append(api_object) + return results + + settings_results = _get_results(api_tree_datas[0]) + tui_results = _get_results(api_tree_datas[1]) + + settings_results.sort() + tui_results.sort() + + results.extend(settings_results) + results.extend(tui_results) + + if pyfluent.PRINT_SEARCH_RESULTS: + for result in results: + print(result) + elif results: + return results def _get_wildcard_matches_for_word_from_names(word: str, names: list): @@ -364,10 +224,10 @@ def _search_wildcard(search_string: str, api_tree_data: dict): """ api_tree_data = api_tree_data if api_tree_data else _get_api_tree_data() queries = _get_wildcard_matches_for_word_from_names( - search_string, names=list(api_tree_data["all_api_object_name_synsets"].keys()) + search_string, names=api_tree_data["all_api_object_names"] ) if queries: - _print_search_results(queries, api_tree_data=api_tree_data) + return _print_search_results(queries, api_tree_data=api_tree_data) def _get_exact_match_for_word_from_names( @@ -387,7 +247,7 @@ def _get_exact_match_for_word_from_names( ------- List of exact match. """ - return [name for name in names if word == name] + return list({name for name in names if word == name or word in name}) def _get_capitalize_match_for_word_from_names( @@ -459,7 +319,7 @@ def _get_close_matches_for_word_from_names( def _search_whole_word( search_string: str, match_case: bool = False, - match_whole_word: bool = False, + match_whole_word: bool = True, api_tree_data: dict = None, ): """Perform exact search for a word through the Fluent's object hierarchy. 
@@ -483,43 +343,43 @@ def _search_whole_word( """ api_tree_data = api_tree_data if api_tree_data else _get_api_tree_data() queries = [] - if match_case and match_whole_word: + if not match_case and not match_whole_word: queries.extend( - _get_exact_match_for_word_from_names( + _get_capitalize_match_for_word_from_names( search_string, - names=list(api_tree_data["all_api_object_name_synsets"].keys()), + names=api_tree_data["all_api_object_names"], ) ) - elif match_case: queries.extend( _get_match_case_for_word_from_names( search_string, - names=list(api_tree_data["all_api_object_name_synsets"].keys()), + names=api_tree_data["all_api_object_names"], ) ) - elif match_whole_word: - for word in [search_string, search_string.capitalize()]: - queries.extend( - _get_exact_match_for_word_from_names( - word, - names=list(api_tree_data["all_api_object_name_synsets"].keys()), - ) - ) - elif not match_case and not match_whole_word: + elif match_case and match_whole_word: queries.extend( - _get_capitalize_match_for_word_from_names( + _get_exact_match_for_word_from_names( search_string, - names=list(api_tree_data["all_api_object_name_synsets"].keys()), + names=api_tree_data["all_api_object_names"], ) ) + elif match_case: queries.extend( _get_match_case_for_word_from_names( search_string, - names=list(api_tree_data["all_api_object_name_synsets"].keys()), + names=api_tree_data["all_api_object_names"], ) ) + elif match_whole_word: + for word in [search_string, search_string.capitalize()]: + queries.extend( + _get_exact_match_for_word_from_names( + word, + names=api_tree_data["all_api_object_names"], + ) + ) if queries: - _print_search_results(queries, api_tree_data=api_tree_data) + return _print_search_results(queries, api_tree_data=api_tree_data) def _download_nltk_data(): @@ -567,32 +427,31 @@ def _search_semantic(search_string: str, language: str, api_tree_data: dict): api_tree_data = api_tree_data if api_tree_data else _get_api_tree_data() similar_keys = set() - search_string_synsets = ( - wn.synsets(search_string.decode("utf-8"), lang=language) - if sys.version_info[0] < 3 - else wn.synsets(search_string, lang=language) - ) + search_string_synsets = set(wn.synsets(search_string, lang=language)) for api_object_name, api_object_synset_names in list( api_tree_data["all_api_object_name_synsets"].items() ): - for search_string_synset in search_string_synsets: - for api_object_synset_name in api_object_synset_names: - search_string_synset_name = search_string_synset.name().split(".")[0] - if ( - search_string in api_object_synset_name - or search_string_synset_name in api_object_synset_name - ): - similar_keys.add(api_object_synset_name + "*") + api_object_synsets = { + wn.synset(api_object_synset_name) + for api_object_synset_name in api_object_synset_names + } + if search_string_synsets & api_object_synsets: + similar_keys.add(api_object_name + "*") if similar_keys: + results = [] for key in similar_keys: - _search_wildcard(key, api_tree_data) + result = _search_wildcard(key, api_tree_data) + if result: + results.extend(result) + if results: + return results else: queries = _get_close_matches_for_word_from_names( search_string, - names=list(api_tree_data["all_api_object_name_synsets"].keys()), + names=api_tree_data["all_api_object_names"], ) if queries: - _print_search_results(queries, api_tree_data=api_tree_data) + return _print_search_results(queries, api_tree_data=api_tree_data) def search( @@ -646,47 +505,35 @@ def search( "``wildcard=True`` matches wildcard pattern.", UserWarning, ) - elif language and 
match_whole_word: - warnings.warn( - "``match_whole_word=True`` matches the whole word (case insensitive).", - UserWarning, - ) - elif match_whole_word: - warnings.warn( - "``match_whole_word=True`` matches the whole word (case insensitive).", - UserWarning, - ) - elif match_case: - warnings.warn( - "``match_case=True`` matches the whole word (case sensitive).", - UserWarning, - ) api_tree_data = _get_api_tree_data() - try: - _search_semantic(search_string, language, api_tree_data=api_tree_data) - except ModuleNotFoundError: - pass - except LookupError: - _download_nltk_data() - _search_semantic(search_string, language, api_tree_data=api_tree_data) - if wildcard: - _search_wildcard( + return _search_wildcard( search_string, api_tree_data=api_tree_data, ) elif match_whole_word: if not match_case: - _search_whole_word( + return _search_whole_word( search_string, match_whole_word=True, api_tree_data=api_tree_data ) else: - _search_whole_word( - search_string, match_case=True, api_tree_data=api_tree_data + return _search_whole_word( + search_string, + match_case=True, + match_whole_word=True, + api_tree_data=api_tree_data, ) else: - _search_whole_word( - search_string, match_whole_word=True, api_tree_data=api_tree_data - ) + try: + return _search_semantic( + search_string, language, api_tree_data=api_tree_data + ) + except ModuleNotFoundError: + pass + except LookupError: + _download_nltk_data() + return _search_semantic( + search_string, language, api_tree_data=api_tree_data + ) diff --git a/src/ansys/fluent/core/services/__init__.py b/src/ansys/fluent/core/services/__init__.py index 6cc7b15a406..3a3b9a62330 100644 --- a/src/ansys/fluent/core/services/__init__.py +++ b/src/ansys/fluent/core/services/__init__.py @@ -1,5 +1,6 @@ """Provides a module to create gRPC services.""" +from ansys.fluent.core.services.app_utilities import AppUtilities from ansys.fluent.core.services.batch_ops import BatchOpsService from ansys.fluent.core.services.datamodel_se import ( DatamodelService as DatamodelService_SE, @@ -22,6 +23,7 @@ from ansys.fluent.core.services.transcript import TranscriptService _service_cls_by_name = { + "app_utilities": AppUtilities, "health_check": HealthCheckService, "datamodel": DatamodelService_SE, "tui": DatamodelService_TUI, diff --git a/src/ansys/fluent/core/services/api_upgrade.py b/src/ansys/fluent/core/services/api_upgrade.py index b90c8647ec8..f8e957fde5a 100644 --- a/src/ansys/fluent/core/services/api_upgrade.py +++ b/src/ansys/fluent/core/services/api_upgrade.py @@ -3,7 +3,7 @@ import os from typing import TypeVar -from ansys.fluent.core.services.scheme_eval import SchemeEval +from ansys.fluent.core.services.app_utilities import AppUtilities from ansys.fluent.core.utils.fluent_version import FluentVersion _TApiUpgradeAdvisor = TypeVar("_TApiUpgradeAdvisor", bound="ApiUpgradeAdvisor") @@ -12,11 +12,17 @@ class ApiUpgradeAdvisor: """API upgrade advisor.""" - def __init__(self, scheme_eval: SchemeEval, version: str, mode: str) -> None: + def __init__( + self, + app_utilities: AppUtilities, + version: str, + mode: str, + ) -> None: """Initialize ApiUpgradeAdvisor.""" - self._scheme_eval = scheme_eval.scheme_eval + self._app_utilities = app_utilities self._version = version self._mode = mode + self._id = None def _can_advise(self) -> bool: return ( @@ -27,16 +33,12 @@ def _can_advise(self) -> bool: def __enter__(self) -> _TApiUpgradeAdvisor: if self._can_advise(): - self._scheme_eval("(define pyfluent-journal-str-port (open-output-string))") - 
self._scheme_eval("(api-echo-python-port pyfluent-journal-str-port)") + self._id = self._app_utilities.start_python_journal() return self def __exit__(self, exc_type, exc_value, exc_tb) -> None: if self._can_advise(): - self._scheme_eval("(api-unecho-python-port pyfluent-journal-str-port)") - journal_str = self._scheme_eval( - "(close-output-port pyfluent-journal-str-port)" - ).strip() + journal_str = (self._app_utilities.stop_python_journal(self._id)).strip() if ( journal_str.startswith("solver.") and not journal_str.startswith("solver.tui") diff --git a/src/ansys/fluent/core/services/app_utilities.py b/src/ansys/fluent/core/services/app_utilities.py new file mode 100644 index 00000000000..86ce0147e54 --- /dev/null +++ b/src/ansys/fluent/core/services/app_utilities.py @@ -0,0 +1,407 @@ +"""Wrappers over AppUtilities gRPC service of Fluent.""" + +from enum import Enum +from typing import List, Tuple + +import grpc + +from ansys.api.fluent.v0 import app_utilities_pb2 as AppUtilitiesProtoModule +from ansys.api.fluent.v0 import app_utilities_pb2_grpc as AppUtilitiesGrpcModule +from ansys.fluent.core.services.interceptors import ( + BatchInterceptor, + ErrorStateInterceptor, + GrpcErrorInterceptor, + TracingInterceptor, +) +from ansys.fluent.core.streaming_services.events_streaming import SolverEvent + + +class AppUtilitiesService: + """AppUtilities Service.""" + + def __init__( + self, channel: grpc.Channel, metadata: List[Tuple[str, str]], fluent_error_state + ): + """__init__ method of AppUtilities class.""" + intercept_channel = grpc.intercept_channel( + channel, + GrpcErrorInterceptor(), + ErrorStateInterceptor(fluent_error_state), + TracingInterceptor(), + BatchInterceptor(), + ) + self._stub = AppUtilitiesGrpcModule.AppUtilitiesStub(intercept_channel) + self._metadata = metadata + + def get_product_version( + self, request: AppUtilitiesProtoModule.GetProductVersionRequest + ) -> AppUtilitiesProtoModule.GetProductVersionResponse: + """Get product version RPC of AppUtilities service.""" + return self._stub.GetProductVersion(request, metadata=self._metadata) + + def get_build_info( + self, request: AppUtilitiesProtoModule.GetBuildInfoRequest + ) -> AppUtilitiesProtoModule.GetBuildInfoResponse: + """Get build info RPC of AppUtilities service.""" + return self._stub.GetBuildInfo(request, metadata=self._metadata) + + def get_controller_process_info( + self, request: AppUtilitiesProtoModule.GetControllerProcessInfoRequest + ) -> AppUtilitiesProtoModule.GetControllerProcessInfoResponse: + """Get controller process info RPC of AppUtilities service.""" + return self._stub.GetControllerProcessInfo(request, metadata=self._metadata) + + def get_solver_process_info( + self, request: AppUtilitiesProtoModule.GetSolverProcessInfoRequest + ) -> AppUtilitiesProtoModule.GetSolverProcessInfoResponse: + """Get solver process info RPC of AppUtilities service.""" + return self._stub.GetSolverProcessInfo(request, metadata=self._metadata) + + def get_app_mode( + self, request: AppUtilitiesProtoModule.GetAppModeRequest + ) -> AppUtilitiesProtoModule.GetAppModeResponse: + """Get app mode RPC of AppUtilities service.""" + return self._stub.GetAppMode(request, metadata=self._metadata) + + def start_python_journal( + self, request: AppUtilitiesProtoModule.StartPythonJournalRequest + ) -> AppUtilitiesProtoModule.StartPythonJournalResponse: + """Start python journal RPC of AppUtilities service.""" + return self._stub.StartPythonJournal(request, metadata=self._metadata) + + def stop_python_journal( + self, request: 
AppUtilitiesProtoModule.StopPythonJournalRequest + ) -> AppUtilitiesProtoModule.StopPythonJournalResponse: + """Stop python journal RPC of AppUtilities service.""" + return self._stub.StopPythonJournal(request, metadata=self._metadata) + + def is_beta_enabled( + self, request: AppUtilitiesProtoModule.IsBetaEnabledRequest + ) -> AppUtilitiesProtoModule.IsBetaEnabledResponse: + """Is beta enabled RPC of AppUtilities service.""" + return self._stub.IsBetaEnabled(request, metadata=self._metadata) + + def is_wildcard( + self, request: AppUtilitiesProtoModule.IsWildcardRequest + ) -> AppUtilitiesProtoModule.IsWildcardResponse: + """Is wildcard RPC of AppUtilities service.""" + return self._stub.IsWildcard(request, metadata=self._metadata) + + def is_solution_data_available( + self, request: AppUtilitiesProtoModule.IsSolutionDataAvailableRequest + ) -> AppUtilitiesProtoModule.IsSolutionDataAvailableResponse: + """Is solution data available RPC of AppUtilities service.""" + return self._stub.IsSolutionDataAvailable(request, metadata=self._metadata) + + def register_pause_on_solution_events( + self, request: AppUtilitiesProtoModule.RegisterPauseOnSolutionEventsRequest + ) -> AppUtilitiesProtoModule.RegisterPauseOnSolutionEventsResponse: + """Register pause on solution events RPC of AppUtilities service.""" + return self._stub.RegisterPauseOnSolutionEvents( + request, metadata=self._metadata + ) + + def resume_on_solution_event( + self, request: AppUtilitiesProtoModule.ResumeOnSolutionEventRequest + ) -> AppUtilitiesProtoModule.ResumeOnSolutionEventResponse: + """Resume on solution event RPC of AppUtilities service.""" + return self._stub.ResumeOnSolutionEvent(request, metadata=self._metadata) + + def unregister_pause_on_solution_events( + self, request: AppUtilitiesProtoModule.UnregisterPauseOnSolutionEventsRequest + ) -> AppUtilitiesProtoModule.UnregisterPauseOnSolutionEventsResponse: + """Unregister pause on solution events RPC of AppUtilities service.""" + return self._stub.UnregisterPauseOnSolutionEvents( + request, metadata=self._metadata + ) + + def exit( + self, request: AppUtilitiesProtoModule.ExitRequest + ) -> AppUtilitiesProtoModule.ExitResponse: + """Exit RPC of AppUtilities service.""" + return self._stub.Exit(request, metadata=self._metadata) + + +class AppUtilitiesOld: + """AppUtilitiesOld.""" + + def __init__(self, scheme_eval): + """__init__ method of AppUtilitiesOld class.""" + self.scheme_eval = scheme_eval + + def get_product_version(self) -> str: + """Get product version.""" + return self.scheme_eval.version + + def get_build_info(self) -> dict: + """Get build info.""" + build_time = self.scheme_eval.scheme_eval("(inquire-build-time)") + build_id = self.scheme_eval.scheme_eval("(inquire-build-id)") + vcs_revision = self.scheme_eval.scheme_eval("(inquire-src-vcs-id)") + vcs_branch = self.scheme_eval.scheme_eval("(inquire-src-vcs-branch)") + return { + "build_time": build_time, + "build_id": build_id, + "vcs_revision": vcs_revision, + "vcs_branch": vcs_branch, + } + + def get_controller_process_info(self) -> dict: + """Get controller process info.""" + cortex_host = self.scheme_eval.scheme_eval("(cx-cortex-host)") + cortex_pid = self.scheme_eval.scheme_eval("(cx-cortex-id)") + cortex_pwd = self.scheme_eval.scheme_eval("(cortex-pwd)") + return { + "hostname": cortex_host, + "process_id": cortex_pid, + "working_directory": cortex_pwd, + } + + def get_solver_process_info(self) -> dict: + """Get solver process info.""" + fluent_host = self.scheme_eval.scheme_eval("(cx-client-host)") 
+ fluent_pid = self.scheme_eval.scheme_eval("(cx-client-id)") + fluent_pwd = self.scheme_eval.scheme_eval("(cx-send '(cx-client-pwd))") + return { + "hostname": fluent_host, + "process_id": fluent_pid, + "working_directory": fluent_pwd, + } + + def get_app_mode(self) -> Enum: + """Get app mode.""" + from ansys.fluent.core import FluentMode + + if self.scheme_eval.scheme_eval("(cx-solver-mode?)"): + mode_str = self.scheme_eval.scheme_eval('(getenv "PRJAPP_APP")') + if mode_str == "flaero_server": + return FluentMode.SOLVER_AERO + elif mode_str == "flicing": + return FluentMode.SOLVER_ICING + else: + return FluentMode.SOLVER + else: + return FluentMode.MESHING + + def start_python_journal(self, journal_name: str | None = None) -> str: + """Start python journal.""" + if journal_name: + self.scheme_eval.exec([f'(api-start-python-journal "{journal_name}")']) + else: + self.scheme_eval.scheme_eval( + "(define pyfluent-journal-str-port (open-output-string))" + ) + self.scheme_eval.scheme_eval( + "(api-echo-python-port pyfluent-journal-str-port)" + ) + return "1" + + def stop_python_journal(self, journal_id: str | None = None) -> str: + """Stop python journal.""" + if journal_id: + self.scheme_eval.scheme_eval( + "(api-unecho-python-port pyfluent-journal-str-port)" + ) + journal_str = self.scheme_eval.scheme_eval( + "(close-output-port pyfluent-journal-str-port)" + ) + return journal_str + else: + self.scheme_eval.exec(["(api-stop-python-journal)"]) + + def is_beta_enabled(self) -> bool: + """Is beta enabled.""" + return self.scheme_eval.scheme_eval("(is-beta-feature-available?)") + + def is_wildcard(self, input: str | None = None) -> bool: + """Is wildcard.""" + return self.scheme_eval.scheme_eval(f'(has-fnmatch-wild-card? "{input}")') + + def is_solution_data_available(self) -> bool: + """Is solution data available.""" + return self.scheme_eval.scheme_eval("(data-valid?)") + + def register_pause_on_solution_events(self, solution_event: SolverEvent) -> int: + """Register pause on solution events.""" + unique_id: int = self.scheme_eval.scheme_eval( + f""" + (let + ((ids + (let loop ((i 1)) + (define next-id (string->symbol (format #f "pyfluent-~d" i))) + (if (check-monitor-existence next-id) + (loop (1+ i)) + (list i next-id) + ) + ) + )) + (register-solution-monitor + (cadr ids) + (lambda (niter time) + (if (integer? niter) + (begin + (events/transmit 'auto-pause (cons (car ids) niter)) + (grpcserver/auto-pause (is-server-running?) (cadr ids)) + ) + ) + () + ) + {'#t' if solution_event == SolverEvent.TIMESTEP_ENDED else '#f'} + ) + (car ids) + ) + """ + ) + return unique_id + + def resume_on_solution_event(self, registration_id: int) -> None: + """Resume on solution event.""" + self.scheme_eval.scheme_eval( + f"(grpcserver/auto-resume (is-server-running?) 
'pyfluent-{registration_id})" + ) + + def unregister_pause_on_solution_events(self, registration_id: int) -> None: + """Unregister pause on solution events.""" + self.scheme_eval.scheme_eval( + f"(cancel-solution-monitor 'pyfluent-{registration_id})" + ) + + def exit(self) -> None: + """Exit.""" + self.scheme_eval.exec(("(exit-server)",)) + + +class AppUtilities: + """AppUtilities.""" + + def __init__(self, service: AppUtilitiesService): + """__init__ method of AppUtilities class.""" + self.service = service + + def get_product_version(self) -> str: + """Get product version.""" + request = AppUtilitiesProtoModule.GetProductVersionRequest() + response = self.service.get_product_version(request) + return f"{response.major}.{response.minor}.{response.patch}" + + def get_build_info(self) -> dict: + """Get build info.""" + request = AppUtilitiesProtoModule.GetBuildInfoRequest() + response = self.service.get_build_info(request) + return { + "build_time": response.build_time, + "build_id": response.build_id, + "vcs_revision": response.vcs_revision, + "vcs_branch": response.vcs_branch, + } + + def get_controller_process_info(self) -> dict: + """Get controller process info.""" + request = AppUtilitiesProtoModule.GetControllerProcessInfoRequest() + response = self.service.get_controller_process_info(request) + return { + "hostname": response.hostname, + "process_id": response.process_id, + "working_directory": response.working_directory, + } + + def get_solver_process_info(self) -> dict: + """Get solver process info.""" + request = AppUtilitiesProtoModule.GetSolverProcessInfoRequest() + response = self.service.get_solver_process_info(request) + return { + "hostname": response.hostname, + "process_id": response.process_id, + "working_directory": response.working_directory, + } + + def get_app_mode(self) -> Enum: + """Get app mode. + + Raises + ------ + ValueError + If app mode is unknown. 
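A minimal usage sketch of the gRPC-backed facade above (an illustration only: ``solver`` is a hypothetical session handle, and ``_app_utilities`` is the private attribute wired up in session.py later in this patch):

    import ansys.fluent.core as pyfluent

    # Assumes a local Fluent installation that launch_fluent() can start.
    solver = pyfluent.launch_fluent()
    mode = solver._app_utilities.get_app_mode()  # a pyfluent.FluentMode member
    build = solver._app_utilities.get_build_info()  # dict: build_time, build_id, ...
    print(mode, build)
    solver.exit()
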
+ """ + import ansys.fluent.core as pyfluent + + request = AppUtilitiesProtoModule.GetAppModeRequest() + response = self.service.get_app_mode(request) + match response.app_mode: + case AppUtilitiesProtoModule.APP_MODE_UNKNOWN: + raise ValueError("Unknown app mode.") + case AppUtilitiesProtoModule.APP_MODE_MESHING: + return pyfluent.FluentMode.MESHING + case AppUtilitiesProtoModule.APP_MODE_SOLVER: + return pyfluent.FluentMode.SOLVER + case AppUtilitiesProtoModule.APP_MODE_SOLVER_ICING: + return pyfluent.FluentMode.SOLVER_ICING + case AppUtilitiesProtoModule.APP_MODE_SOLVER_AERO: + return pyfluent.FluentMode.SOLVER_AERO + + def start_python_journal(self, journal_name: str | None = None) -> int: + """Start python journal.""" + request = AppUtilitiesProtoModule.StartPythonJournalRequest() + request.journal_name = journal_name + response = self.service.start_python_journal(request) + return response.journal_id + + def stop_python_journal(self, journal_id: str | None = None) -> str: + """Stop python journal.""" + request = AppUtilitiesProtoModule.StopPythonJournalRequest() + if journal_id: + request.journal_id = journal_id + response = self.service.stop_python_journal(request) + return response.journal_str + + def is_beta_enabled(self) -> bool: + """Is beta enabled.""" + request = AppUtilitiesProtoModule.IsBetaEnabledRequest() + response = self.service.is_beta_enabled(request) + return response.is_beta_enabled + + def is_wildcard(self, input: str | None = None) -> bool: + """Is wildcard.""" + request = AppUtilitiesProtoModule.IsWildcardRequest() + request.input = input + response = self.service.is_wildcard(request) + return response.is_wildcard + + def is_solution_data_available(self) -> bool: + """Is solution data available.""" + request = AppUtilitiesProtoModule.IsSolutionDataAvailableRequest() + response = self.service.is_solution_data_available(request) + return response.is_solution_data_available + + def register_pause_on_solution_events(self, solution_event: SolverEvent) -> int: + """Register pause on solution events.""" + request = AppUtilitiesProtoModule.RegisterPauseOnSolutionEventsRequest() + request.solution_event = AppUtilitiesProtoModule.SOLUTION_EVENT_UNKNOWN + match solution_event: + case SolverEvent.ITERATION_ENDED: + request.solution_event = ( + AppUtilitiesProtoModule.SOLUTION_EVENT_ITERATION + ) + case SolverEvent.TIMESTEP_ENDED: + request.solution_event = ( + AppUtilitiesProtoModule.SOLUTION_EVENT_TIME_STEP + ) + response = self.service.register_pause_on_solution_events(request) + return response.registration_id + + def resume_on_solution_event(self, registration_id: int) -> None: + """Resume on solution event.""" + request = AppUtilitiesProtoModule.ResumeOnSolutionEventRequest() + request.registration_id = registration_id + self.service.resume_on_solution_event(request) + + def unregister_pause_on_solution_events(self, registration_id: int) -> None: + """Unregister pause on solution events.""" + request = AppUtilitiesProtoModule.UnregisterPauseOnSolutionEventsRequest() + request.registration_id = registration_id + self.service.unregister_pause_on_solution_events(request) + + def exit(self) -> None: + """Exit.""" + request = AppUtilitiesProtoModule.ExitRequest() + self.service.exit(request) diff --git a/src/ansys/fluent/core/services/datamodel_se.py b/src/ansys/fluent/core/services/datamodel_se.py index 6b1520ed80a..1c6a5404762 100644 --- a/src/ansys/fluent/core/services/datamodel_se.py +++ b/src/ansys/fluent/core/services/datamodel_se.py @@ -1577,7 +1577,7 @@ def 
_del_item(self, key: str) -> None: # On-deleted subscription objects are unsubscribed after the datamodel # object is deleted. self[key].add_on_deleted( - lambda _: self.service.subscriptions.unsubscribe_while_deleting( + lambda: self.service.subscriptions.unsubscribe_while_deleting( self.rules, se_path, "after" ) ) diff --git a/src/ansys/fluent/core/services/datamodel_tui.py b/src/ansys/fluent/core/services/datamodel_tui.py index 75f956b7d23..97a3eeb7f3d 100644 --- a/src/ansys/fluent/core/services/datamodel_tui.py +++ b/src/ansys/fluent/core/services/datamodel_tui.py @@ -125,10 +125,12 @@ def __init__( channel: grpc.Channel, metadata: list[tuple[str, str]], fluent_error_state, + app_utilities, scheme_eval, ) -> None: """__init__ method of DatamodelService class.""" self._impl = DatamodelServiceImpl(channel, metadata, fluent_error_state) + self._app_utilities = app_utilities self._scheme_eval = scheme_eval def get_attribute_value( @@ -233,7 +235,7 @@ def execute(self, *args, **kwargs) -> Any: Query result (any Python datatype) """ with ApiUpgradeAdvisor( - self._service._scheme_eval, + self._service._app_utilities, self._version, self._mode, ): diff --git a/src/ansys/fluent/core/services/field_data.py b/src/ansys/fluent/core/services/field_data.py index b82f78ac10e..fb24d57fc8b 100644 --- a/src/ansys/fluent/core/services/field_data.py +++ b/src/ansys/fluent/core/services/field_data.py @@ -51,6 +51,10 @@ def __init__( stub=FieldGrpcModule.FieldDataStub(intercept_channel), metadata=metadata ) + def get_fields_info(self, request): + """GetFieldsInfo RPC of FieldData service.""" + return self._stub.GetFieldsInfo(request, metadata=self._metadata) + def get_scalar_field_range(self, request): """GetRange RPC of FieldData service.""" return self._stub.GetRange(request, metadata=self._metadata) @@ -88,6 +92,9 @@ class FieldInfo: Methods ------- + get_fields_info() -> List[dict] + Get fields info. + get_scalar_field_range(field: str, node_value: bool, surface_ids: List[int]) -> List[float] Get the range (minimum and maximum values) of the field. @@ -111,6 +118,17 @@ def __init__( self._service = service self._is_data_valid = is_data_valid + def get_fields_info(self) -> List[dict]: + """Get fields info. 
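A possible call pattern for this new query (a sketch: ``solver`` stands in for a live solver session with a case loaded; the class is reachable as ``solver.fields.field_info`` per the session wiring later in this patch):

    # Each returned entry is one dict describing a field, as reported by the server.
    for info in solver.fields.field_info.get_fields_info():
        print(info)
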
+ + Returns + ------- + List[dict] + """ + request = FieldDataProtoModule.GetFieldsInfo() + response = self._service.get_fields_info(request) + return response["fieldInfo"] + def get_scalar_field_range( self, field: str, node_value: bool = False, surface_ids: List[int] = None ) -> List[float]: diff --git a/src/ansys/fluent/core/services/reduction.py b/src/ansys/fluent/core/services/reduction.py index 1a4c9a3202a..412a0546567 100644 --- a/src/ansys/fluent/core/services/reduction.py +++ b/src/ansys/fluent/core/services/reduction.py @@ -263,6 +263,8 @@ def _validate_str_location(self, loc: str): raise ValueError(f"Invalid location input: '{loc}'") def _get_location_string(self, locations, ctxt) -> List[str]: + if locations == []: + return [] for loc in locations: if isinstance(loc, str): self._validate_str_location(loc) diff --git a/src/ansys/fluent/core/services/settings.py b/src/ansys/fluent/core/services/settings.py index ea9f366f6c1..b4c5efb1e8d 100644 --- a/src/ansys/fluent/core/services/settings.py +++ b/src/ansys/fluent/core/services/settings.py @@ -136,9 +136,12 @@ def _get_request_instance_for_path(request_class, path: str) -> Any: class SettingsService: """Service for accessing and modifying Fluent settings.""" - def __init__(self, channel, metadata, scheme_eval, fluent_error_state) -> None: + def __init__( + self, channel, metadata, app_utilities, scheme_eval, fluent_error_state + ) -> None: """__init__ method of SettingsService class.""" self._service_impl = _SettingsServiceImpl(channel, metadata, fluent_error_state) + self._app_utilities = app_utilities self._scheme_eval = scheme_eval @_trace @@ -369,7 +372,7 @@ def has_wildcard(self, name: str) -> bool: """Checks whether a name has a wildcard pattern.""" return self._scheme_eval.is_defined( "has-fnmatch-wild-card?" - ) and self._scheme_eval.scheme_eval(f'(has-fnmatch-wild-card? 
"{name}")') + ) and self._app_utilities.is_wildcard(name) @_trace def is_interactive_mode(self) -> bool: diff --git a/src/ansys/fluent/core/session.py b/src/ansys/fluent/core/session.py index a35ed312c98..42e45907495 100644 --- a/src/ansys/fluent/core/session.py +++ b/src/ansys/fluent/core/session.py @@ -10,6 +10,7 @@ from ansys.fluent.core.fluent_connection import FluentConnection from ansys.fluent.core.journaling import Journal from ansys.fluent.core.services import service_creator +from ansys.fluent.core.services.app_utilities import AppUtilitiesOld from ansys.fluent.core.services.field_data import FieldDataService from ansys.fluent.core.services.scheme_eval import SchemeEval from ansys.fluent.core.streaming_services.datamodel_event_streaming import ( @@ -124,7 +125,6 @@ def _build_from_fluent_connection( self.scheme_eval = scheme_eval self.rp_vars = RPVars(self.scheme_eval.string_eval) self._preferences = None - self.journal = Journal(self.scheme_eval) self._transcript_service = service_creator("transcript").create( fluent_connection._channel, fluent_connection._metadata @@ -133,10 +133,20 @@ def _build_from_fluent_connection( if self._start_transcript: self.transcript.start() + if FluentVersion(self.scheme_eval.version) < FluentVersion.v252: + self._app_utilities = AppUtilitiesOld(self.scheme_eval) + else: + self._app_utilities = ( + self._fluent_connection._connection_interface._app_utilities + ) + + self.journal = Journal(self._app_utilities) + self._datamodel_service_tui = service_creator("tui").create( fluent_connection._channel, fluent_connection._metadata, self._error_state, + self._app_utilities, self.scheme_eval, ) @@ -175,13 +185,17 @@ class Fields: def __init__(self, _session): """Initialize Fields.""" + self._is_solution_data_valid = ( + _session._app_utilities.is_solution_data_available + ) self.field_info = service_creator("field_info").create( - _session._field_data_service, _IsDataValid(_session.scheme_eval) + _session._field_data_service, + self._is_solution_data_valid, ) self.field_data = service_creator("field_data").create( _session._field_data_service, self.field_info, - _IsDataValid(_session.scheme_eval), + self._is_solution_data_valid, _session.scheme_eval, ) self.field_data_streaming = FieldDataStreaming( @@ -190,7 +204,7 @@ def __init__(self, _session): self.field_data_old = service_creator("field_data_old").create( _session._field_data_service, self.field_info, - _IsDataValid(_session.scheme_eval), + self._is_solution_data_valid, _session.scheme_eval, ) @@ -199,6 +213,7 @@ def __init__(self, _session): self._settings_service = service_creator("settings").create( fluent_connection._channel, fluent_connection._metadata, + self._app_utilities, self.scheme_eval, self._error_state, ) diff --git a/src/ansys/fluent/core/session_pure_meshing.py b/src/ansys/fluent/core/session_pure_meshing.py index f3932df2a55..188f67b14c9 100644 --- a/src/ansys/fluent/core/session_pure_meshing.py +++ b/src/ansys/fluent/core/session_pure_meshing.py @@ -156,7 +156,7 @@ def topology_based(self): RuntimeError If beta features are not enabled in Fluent. 
""" - if not self.scheme_eval.scheme_eval("(is-beta-feature-available?)"): + if not self._app_utilities.is_beta_enabled(): raise RuntimeError("Topology-based Meshing is a beta feature in Fluent.") self._base_meshing.topology_based_meshing_workflow.initialize() return self._base_meshing.topology_based_meshing_workflow diff --git a/src/ansys/fluent/core/solver/__init__.py b/src/ansys/fluent/core/solver/__init__.py index e69de29bb2d..59906802ba9 100644 --- a/src/ansys/fluent/core/solver/__init__.py +++ b/src/ansys/fluent/core/solver/__init__.py @@ -0,0 +1,6 @@ +"""The top-level module of PyFluent providing solver-related functionality.""" + +try: + from ansys.fluent.core.generated.solver.settings_builtin import * # noqa: F401, F403 +except (ImportError, AttributeError, SyntaxError): + pass diff --git a/src/ansys/fluent/core/solver/flobject.py b/src/ansys/fluent/core/solver/flobject.py index 1feab0d59c6..9e29c2f6fba 100644 --- a/src/ansys/fluent/core/solver/flobject.py +++ b/src/ansys/fluent/core/solver/flobject.py @@ -46,7 +46,6 @@ ) import warnings import weakref -from zipimport import zipimporter import ansys.fluent.core as pyfluent from ansys.fluent.core.utils.fluent_version import FluentVersion @@ -54,7 +53,7 @@ from .error_message import allowed_name_error_message, allowed_values_error from .flunits import UnhandledQuantity, get_si_unit_for_fluent_quantity -from .settings_external import expand_api_file_argument, use_search +from .settings_external import expand_api_file_argument def _ansys_units(): @@ -172,6 +171,38 @@ def to_python_name(fluent_name: str) -> str: return name +def _get_python_path_comps(obj): + """Get python path components for traversing class hierarchy.""" + comps = [] + while obj: + python_name = obj._python_name + obj = obj._parent + if isinstance(obj, (NamedObject, ListObject)): + comps.append(obj._python_name) + obj = obj._parent + else: + comps.append(python_name) + comps.reverse() + return comps[1:] + + +def _get_class_from_paths(root_cls, some_path: list[str], other_path: list[str]): + """Get the class for the given alias path.""" + parent_count = 0 + while other_path[0] == "..": + parent_count += 1 + other_path.pop(0) + for _ in range(parent_count): + some_path.pop() + full_path = some_path + other_path + cls = root_cls + for comp in full_path: + cls = cls._child_classes[comp] + if issubclass(cls, (NamedObject, ListObject)): + cls = cls.child_object_type + return cls, full_path + + class Base: """Provides the base class for settings and command objects. @@ -438,6 +469,8 @@ def _while_executing_command(self): return nullcontext() def __eq__(self, other): + if not isinstance(other, self.__class__): + return False return self.flproxy == other.flproxy and self.path == other.path @@ -646,43 +679,6 @@ def _create_child(cls, name, parent: weakref.CallableProxyType, alias_path=None) return cls(name, parent) -def _combine_set_states(states: List[Tuple[str, StateT]]) -> Tuple[str, StateT]: - """Combines multiple set-states into a single set-state at a common parent path. - - Parameters - ---------- - states : list[tuple[str, StateT]] - List of (, ) tuples. - - Returns - ------- - tuple[str, StateT] - Common parent path, combined state. 
- """ - paths, _ = zip(*states) - common_path = [] - paths = [path.split("/") for path in paths] - for comps in zip(*paths): - if len(set(comps)) == 1: - common_path.append(comps[0]) - else: - break - combined_state = {} - for path, state in states: - comps = path.split("/") - comps = comps[len(common_path) :] - if comps: - if not isinstance(combined_state, dict): - combined_state = {} - obj = combined_state - for comp in comps[:-1]: - obj = obj.setdefault(comp, {}) - obj[comps[-1]] = state - else: - combined_state = state - return "/".join(common_path), combined_state - - class SettingsBase(Base, Generic[StateT]): """Base class for settings objects. @@ -696,7 +692,7 @@ class SettingsBase(Base, Generic[StateT]): """ @classmethod - def to_scheme_keys(cls, value: StateT) -> StateT: + def to_scheme_keys(cls, value: StateT, root_cls, path: list[str]) -> StateT: """Convert value to have keys with scheme names. This is overridden in the ``Group``, ``NamedObject``, and @@ -721,63 +717,6 @@ def get_state(self) -> StateT: """Get the state of the object.""" return self.to_python_keys(self.flproxy.get_var(self.path)) - # Following is not a classmethod, as parent (required to support ".." in alias-path) - # is available only at the instance level. - def _unalias(self, cls, value): - """Unalias the given value.""" - if isinstance(value, collections.abc.Mapping): - ret = {} - outer_set_states = [] - for k, v in value.items(): - if hasattr(cls, "_child_aliases") and k in cls._child_aliases: - alias = cls._child_aliases[k] - comps = alias.split("/") - if comps[0] == "..": - outer_obj = self - while comps[0] == "..": - outer_obj = outer_obj.parent - comps = comps[1:] - for comp in comps: - try: - outer_obj = getattr(outer_obj, comp) - except InactiveObjectError: - outer_obj = super( - SettingsBase, outer_obj - ).__getattribute__(comp) - outer_set_states.append((outer_obj, v)) - else: - ret_alias = ret - aliased_cls = cls - obj = self - for i, comp in enumerate(comps): - aliased_cls = aliased_cls._child_classes[comp] - try: - obj = getattr(obj, comp) - except InactiveObjectError: - obj = super(SettingsBase, obj).__getattribute__(comp) - if i == len(comps) - 1: - ret_alias[comp], o_set_states = obj._unalias( - aliased_cls, v - ) - outer_set_states.extend(o_set_states) - else: - ret_alias = ret_alias.setdefault(comp, {}) - else: - if issubclass(cls, Group): - ccls = cls._child_classes[k] - try: - cobj = getattr(self, k) - except InactiveObjectError: - cobj = super(SettingsBase, self).__getattribute__(k) - ret[k], o_set_states = cobj._unalias(ccls, v) - outer_set_states.extend(o_set_states) - else: - ret[k], o_set_states = self._unalias(cls, v) - outer_set_states.extend(o_set_states) - return ret, outer_set_states - else: - return value, [] - def set_state(self, state: StateT | None = None, **kwargs): """Set the state of the object.""" with self._while_setting_state(): @@ -786,17 +725,14 @@ def set_state(self, state: StateT | None = None, **kwargs): ): self.value.set_state(state, **kwargs) else: - state, outer_set_states = self._unalias(self.__class__, kwargs or state) - if outer_set_states: - set_states = [] - if state: - set_states.append((self.path, self.to_scheme_keys(state))) - for obj, state in outer_set_states: - set_states.append((obj.path, obj.to_scheme_keys(state))) - path, state = _combine_set_states(set_states) - self.flproxy.set_var(path, state) - else: - self.flproxy.set_var(self.path, self.to_scheme_keys(state)) + self.flproxy.set_var( + self.path, + self.to_scheme_keys( + kwargs or state, + 
self._root.__class__, + _get_python_path_comps(self), + ), + ) @staticmethod def _print_state_helper(state, out, indent=0, indent_factor=2): @@ -1022,7 +958,7 @@ def __call__(self, *args, **kwargs): return self.get_state() @classmethod - def to_scheme_keys(cls, value): + def to_scheme_keys(cls, value, root_cls, path: list[str]): """Convert value to have keys with scheme names. Raises @@ -1035,7 +971,15 @@ def to_scheme_keys(cls, value): for k, v in value.items(): if k in cls.child_names: ccls = cls._child_classes[k] - ret[ccls.fluent_name] = ccls.to_scheme_keys(v) + ret[ccls.fluent_name] = ccls.to_scheme_keys(v, root_cls, path + [k]) + elif k in cls._child_aliases: + alias, scm_alias_name = cls._child_aliases[k] + alias_cls, alias_path = _get_class_from_paths( + root_cls, path.copy(), alias.split("/") + ) + ret[scm_alias_name] = alias_cls.to_scheme_keys( + v, root_cls, alias_path + ) else: raise RuntimeError("Key '" + str(k) + "' is invalid") return ret @@ -1055,7 +999,7 @@ def to_python_keys(cls, value): ret[mname] = ccls.to_python_keys(mvalue) return ret else: - return value + return {} _child_classes = {} child_names = [] @@ -1122,6 +1066,7 @@ def __getattribute__(self, name): raise InactiveObjectError(self.python_path) alias = super().__getattribute__("_child_aliases").get(name) if alias: + alias = alias[0] alias_obj = self._child_alias_objs.get(name) if alias_obj is None: obj = self.find_object(alias) @@ -1135,27 +1080,18 @@ def __getattribute__(self, name): attr._check_stable() return attr except AttributeError as ex: - modified_search_results = [] - if use_search( - codegen_outdir=pyfluent.CODEGEN_OUTDIR, - version=super().__getattribute__("version"), - ): - search_results = pyfluent.utils._search( - word=name, - search_root=self, - match_case=False, - match_whole_word=False, - ) - if search_results: - for search_result in search_results: - search_result = search_result.replace( - "", self.__class__.__name__ - ) - modified_search_results.append(search_result) + pyfluent.PRINT_SEARCH_RESULTS = False + search_results = pyfluent.utils.search( + search_string=name, + match_case=False, + match_whole_word=False, + ) + pyfluent.PRINT_SEARCH_RESULTS = True + results = search_results if search_results else [] error_msg = allowed_name_error_message( trial_name=name, message=ex.args[0], - search_results=modified_search_results, + search_results=results, ) ex.args = (error_msg,) raise @@ -1251,18 +1187,14 @@ def __iter__(self): # get_state example: a.b["*"].c.d.get_state() == {"" {"c": {"d": }}} # set_state example: a.b["*"].set_state({"c": {"d": }}) - def to_scheme_keys(self, value): + def to_scheme_keys(self, value, root_cls, path): """Convert value to have keys with scheme names.""" - return self._settings_cls.to_scheme_keys(value) + return self._settings_cls.to_scheme_keys(value, root_cls, path) def to_python_keys(self, value): """Convert value to have keys with Python names.""" return self._state_cls.to_python_keys(value) - def _unalias(self, cls, value): - # Not yet implemented - return value, [] - class NamedObjectWildcardPath(WildcardPath): """WildcardPath at a NamedObject path, so it can be looked up by wildcard again.""" @@ -1314,12 +1246,12 @@ def __init__(self, name: str | None = None, parent=None): ) @classmethod - def to_scheme_keys(cls, value): + def to_scheme_keys(cls, value, root_cls, path: list[str]): """Convert value to have keys with scheme names.""" if isinstance(value, collections.abc.Mapping): ret = {} for k, v in value.items(): - ret[k] = 
cls.child_object_type.to_scheme_keys(v) + ret[k] = cls.child_object_type.to_scheme_keys(v, root_cls, path) return ret else: return value @@ -1333,7 +1265,7 @@ def to_python_keys(cls, value): ret[k] = cls.child_object_type.to_python_keys(v) return ret else: - return value + return {} _child_classes = {} command_names = [] @@ -1466,6 +1398,7 @@ def get(self, name: str) -> ChildTypeT: def __getattr__(self, name: str): alias = self._child_aliases.get(name) if alias: + alias = alias[0] alias_obj = self._child_alias_objs.get(name) if alias_obj is None: obj = self.find_object(alias) @@ -1525,10 +1458,12 @@ def __init__(self, name=None, parent=None): self._setattr(query, _create_child(cls, None, self)) @classmethod - def to_scheme_keys(cls, value): + def to_scheme_keys(cls, value, root_cls, path: list[str]): """Convert value to have keys with scheme names.""" if isinstance(value, collections.abc.Sequence): - return [cls.child_object_type.to_scheme_keys(v) for v in value] + return [ + cls.child_object_type.to_scheme_keys(v, root_cls, path) for v in value + ] else: return value @@ -1538,7 +1473,7 @@ def to_python_keys(cls, value): if isinstance(value, collections.abc.Sequence): return [cls.child_object_type.to_python_keys(v) for v in value] else: - return value + return [] _child_classes = {} command_names = [] @@ -1583,6 +1518,7 @@ def __setitem__(self, index: int, value): def __getattr__(self, name: str): alias = self._child_aliases.get(name) if alias: + alias = alias[0] alias_obj = self._child_alias_objs.get(name) if alias_obj is None: obj = self.find_object(alias) @@ -1673,6 +1609,7 @@ def get_completer_info(self, prefix="", excluded=None) -> List[List[str]]: def __getattr__(self, name: str): alias = self._child_aliases.get(name) if alias: + alias = alias[0] alias_obj = self._child_alias_objs.get(name) if alias_obj is None: obj = self.find_object(alias) @@ -1724,7 +1661,11 @@ def execute_command(self, *args, **kwds): command_name=self.python_name, value=value, kwargs=kwds ) # Convert key-value to Scheme key-value - scmKwds[argument.fluent_name] = argument.to_scheme_keys(value) + scmKwds[argument.fluent_name] = argument.to_scheme_keys( + value, + argument._root.__class__, + _get_python_path_comps(argument), + ) ret = self._execute_command(*args, **scmKwds) for arg, value in kwds.items(): argument = getattr(self, arg) @@ -1810,7 +1751,11 @@ def __call__(self, **kwds): for arg, value in kwds.items(): argument = getattr(self, arg) # Convert key-value to Scheme key-value - scmKwds[argument.fluent_name] = argument.to_scheme_keys(value) + scmKwds[argument.fluent_name] = argument.to_scheme_keys( + value, + argument._root.__class__, + _get_python_path_comps(argument), + ) return self.flproxy.execute_query(self._parent.path, self.obj_name, **scmKwds) @@ -2162,8 +2107,13 @@ def _process_cls_names(info_dict, names, write_doc=False): for k, v in ( child_aliases | command_aliases | query_aliases | argument_aliases ).items(): - cls._child_aliases[to_python_name(k)] = "/".join( - x if x == ".." else to_python_name(x) for x in v.split("/") + # Storing the original name as we don't have any other way + # to recover it at runtime. + cls._child_aliases[to_python_name(k)] = ( + "/".join( + x if x == ".." else to_python_name(x) for x in v.split("/") + ), + k, ) except Exception: @@ -2212,30 +2162,17 @@ def get_root( RuntimeError If hash values are inconsistent. 
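The loading strategy in ``get_root`` below reads as: prefer the pre-generated settings module on disk, otherwise rebuild the class hierarchy from the running server. A condensed, standalone sketch of that fallback (version tag hypothetical; the live-server branch stubbed out):

    from ansys.fluent.core import CODEGEN_OUTDIR, utils

    version = "252"  # hypothetical Fluent version tag
    try:
        settings = utils.load_module(
            f"settings_{version}", CODEGEN_OUTDIR / "solver" / f"settings_{version}.py"
        )
        root_cls = settings.root
    except FileNotFoundError:
        root_cls = None  # a live session would rebuild via flproxy.get_static_info()
    print(root_cls)
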
""" - from ansys.fluent.core import CODEGEN_OUTDIR, CODEGEN_ZIP_SETTINGS, utils + from ansys.fluent.core import CODEGEN_OUTDIR, utils - if os.getenv("PYFLUENT_USE_OLD_SETTINGSGEN") != "1": - try: - settings = utils.load_module( - f"settings_{version}", - CODEGEN_OUTDIR / "solver" / f"settings_{version}.py", - ) - root_cls = settings.root - except FileNotFoundError: - obj_info = flproxy.get_static_info() - root_cls, _ = get_cls("", obj_info, version=version) - else: - if CODEGEN_ZIP_SETTINGS: - importer = zipimporter( - str(CODEGEN_OUTDIR / "solver" / f"settings_{version}.zip") - ) - settings = importer.load_module("settings") - else: - settings = utils.load_module( - f"settings_{version}", - CODEGEN_OUTDIR / "solver" / f"settings_{version}" / "__init__.py", - ) + try: + settings = utils.load_module( + f"settings_{version}", + CODEGEN_OUTDIR / "solver" / f"settings_{version}.py", + ) root_cls = settings.root + except FileNotFoundError: + obj_info = flproxy.get_static_info() + root_cls, _ = get_cls("", obj_info, version=version) root = root_cls() root.set_flproxy(flproxy) root._set_on_interrupt(interrupt) diff --git a/src/ansys/fluent/core/solver/function/reduction.py b/src/ansys/fluent/core/solver/function/reduction.py index 12107b38f3f..646d89eca6f 100644 --- a/src/ansys/fluent/core/solver/function/reduction.py +++ b/src/ansys/fluent/core/solver/function/reduction.py @@ -6,7 +6,7 @@ Parameters ---------- -expr : Any +expression : Any Expression that can be either a string or an instance of a specific settings API named_expressions object. The expression can be a field variable or a @@ -123,6 +123,9 @@ def _validate_locn_list(locn_list, ctxt): def _locns(locns, ctxt): + if locns == []: + # Raising 'RuntimeError' instead of 'ValueError' to address a limitation in the server-side implementation. 
+ raise RuntimeError("No locations specified.") locn_names_and_objs = _locn_names_and_objs(locns) locn_list = [] for name, obj in locn_names_and_objs: @@ -217,7 +220,7 @@ def _extent_moment_vector(f_string, expr, locations, ctxt): total += array(extent) except TypeError: raise RuntimeError(extent) - return _Vector(total) + return tuple(total) def _extent_average(extent_name, expr, locations, ctxt): @@ -251,27 +254,7 @@ def _extent_vectors(extent_name, locations, ctxt): total += array(extent) except TypeError: raise RuntimeError(extent) - return _Vector(total) - - -class _Vector: - def __init__(self, np_arr): - self.array = np_arr - - @property - def x(self): - """Get X vector.""" - return self.array[0] - - @property - def y(self): - """Get Y vector.""" - return self.array[1] - - @property - def z(self): - """Get Z vector.""" - return self.array[2] + return tuple(total) def _limit(limit, expr, locations, ctxt): @@ -401,7 +384,7 @@ def centroid(locations, ctxt=None): ctxt : Any, optional Returns ------- - float + tuple """ return _extent_vectors("Centroid", locations, ctxt) @@ -416,7 +399,7 @@ def force(locations, ctxt=None): ctxt : Any, optional Returns ------- - float + tuple """ return _extent_vectors("Force", locations, ctxt) @@ -431,7 +414,7 @@ def pressure_force(locations, ctxt=None): ctxt : Any, optional Returns ------- - float + tuple """ return _extent_vectors("PressureForce", locations, ctxt) @@ -446,7 +429,7 @@ def viscous_force(locations, ctxt=None): ctxt : Any, optional Returns ------- - float + tuple """ return _extent_vectors("ViscousForce", locations, ctxt) @@ -462,7 +445,7 @@ def moment(expression, locations, ctxt=None): ctxt : Any, optional Returns ------- - float + tuple """ return _extent_moment_vector("Moment", expression, locations, ctxt) diff --git a/src/ansys/fluent/core/solver/settings_builtin_data.py b/src/ansys/fluent/core/solver/settings_builtin_data.py index e680a5f5743..afa98f31660 100644 --- a/src/ansys/fluent/core/solver/settings_builtin_data.py +++ b/src/ansys/fluent/core/solver/settings_builtin_data.py @@ -1,6 +1,6 @@ """Data for for builtin setting classes.""" -from ansys.fluent.core import FluentVersion +from ansys.fluent.core.utils.fluent_version import FluentVersion # {: (, )} DATA = { diff --git a/src/ansys/fluent/core/solver/settings_external.py b/src/ansys/fluent/core/solver/settings_external.py index 89940e4e3e1..1ebd512944e 100644 --- a/src/ansys/fluent/core/solver/settings_external.py +++ b/src/ansys/fluent/core/solver/settings_external.py @@ -1,8 +1,5 @@ """Miscellaneous utility functions.""" -import os -import re - def expand_api_file_argument(command_name, value, kwargs): """Expand API file argument.""" @@ -14,28 +11,3 @@ def expand_api_file_argument(command_name, value, kwargs): return [value, data_file] else: return [value] - - -def use_search(codegen_outdir: str, version: str): - """Whether to use ``_search()`` in the error handling. - - Parameters - ---------- - codegen_outdir: str - Codegen directory. - version: str - Fluent version. 
- """ - fluent_version_str = version - fluent_version_int = int(fluent_version_str.replace(".", "")[0:3]) - api_tree_files = [ - file for file in os.listdir(codegen_outdir) if file.endswith("pickle") - ] - api_tree_file_versions = [ - int(re.findall(r"\d+", file)[0]) for file in api_tree_files - ] - latest_api_tree_version = max(api_tree_file_versions) - if len(api_tree_files) == 1 and fluent_version_int == latest_api_tree_version: - return True - else: - return False diff --git a/src/ansys/fluent/core/streaming_services/datamodel_event_streaming.py b/src/ansys/fluent/core/streaming_services/datamodel_event_streaming.py index 5c0ff5e1363..1627d8870d1 100644 --- a/src/ansys/fluent/core/streaming_services/datamodel_event_streaming.py +++ b/src/ansys/fluent/core/streaming_services/datamodel_event_streaming.py @@ -66,12 +66,12 @@ def _process_streaming(self, id, stream_begin_method, started_evt, *args, **kwar elif response.HasField("commandAttributeChangedEventResponse"): value = response.commandAttributeChangedEventResponse.value cb[1](_convert_variant_to_value(value)) - elif ( - response.HasField("modifiedEventResponse") - or response.HasField("deletedEventResponse") - or response.HasField("affectedEventResponse") - ): + elif response.HasField( + "modifiedEventResponse" + ) or response.HasField("affectedEventResponse"): cb[1](cb[0]) + elif response.HasField("deletedEventResponse"): + cb[1]() elif response.HasField("commandExecutedEventResponse"): command = response.commandExecutedEventResponse.command args = _convert_variant_to_value( diff --git a/src/ansys/fluent/core/streaming_services/events_streaming.py b/src/ansys/fluent/core/streaming_services/events_streaming.py index a7b79a6f59e..c0f11dd4879 100644 --- a/src/ansys/fluent/core/streaming_services/events_streaming.py +++ b/src/ansys/fluent/core/streaming_services/events_streaming.py @@ -1,5 +1,6 @@ """Module for events management.""" +from dataclasses import dataclass, field, fields from enum import Enum from functools import partial import inspect @@ -7,11 +8,41 @@ from typing import Callable, Generic, Literal, Type, TypeVar import warnings +from google.protobuf.json_format import MessageToDict + from ansys.api.fluent.v0 import events_pb2 as EventsProtoModule from ansys.fluent.core.exceptions import InvalidArgument from ansys.fluent.core.streaming_services.streaming import StreamingService from ansys.fluent.core.warnings import PyFluentDeprecationWarning +__all__ = [ + "EventsManager", + "Event", + "SolverEvent", + "MeshingEvent", + "TimestepStartedEventInfo", + "TimestepEndedEventInfo", + "IterationEndedEventInfo", + "CalculationsStartedEventInfo", + "CalculationsEndedEventInfo", + "CalculationsPausedEventInfo", + "CalculationsResumedEventInfo", + "AboutToLoadCaseEventInfo", + "CaseLoadedEventInfo", + "AboutToLoadDataEventInfo", + "DataLoadedEventInfo", + "AboutToInitializeSolutionEventInfo", + "SolutionInitializedEventInfo", + "ReportDefinitionUpdatedEventInfo", + "ReportPlotSetUpdatedEventInfo", + "ResidualPlotUpdatedEventInfo", + "SettingsClearedEventInfo", + "SolutionPausedEventInfo", + "ProgressUpdatedEventInfo", + "SolverTimeEstimateUpdatedEventInfo", + "FatalErrorEventInfo", +] + network_logger = logging.getLogger("pyfluent.networking") @@ -70,6 +101,269 @@ def _missing_(cls, value: str): return _missing_for_events(cls, value) +class EventInfoBase: + """Base class for event information classes.""" + + derived_classes = {} + + def __init_subclass__(cls, event, **kwargs): + super().__init_subclass__(**kwargs) + 
cls.derived_classes[event] = cls + + def __post_init__(self): + for f in fields(self): + # Cast to the correct type + setattr(self, f.name, f.type(getattr(self, f.name))) + + def __getattr__(self, name): + for f in fields(self): + if f.metadata.get("deprecated_name") == name: + warnings.warn( + f"'{name}' is deprecated. Use '{f.name}' instead.", + PyFluentDeprecationWarning, + ) + return getattr(self, f.name) + return self.__getattribute__(name) + + +@dataclass +class TimestepStartedEventInfo(EventInfoBase, event=SolverEvent.TIMESTEP_STARTED): + """Information about the event triggered when a timestep is started. + + Attributes + ---------- + index : int + Timestep index. + size : float + Timestep size. + """ + + index: int + size: float + + +@dataclass +class TimestepEndedEventInfo(EventInfoBase, event=SolverEvent.TIMESTEP_ENDED): + """Information about the event triggered when a timestep is ended. + + Attributes + ---------- + index : int + Timestep index. + size : float + Timestep size. + """ + + index: int + size: float + + +@dataclass +class IterationEndedEventInfo(EventInfoBase, event=SolverEvent.ITERATION_ENDED): + """Information about the event triggered when an iteration is ended. + + Attributes + ---------- + index : int + Iteration index. + """ + + index: int + + +class CalculationsStartedEventInfo( + EventInfoBase, event=SolverEvent.CALCULATIONS_STARTED +): + """Information about the event triggered when calculations are started.""" + + +class CalculationsEndedEventInfo(EventInfoBase, event=SolverEvent.CALCULATIONS_ENDED): + """Information about the event triggered when calculations are ended.""" + + +class CalculationsPausedEventInfo(EventInfoBase, event=SolverEvent.CALCULATIONS_PAUSED): + """Information about the event triggered when calculations are paused.""" + + +class CalculationsResumedEventInfo( + EventInfoBase, event=SolverEvent.CALCULATIONS_RESUMED +): + """Information about the event triggered when calculations are resumed.""" + + +@dataclass +class AboutToLoadCaseEventInfo(EventInfoBase, event=SolverEvent.ABOUT_TO_LOAD_CASE): + """Information about the event triggered just before a case file is loaded. + + Attributes + ---------- + case_file_name : str + Case filename. + """ + + case_file_name: str = field(metadata=dict(deprecated_name="casefilepath")) + + +@dataclass +class CaseLoadedEventInfo(EventInfoBase, event=SolverEvent.CASE_LOADED): + """Information about the event triggered after a case file is loaded. + + Attributes + ---------- + case_file_name : str + Case filename. + """ + + case_file_name: str = field(metadata=dict(deprecated_name="casefilepath")) + + +@dataclass +class AboutToLoadDataEventInfo(EventInfoBase, event=SolverEvent.ABOUT_TO_LOAD_DATA): + """Information about the event triggered just before a data file is loaded. + + Attributes + ---------- + data_file_name : str + Data filename. + """ + + data_file_name: str = field(metadata=dict(deprecated_name="datafilepath")) + + +@dataclass +class DataLoadedEventInfo(EventInfoBase, event=SolverEvent.DATA_LOADED): + """Information about the event triggered after a data file is loaded. + + Attributes + ---------- + data_file_name : str + Data filename. 
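A consumption sketch for these typed payloads (an illustration: ``solver`` is a hypothetical live session exposing its EventsManager as ``solver.events``):

    from ansys.fluent.core.streaming_services.events_streaming import SolverEvent

    def on_data_loaded(session, event_info):
        print(event_info.data_file_name)  # new dataclass attribute
        # The old proto-style name still resolves via __getattr__, with a
        # PyFluentDeprecationWarning:
        print(event_info.datafilepath)

    solver.events.register_callback(SolverEvent.DATA_LOADED, on_data_loaded)
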
+ """ + + data_file_name: str = field(metadata=dict(deprecated_name="datafilepath")) + + +class AboutToInitializeSolutionEventInfo( + EventInfoBase, event=SolverEvent.ABOUT_TO_INITIALIZE_SOLUTION +): + """Information about the event triggered just before solution is initialized.""" + + +class SolutionInitializedEventInfo( + EventInfoBase, event=SolverEvent.SOLUTION_INITIALIZED +): + """Information about the event triggered after solution is initialized.""" + + +@dataclass +class ReportDefinitionUpdatedEventInfo( + EventInfoBase, event=SolverEvent.REPORT_DEFINITION_UPDATED +): + """Information about the event triggered when a report definition is updated. + + Attributes + ---------- + report_name : str + Report name. + """ + + report_name: str = field(metadata=dict(deprecated_name="reportdefinitionname")) + + +@dataclass +class ReportPlotSetUpdatedEventInfo( + EventInfoBase, event=SolverEvent.REPORT_PLOT_SET_UPDATED +): + """Information about the event triggered when a report plot set is updated. + + Attributes + ---------- + plot_set_name : str + Plot set name. + """ + + plot_set_name: str = field(metadata=dict(deprecated_name="plotsetname")) + + +class ResidualPlotUpdatedEventInfo( + EventInfoBase, event=SolverEvent.RESIDUAL_PLOT_UPDATED +): + """Information about the event triggered when residual plots are updated.""" + + +class SettingsClearedEventInfo(EventInfoBase, event=SolverEvent.SETTINGS_CLEARED): + """Information about the event triggered when settings are cleared.""" + + +@dataclass +class SolutionPausedEventInfo(EventInfoBase, event=SolverEvent.SOLUTION_PAUSED): + """Information about the event triggered when solution is paused. + + Attributes + ---------- + level : str + Level of the pause event. + index : int + Index of the pause event. + """ + + level: str + index: int + + +@dataclass +class ProgressUpdatedEventInfo(EventInfoBase, event=SolverEvent.PROGRESS_UPDATED): + """Information about the event triggered when progress is updated. + + Attributes + ---------- + message : str + Progress message. + percentage : int + Progress percentage. + """ + + message: str + percentage: int = field(metadata=dict(deprecated_name="percentComplete")) + + +@dataclass +class SolverTimeEstimateUpdatedEventInfo( + EventInfoBase, event=SolverEvent.SOLVER_TIME_ESTIMATE_UPDATED +): + """Information about the event triggered when solver time estimate is updated. + + Attributes + ---------- + hours : float + Hours of solver time estimate. + minutes : float + Minutes of solver time estimate. + seconds : float + Seconds of solver time estimate. + """ + + hours: float + minutes: float + seconds: float + + +@dataclass +class FatalErrorEventInfo(EventInfoBase, event=SolverEvent.FATAL_ERROR): + """Information about the event triggered when a fatal error occurs. + + Attributes + ---------- + message : str + Error message. + error_code : int + Error code. 
+ """ + + message: str + error_code: int = field(metadata=dict(deprecated_name="errorCode")) + + TEvent = TypeVar("TEvent") @@ -100,6 +394,18 @@ def __init__( self._session = session self._sync_event_ids = {} + def _construct_event_info( + self, response: EventsProtoModule.BeginStreamingResponse, event: TEvent + ): + event_info_msg = getattr(response, event.value.lower()) + event_info_dict = MessageToDict( + event_info_msg, including_default_value_fields=True + ) + solver_event = SolverEvent(event.value) + event_info_cls = EventInfoBase.derived_classes.get(solver_event) + # Key names can be different, but their order is the same + return event_info_cls(*event_info_dict.values()) + def _process_streaming( self, service, id, stream_begin_method, started_evt, *args, **kwargs ): @@ -129,30 +435,33 @@ def _process_streaming( for callback in callbacks_map.values(): callback( session=self._session, - event_info=getattr(response, event_name.value.lower()), + event_info=self._construct_event_info(response, event_name), ) except StopIteration: break @staticmethod def _make_callback_to_call(callback: Callable, args, kwargs): - old_style = "session_id" in inspect.signature(callback).parameters - if old_style: + params = inspect.signature(callback).parameters + if "session_id" in params: warnings.warn( "Update event callback function signatures" " substituting 'session' for 'session_id'.", PyFluentDeprecationWarning, ) - fn = partial(callback, *args, **kwargs) - return ( - ( - lambda session, event_info: fn( - session_id=session.id, event_info=event_info - ) + return lambda session, event_info: callback( + *args, session_id=session.id, event_info=event_info, **kwargs + ) + else: + positional_args = [ + p + for p in params + if p not in kwargs and p not in ("session", "event_info") + ] + kwargs.update(dict(zip(positional_args, args))) + return lambda session, event_info: callback( + session=session, event_info=event_info, **kwargs ) - if old_style - else fn - ) def register_callback( self, @@ -229,8 +538,8 @@ def unregister_callback(self, callback_id: str): del callbacks_map[callback_id] sync_event_id = self._sync_event_ids.pop(callback_id, None) if sync_event_id: - self._session.scheme_eval.scheme_eval( - f"(cancel-solution-monitor 'pyfluent-{sync_event_id})" + self._session._app_utilities.unregister_pause_on_solution_events( + registration_id=sync_event_id ) def start(self, *args, **kwargs) -> None: @@ -247,44 +556,20 @@ def _register_solution_event_sync_callback( callback_id: str, callback: Callable, ) -> tuple[Literal[SolverEvent.SOLUTION_PAUSED], Callable]: - unique_id = self._session.scheme_eval.scheme_eval( - f""" - (let - ((ids - (let loop ((i 1)) - (define next-id (string->symbol (format #f "pyfluent-~d" i))) - (if (check-monitor-existence next-id) - (loop (1+ i)) - (list i next-id) - ) - ) - )) - (register-solution-monitor - (cadr ids) - (lambda (niter time) - (if (integer? niter) - (begin - (events/transmit 'auto-pause (cons (car ids) niter)) - (grpcserver/auto-pause (is-server-running?) 
(cadr ids)) - ) - ) - () - ) - {'#t' if event_type == SolverEvent.TIMESTEP_ENDED else '#f'} - ) - (car ids) - ) - """ + unique_id: int = self._session._app_utilities.register_pause_on_solution_events( + solution_event=event_type ) - def on_pause(session, event_info: EventsProtoModule.AutoPauseEvent): - if unique_id == event_info.level: - event_info_cls = ( - EventsProtoModule.TimestepEndedEvent - if event_type == SolverEvent.TIMESTEP_ENDED - else EventsProtoModule.IterationEndedEvent - ) - event_info = event_info_cls(index=event_info.index) + def on_pause(session, event_info: SolutionPausedEventInfo): + if unique_id == int(event_info.level): + if event_type == SolverEvent.ITERATION_ENDED: + event_info = IterationEndedEventInfo(index=event_info.index) + else: + event_info = TimestepEndedEventInfo( + # TODO: Timestep size is currently not available + index=event_info.index, + size=0, + ) try: callback(session, event_info) except Exception as e: @@ -293,8 +578,8 @@ def on_pause(session, event_info: EventsProtoModule.AutoPauseEvent): exc_info=True, ) finally: - session.scheme_eval.scheme_eval( - f"(grpcserver/auto-resume (is-server-running?) 'pyfluent-{unique_id})" + session._app_utilities.resume_on_solution_event( + registration_id=unique_id ) self._sync_event_ids[callback_id] = unique_id diff --git a/src/ansys/fluent/core/utils/__init__.py b/src/ansys/fluent/core/utils/__init__.py index ee9f5549fea..b6e5bdd73d2 100644 --- a/src/ansys/fluent/core/utils/__init__.py +++ b/src/ansys/fluent/core/utils/__init__.py @@ -5,7 +5,7 @@ from pathlib import Path import sys -from ansys.fluent.core.search import _search # noqa: F401 +from ansys.fluent.core.search import search # noqa: F401 logger = logging.getLogger("pyfluent.general") diff --git a/tests/conftest.py b/tests/conftest.py index 5133fdf27c4..d55e3c903dd 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,7 +1,11 @@ from contextlib import nullcontext import functools +import inspect import operator import os +from pathlib import Path +import shutil +import sys from packaging.specifiers import SpecifierSet from packaging.version import Version @@ -12,6 +16,8 @@ from ansys.fluent.core.utils.file_transfer_service import RemoteFileTransferStrategy from ansys.fluent.core.utils.fluent_version import FluentVersion +sys.path.append(Path(__file__).parent / "util") + def pytest_addoption(parser): parser.addoption( @@ -26,6 +32,12 @@ def pytest_addoption(parser): parser.addoption( "--solvermode", action="store_true", default=False, help="run solvermode tests" ) + parser.addoption( + "--write-fluent-journals", + action="store_true", + default=False, + help="Write Fluent journals for unittests", + ) def pytest_runtest_setup(item): @@ -65,6 +77,75 @@ def pytest_runtest_setup(item): pytest.skip() +def pytest_collection_finish(session): + if session.config.getoption("--write-fluent-journals"): + import_path = Path(__file__).parent + sys.path.append(str(import_path)) + import fluent_fixtures + + launcher_args_by_fixture = {} + for k, v in fluent_fixtures.__dict__.items(): + if hasattr(v, "fluent_launcher_args"): + launcher_args_by_fixture[k] = v.fluent_launcher_args + fluent_test_root = import_path / "fluent" + shutil.rmtree(fluent_test_root, ignore_errors=True) + for item in session.items: + skip = False + for mark in item.iter_markers(name="skip"): + skip = True + for mark in item.iter_markers(name="fluent_version"): + spec = mark.args[0] + # TODO: Support older versions + if not ( + spec == "latest" + or Version(FluentVersion.current_dev().value) in 
SpecifierSet(spec) + ): + skip = True + if skip: + continue + fluent_test_dir = fluent_test_root / item.module.__name__ / item.name + fluent_test_config = fluent_test_dir / "test.yaml" + fluent_test_file = fluent_test_dir / "test.py" + launcher_args = "" + parameters = inspect.signature(item.function).parameters + parameter_set = {p for p in parameters} + if not (parameter_set & set(launcher_args_by_fixture.keys())): + # Skipping as unittest doesn't use fluent fixture + continue + for param in parameters: + if param not in dir(fluent_fixtures): + print(f"Skipping {item.nodeid} because of missing fixture {param}") + skip = True + break + if skip: + continue + for param in parameters: + if param in launcher_args_by_fixture: + launcher_args = launcher_args_by_fixture[param] + break + fluent_test_dir.mkdir(parents=True, exist_ok=True) + with open(fluent_test_config, "w") as f: + f.write(f"launcher_args: {launcher_args}\n") + with open(fluent_test_file, "w") as f: + f.write("import sys\n") + f.write('sys.path.append("/testing")\n') + f.write( + f"from {item.module.__name__} import {item.name} # noqa: E402\n" + ) + f.write("from fluent_fixtures import ( # noqa: E402\n") + for param in parameters: + f.write(f" {param},\n") + f.write(")\n") + f.write("\n") + f.write(f"{item.name}(") + f.write(", ".join([f"{p}(globals())" for p in parameters])) + f.write(")\n") + f.write("exit()\n") + print(f"Written {fluent_test_file}") + session.items = [] + session.testscollected = 0 + + @pytest.fixture(autouse=True) def run_before_each_test( monkeypatch: pytest.MonkeyPatch, request: pytest.FixtureRequest @@ -202,6 +283,13 @@ def new_solver_session(): solver.exit() +@pytest.fixture +def new_solver_session_t4(): + solver = create_session(processor_count=4) + yield solver + solver.exit() + + @pytest.fixture def new_solver_session_sp(): solver = create_session(precision="single") @@ -248,6 +336,14 @@ def mixing_elbow_settings_session(new_solver_session): return solver +@pytest.fixture +def mixing_elbow_case_session_t4(new_solver_session_t4): + solver = new_solver_session_t4 + case_name = download_file("mixing_elbow.cas.h5", "pyfluent/mixing_elbow") + solver.settings.file.read(file_type="case", file_name=case_name) + return solver + + @pytest.fixture def mixing_elbow_case_data_session(new_solver_session): solver = new_solver_session @@ -304,3 +400,16 @@ def periodic_rot_settings_session(new_solver_session): @pytest.fixture def disable_datamodel_cache(monkeypatch: pytest.MonkeyPatch): monkeypatch.setattr(pyfluent, "DATAMODEL_USE_STATE_CACHE", False) + + +@pytest.fixture(params=["old", "new"]) +def datamodel_api_version_all(request, monkeypatch: pytest.MonkeyPatch) -> None: + if request.param == "new": + monkeypatch.setenv("REMOTING_NEW_DM_API", "1") + monkeypatch.setenv("REMOTING_MAPPED_NEW_DM_API", "1") + + +@pytest.fixture +def datamodel_api_version_new(monkeypatch: pytest.MonkeyPatch) -> None: + monkeypatch.setenv("REMOTING_NEW_DM_API", "1") + monkeypatch.setenv("REMOTING_MAPPED_NEW_DM_API", "1") diff --git a/tests/fluent/test_assert/test.py b/tests/fluent/test_assert/test.py deleted file mode 100644 index dcdd53f9d8c..00000000000 --- a/tests/fluent/test_assert/test.py +++ /dev/null @@ -1,2 +0,0 @@ -assert False, "Expected exception" -exit() diff --git a/tests/fluent/test_meshing_workflow/test.py b/tests/fluent/test_meshing_workflow/test.py deleted file mode 100644 index efa1e2b321e..00000000000 --- a/tests/fluent/test_meshing_workflow/test.py +++ /dev/null @@ -1,21 +0,0 @@ -from ansys.fluent.core.examples import 
diff --git a/tests/fluent/test_assert/test.py b/tests/fluent/test_assert/test.py
deleted file mode 100644
index dcdd53f9d8c..00000000000
--- a/tests/fluent/test_assert/test.py
+++ /dev/null
@@ -1,2 +0,0 @@
-assert False, "Expected exception"
-exit()
diff --git a/tests/fluent/test_meshing_workflow/test.py b/tests/fluent/test_meshing_workflow/test.py
deleted file mode 100644
index efa1e2b321e..00000000000
--- a/tests/fluent/test_meshing_workflow/test.py
+++ /dev/null
@@ -1,21 +0,0 @@
-from ansys.fluent.core.examples import download_file
-
-geometry_file = download_file("mixing_elbow.pmdb", "pyfluent/mixing_elbow")
-watertight = meshing.watertight()  # noqa: F821
-watertight.import_geometry.file_name.set_state(geometry_file)
-assert watertight.import_geometry.length_unit() == "mm"
-watertight.import_geometry.length_unit = "in"
-assert watertight.import_geometry.length_unit() == "in"
-assert watertight.import_geometry.cad_import_options.feature_angle() == 40.0
-watertight.import_geometry.cad_import_options.feature_angle.set_state(25.0)
-assert watertight.import_geometry.cad_import_options.feature_angle() == 25.0
-assert watertight.import_geometry.cad_import_options.one_zone_per.allowed_values() == [
-    "body",
-    "face",
-    "object",
-]
-assert watertight.import_geometry.cad_import_options.one_zone_per() == "body"
-watertight.import_geometry.cad_import_options.one_zone_per = "face"
-assert watertight.import_geometry.cad_import_options.one_zone_per() == "face"
-watertight.import_geometry()
-exit()
diff --git a/tests/fluent/test_meshing_workflow/test.yaml b/tests/fluent/test_meshing_workflow/test.yaml
deleted file mode 100644
index a78241353e1..00000000000
--- a/tests/fluent/test_meshing_workflow/test.yaml
+++ /dev/null
@@ -1 +0,0 @@
-launcher_args: -meshing
diff --git a/tests/fluent/test_settings_api/test.py b/tests/fluent/test_settings_api/test.py
deleted file mode 100644
index b695111bdcd..00000000000
--- a/tests/fluent/test_settings_api/test.py
+++ /dev/null
@@ -1,12 +0,0 @@
-from ansys.fluent.core.examples import download_file
-
-case_name = download_file("mixing_elbow.cas.h5", "pyfluent/mixing_elbow")
-solver.settings.file.read_case(file_name=case_name)  # noqa: F821
-viscous_settings = solver.settings.setup.models.viscous  # noqa: F821
-assert viscous_settings.model() == "k-omega"
-allowed_values = viscous_settings.model.allowed_values()
-assert "k-epsilon" in allowed_values
-assert len(allowed_values) > 5
-viscous_settings.model = "k-epsilon"
-assert viscous_settings.model() == "k-epsilon"
-exit()
diff --git a/tests/fluent/test_version/test.py b/tests/fluent/test_version/test.py
deleted file mode 100644
index f2c0611dff0..00000000000
--- a/tests/fluent/test_version/test.py
+++ /dev/null
@@ -1,2 +0,0 @@
-assert ansys.fluent.core.__version__ == "0.27.dev0"  # noqa: F821
-exit()
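The hand-written journals deleted above are superseded by journals generated from the regular pytest suite. The `launcher_args_by_fixture` mapping consumed by the collection hook is presumably derived from the `fluent_launcher_args` attributes that the decorator in `tests/fluent_fixtures.py` (added below) attaches to each fixture function; a hypothetical reconstruction, since that wiring is not shown in this hunk:

    import fluent_fixtures

    # sketch only; the actual mapping is built elsewhere in conftest.py
    launcher_args_by_fixture = {
        name: func.fluent_launcher_args
        for name, func in vars(fluent_fixtures).items()
        if callable(func) and hasattr(func, "fluent_launcher_args")
    }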
diff --git a/tests/fluent_fixtures.py b/tests/fluent_fixtures.py
new file mode 100644
index 00000000000..27562a3a091
--- /dev/null
+++ b/tests/fluent_fixtures.py
@@ -0,0 +1,195 @@
+from typing import Callable
+
+import pytest
+
+import ansys.fluent.core as pyfluent
+from ansys.fluent.core.data_model_cache import DataModelCache
+from ansys.fluent.core.examples import download_file
+
+
+def fluent_launcher_args(args: str):
+    def fluent_launcher_args_inner(f: Callable):
+        def wrapper(*args, **kwargs):
+            return f(*args, **kwargs)
+
+        wrapper.fluent_launcher_args = args
+        return wrapper
+
+    return fluent_launcher_args_inner
+
+
+def mixing_elbow_geometry_filename(globals):
+    return download_file(
+        file_name="mixing_elbow.pmdb", directory="pyfluent/mixing_elbow"
+    )
+
+
+def exhaust_system_geometry_filename(globals):
+    return download_file(
+        file_name="exhaust_system.fmd", directory="pyfluent/exhaust_system"
+    )
+
+
+@fluent_launcher_args("3ddp -meshing")
+def new_meshing_session(globals):
+    meshing = globals["meshing"]
+    return meshing
+
+
+@fluent_launcher_args("3ddp -meshing")
+def new_pure_meshing_session(globals):
+    return new_meshing_session(globals)
+
+
+@fluent_launcher_args("3ddp -meshing")
+def watertight_workflow_session(globals):
+    meshing = new_meshing_session(globals)
+    meshing.workflow.InitializeWorkflow(WorkflowType="Watertight Geometry")
+    return meshing
+
+
+@fluent_launcher_args("3ddp -meshing")
+def fault_tolerant_workflow_session(globals):
+    meshing = new_meshing_session(globals)
+    meshing.workflow.InitializeWorkflow(WorkflowType="Fault-tolerant Meshing")
+    return meshing
+
+
+@fluent_launcher_args("3ddp -meshing")
+def mixing_elbow_watertight_pure_meshing_session(globals):
+    meshing = new_pure_meshing_session(globals)
+    geometry_filename = mixing_elbow_geometry_filename(globals)
+    meshing.workflow.InitializeWorkflow(WorkflowType="Watertight Geometry")
+    meshing.workflow.TaskObject["Import Geometry"].Arguments = dict(
+        FileName=geometry_filename, LengthUnit="in"
+    )
+    return meshing
+
+
+@fluent_launcher_args("3ddp")
+def new_solver_session(globals):
+    solver = globals["solver"]
+    return solver
+
+
+@fluent_launcher_args("3d")
+def new_solver_session_sp(globals):
+    return new_solver_session(globals)
+
+
+@fluent_launcher_args("2ddp")
+def new_solver_session_2d(globals):
+    return new_solver_session(globals)
+
+
+@fluent_launcher_args("3ddp")
+def static_mixer_settings_session(globals):
+    solver = new_solver_session(globals)
+    case_name = download_file("Static_Mixer_main.cas.h5", "pyfluent/static_mixer")
+    solver.file.read(
+        file_type="case",
+        file_name=case_name,
+        lightweight_setup=True,
+    )
+    return solver
+
+
+@fluent_launcher_args("3ddp")
+def static_mixer_case_session(globals):
+    solver = new_solver_session(globals)
+    case_name = download_file("Static_Mixer_main.cas.h5", "pyfluent/static_mixer")
+    solver.file.read(file_type="case", file_name=case_name)
+    return solver
+
+
+@fluent_launcher_args("3ddp")
+def mixing_elbow_settings_session(globals):
+    solver = new_solver_session(globals)
+    case_name = download_file("mixing_elbow.cas.h5", "pyfluent/mixing_elbow")
+    solver.settings.file.read(
+        file_type="case",
+        file_name=case_name,
+        lightweight_setup=True,
+    )
+    return solver
+
+
+@fluent_launcher_args("3ddp")
+def mixing_elbow_case_data_session(globals):
+    solver = new_solver_session(globals)
+    case_name = download_file("mixing_elbow.cas.h5", "pyfluent/mixing_elbow")
+    download_file("mixing_elbow.dat.h5", "pyfluent/mixing_elbow")
+    solver.settings.file.read(file_type="case-data", file_name=case_name)
+    return solver
+
+
+@fluent_launcher_args("3ddp")
+def mixing_elbow_param_case_data_session(globals):
+    solver = new_solver_session(globals)
+    case_name = download_file("elbow_param.cas.h5", "pyfluent/mixing_elbow")
+    download_file("elbow_param.dat.h5", "pyfluent/mixing_elbow")
+    solver.settings.file.read(file_type="case-data", file_name=case_name)
+    return solver
+
+
+@fluent_launcher_args("2ddp")
+def disk_settings_session(globals):
+    solver = new_solver_session_2d(globals)
+    case_name = download_file("disk.cas.h5", "pyfluent/rotating_disk")
+    solver.file.read(
+        file_type="case",
+        file_name=case_name,
+        lightweight_setup=True,
+    )
+    return solver
+
+
+@fluent_launcher_args("2ddp")
+def disk_case_session(globals):
+    solver = new_solver_session_2d(globals)
+    case_name = download_file("disk.cas.h5", "pyfluent/rotating_disk")
+    solver.file.read(file_type="case", file_name=case_name)
+    return solver
+
+
+@fluent_launcher_args("3ddp")
+def periodic_rot_settings_session(globals):
+    solver = new_solver_session(globals)
+    case_name = download_file(
+        "periodic_rot.cas.h5",
+        "pyfluent/periodic_rot",
+    )
+    solver.file.read(
+        file_type="case",
+        file_name=case_name,
+        lightweight_setup=True,
+    )
+    return solver
+
+
+monkeypatch = pytest.MonkeyPatch()
+
+
+def disable_datamodel_cache(globals):
+    monkeypatch.setattr(pyfluent, "DATAMODEL_USE_STATE_CACHE", False)
+
+
+def display_names_as_keys_in_cache(globals):
+    DataModelCache.use_display_name = True
+
+
+def new_meshing_session2(globals):
+    session = pyfluent.launch_fluent(mode=pyfluent.LaunchMode.MESHING)
+    return session
+
+
+def new_solver_session2(globals):
+    session = pyfluent.launch_fluent()
+    return session
+
+
+def static_mixer_case_session2(globals):
+    session = new_solver_session2(globals)
+    case_name = download_file("Static_Mixer_main.cas.h5", "pyfluent/static_mixer")
+    session.file.read(file_type="case", file_name=case_name)
+    return session
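Note that `fluent_fixtures.py` is imported by the generated journals inside Fluent, outside any pytest run, so the module-level `pytest.MonkeyPatch()` instance stands in for the fixture-scoped `monkeypatch`. Unlike the fixture, it is never undone automatically; a journal that needed cleanup would have to do so explicitly (sketch, with a hypothetical call site):

    import fluent_fixtures

    fluent_fixtures.disable_datamodel_cache(globals())
    # ... test body ...
    fluent_fixtures.monkeypatch.undo()  # hypothetical cleanup; the journals do not do this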
diff --git a/tests/test_builtin_settings.py b/tests/test_builtin_settings.py
index fede8ed233e..c60191a35f0 100644
--- a/tests/test_builtin_settings.py
+++ b/tests/test_builtin_settings.py
@@ -4,7 +4,7 @@
 import pytest
 
 try:
-    from ansys.fluent.core import (
+    from ansys.fluent.core.solver import (
         Ablation,
         Battery,
         BoundaryCondition,
diff --git a/tests/test_codegen.py b/tests/test_codegen.py
index 65f312b3949..214bbd29588 100644
--- a/tests/test_codegen.py
+++ b/tests/test_codegen.py
@@ -430,416 +430,6 @@ def _get_query_settings_static_info(name, args):
     }
 
 
-_expected_init_settings_api_output = '''#
-# This is an auto-generated file. DO NOT EDIT!
-#
-
-"""A package providing Fluent's Settings Objects in Python."""
-from ansys.fluent.core.solver.flobject import *
-
-SHASH = "3e6d76a4601701388ea8258912d145b7b7c436699a50b6c7fe9a29f41eeff194"
-from .root import root'''
-
-
-_expected_root_settings_api_output = '''#
-# This is an auto-generated file. DO NOT EDIT!
-#
-
-from ansys.fluent.core.solver.flobject import *
-
-from ansys.fluent.core.solver.flobject import (
-    _ChildNamedObjectAccessorMixin,
-    CreatableNamedObjectMixin,
-    _NonCreatableNamedObjectMixin,
-    AllowedValuesMixin,
-    _InputFile,
-    _OutputFile,
-    _InOutFile,
-)
-
-from .G1 import G1 as G1_cls
-from .P1 import P1 as P1_cls
-from .N1 import N1 as N1_cls
-from .C1 import C1 as C1_cls
-from .Q1 import Q1 as Q1_cls
-
-class root(Group):
-    """
-    'root' object.
-    """
-
-    fluent_name = ""
-    version = "251"
-
-    child_names = \\
-        ['G1', 'P1', 'N1']
-
-    command_names = \\
-        ['C1']
-
-    query_names = \\
-        ['Q1']
-
-    _child_classes = dict(
-        G1=G1_cls,
-        P1=P1_cls,
-        N1=N1_cls,
-        C1=C1_cls,
-        Q1=Q1_cls,
-    )'''
-
-
-_expected_A1_settings_api_output = '''#
-# This is an auto-generated file. DO NOT EDIT!
-#
-
-from ansys.fluent.core.solver.flobject import *
-
-from ansys.fluent.core.solver.flobject import (
-    _ChildNamedObjectAccessorMixin,
-    CreatableNamedObjectMixin,
-    _NonCreatableNamedObjectMixin,
-    AllowedValuesMixin,
-    _InputFile,
-    _OutputFile,
-    _InOutFile,
-)
-
-
-class A1(String):
-    """
-    A1 help.
-    """
-
-    fluent_name = "A1"
-    version = "251"'''
-
-
-_expected_C1_settings_api_output = '''#
-# This is an auto-generated file. DO NOT EDIT!
-#
-
-from ansys.fluent.core.solver.flobject import *
-
-from ansys.fluent.core.solver.flobject import (
-    _ChildNamedObjectAccessorMixin,
-    CreatableNamedObjectMixin,
-    _NonCreatableNamedObjectMixin,
-    AllowedValuesMixin,
-    _InputFile,
-    _OutputFile,
-    _InOutFile,
-)
-
-from .A1 import A1 as A1_cls
-
-class C1(Command):
-    """
-    C1 help.
-
-    Parameters
-    ----------
-        A1 : str
-            A1 help.
-
-    """
-
-    fluent_name = "C1"
-    version = "251"
-
-    argument_names = \\
-        ['A1']
-
-    _child_classes = dict(
-        A1=A1_cls,
-    )'''  # noqa: W293
-
-
-_expected_G1_settings_api_output = '''#
-# This is an auto-generated file. DO NOT EDIT!
-#
-
-from ansys.fluent.core.solver.flobject import *
-
-from ansys.fluent.core.solver.flobject import (
-    _ChildNamedObjectAccessorMixin,
-    CreatableNamedObjectMixin,
-    _NonCreatableNamedObjectMixin,
-    AllowedValuesMixin,
-    _InputFile,
-    _OutputFile,
-    _InOutFile,
-)
-
-from .G2 import G2 as G2_cls
-from .P2 import P2 as P2_cls
-from .C2 import C2 as C2_cls
-from .Q2 import Q2 as Q2_cls
-
-class G1(Group):
-    """
-    G1 help.
-    """
-
-    fluent_name = "G1"
-    version = "251"
-
-    child_names = \\
-        ['G2', 'P2']
-
-    command_names = \\
-        ['C2']
-
-    query_names = \\
-        ['Q2']
-
-    _child_classes = dict(
-        G2=G2_cls,
-        P2=P2_cls,
-        C2=C2_cls,
-        Q2=Q2_cls,
-    )'''
-
-
-_expected_N1_settings_api_output = '''#
-# This is an auto-generated file. DO NOT EDIT!
-#
-
-from ansys.fluent.core.solver.flobject import *
-
-from ansys.fluent.core.solver.flobject import (
-    _ChildNamedObjectAccessorMixin,
-    CreatableNamedObjectMixin,
-    _NonCreatableNamedObjectMixin,
-    AllowedValuesMixin,
-    _InputFile,
-    _OutputFile,
-    _InOutFile,
-)
-
-from .P4 import P4 as P4_cls
-from .N1_child import N1_child
-
-
-class N1(NamedObject[N1_child], _NonCreatableNamedObjectMixin[N1_child]):
-    """
-    N1 help.
-    """
-
-    fluent_name = "N1"
-    version = "251"
-
-    child_names = \\
-        ['P4']
-
-    _child_classes = dict(
-        P4=P4_cls,
-    )
-
-    child_object_type: N1_child = N1_child
-    """
-    child_object_type of N1.
-    """'''
-
-
-_expected_N1_child_settings_api_output = '''#
-# This is an auto-generated file. DO NOT EDIT!
-#
-
-from ansys.fluent.core.solver.flobject import *
-
-from ansys.fluent.core.solver.flobject import (
-    _ChildNamedObjectAccessorMixin,
-    CreatableNamedObjectMixin,
-    _NonCreatableNamedObjectMixin,
-    AllowedValuesMixin,
-    _InputFile,
-    _OutputFile,
-    _InOutFile,
-)
-
-
-class N1_child(Group):
-    """
-    'child_object_type' of N1.
-    """
-
-    fluent_name = "child-object-type"
-    version = "251"'''
-
-
-_expected_P1_settings_api_output = '''#
-# This is an auto-generated file. DO NOT EDIT!
-#
-
-from ansys.fluent.core.solver.flobject import *
-
-from ansys.fluent.core.solver.flobject import (
-    _ChildNamedObjectAccessorMixin,
-    CreatableNamedObjectMixin,
-    _NonCreatableNamedObjectMixin,
-    AllowedValuesMixin,
-    _InputFile,
-    _OutputFile,
-    _InOutFile,
-)
-
-
-class P1(String):
-    """
-    P1 help.
-    """
-
-    fluent_name = "P1"
-    version = "251"'''
-
-
-_expected_Q1_settings_api_output = '''#
-# This is an auto-generated file. DO NOT EDIT!
-#
-
-from ansys.fluent.core.solver.flobject import *
-
-from ansys.fluent.core.solver.flobject import (
-    _ChildNamedObjectAccessorMixin,
-    CreatableNamedObjectMixin,
-    _NonCreatableNamedObjectMixin,
-    AllowedValuesMixin,
-    _InputFile,
-    _OutputFile,
-    _InOutFile,
-)
-
-from .A1 import A1 as A1_cls
-
-class Q1(Query):
-    """
-    Q1 help.
-
-    Parameters
-    ----------
-        A1 : str
-            A1 help.
-
-    """
-
-    fluent_name = "Q1"
-    version = "251"
-
-    argument_names = \\
-        ['A1']
-
-    _child_classes = dict(
-        A1=A1_cls,
-    )'''  # noqa: W293
-
-
-def test_codegen_old_with_settings_static_info(monkeypatch):
-    monkeypatch.setenv("PYFLUENT_USE_OLD_SETTINGSGEN", "1")
-    codegen_outdir = Path(tempfile.mkdtemp())
-    monkeypatch.setattr(pyfluent, "CODEGEN_OUTDIR", codegen_outdir)
-    version = "251"
-    static_infos = {}
-    static_infos[StaticInfoType.SETTINGS] = _settings_static_info
-    allapigen.generate(version, static_infos)
-    generated_paths = list(codegen_outdir.iterdir())
-    assert len(generated_paths) == 2
-    assert set(p.name for p in generated_paths) == {
-        f"api_tree_{version}.pickle",
-        "solver",
-    }
-    solver_paths = list((codegen_outdir / "solver").iterdir())
-    assert len(solver_paths) == 1
-    assert set(p.name for p in solver_paths) == {f"settings_{version}"}
-    settings_paths = list((codegen_outdir / "solver" / f"settings_{version}").iterdir())
-    filenames = [
-        "root",
-        "A1",
-        "A2",
-        "C1",
-        "C2",
-        "G1",
-        "G2",
-        "N1",
-        "N1_child",
-        "P1",
-        "P2",
-        "P3",
-        "P4",
-        "Q1",
-        "Q2",
-    ]
-    filenames = (
-        ["__init__.py"]
-        + [f"{f}.py" for f in filenames]
-        + [f"{f}.pyi" for f in filenames]
-    )
-    assert set(p.name for p in settings_paths) == set(filenames)
-    with open(
-        codegen_outdir / "solver" / f"settings_{version}" / "__init__.py", "r"
-    ) as f:
-        assert f.read().strip() == _expected_init_settings_api_output
-    with open(codegen_outdir / "solver" / f"settings_{version}" / "root.py", "r") as f:
-        assert f.read().strip() == _expected_root_settings_api_output
-    with open(codegen_outdir / "solver" / f"settings_{version}" / "A1.py", "r") as f:
-        assert f.read().strip() == _expected_A1_settings_api_output
-    with open(codegen_outdir / "solver" / f"settings_{version}" / "C1.py", "r") as f:
-        assert f.read().strip() == _expected_C1_settings_api_output
-    with open(codegen_outdir / "solver" / f"settings_{version}" / "G1.py", "r") as f:
-        assert f.read().strip() == _expected_G1_settings_api_output
-    with open(codegen_outdir / "solver" / f"settings_{version}" / "N1.py", "r") as f:
-        assert f.read().strip() == _expected_N1_settings_api_output
-    with open(
-        codegen_outdir / "solver" / f"settings_{version}" / "N1_child.py", "r"
-    ) as f:
-        assert f.read().strip() == _expected_N1_child_settings_api_output
-    with open(codegen_outdir / "solver" / f"settings_{version}" / "P1.py", "r") as f:
-        assert f.read().strip() == _expected_P1_settings_api_output
-    with open(codegen_outdir / "solver" / f"settings_{version}" / "Q1.py", "r") as f:
-        assert f.read().strip() == _expected_Q1_settings_api_output
-    api_tree_file = get_api_tree_file_name(version)
-    with open(api_tree_file, "rb") as f:
-        api_tree = pickle.load(f)
-    settings_tree = {
-        "C1": "Command",
-        "G1": {
-            "C2": "Command",
-            "G2": {"P3": "Parameter"},
-            "P2": "Parameter",
-            "Q2": "Query",
-        },
-        "N1:": {"P4": "Parameter"},
-        "P1": "Parameter",
-        "Q1": "Query",
-    }
-    api_tree_expected = {}
-    api_tree_expected["<meshing_session>"] = {}
-    api_tree_expected["<solver_session>"] = settings_tree
-    assert api_tree == api_tree_expected
-    shutil.rmtree(str(codegen_outdir))
-
-
-def test_codegen_old_with_zipped_settings_static_info(monkeypatch):
-    monkeypatch.setenv("PYFLUENT_USE_OLD_SETTINGSGEN", "1")
-    codegen_outdir = Path(tempfile.mkdtemp())
-    monkeypatch.setattr(pyfluent, "CODEGEN_OUTDIR", codegen_outdir)
-    monkeypatch.setattr(pyfluent, "CODEGEN_ZIP_SETTINGS", True)
-    version = "251"
-    static_infos = {}
-    static_infos[StaticInfoType.SETTINGS] = _settings_static_info
-    allapigen.generate(version, static_infos)
-    generated_paths = list(codegen_outdir.iterdir())
-    assert len(generated_paths) == 2
-    assert set(p.name for p in generated_paths) == {
-        f"api_tree_{version}.pickle",
-        "solver",
-    }
-    solver_paths = list((codegen_outdir / "solver").iterdir())
-    assert len(solver_paths) == 1
-    assert set(p.name for p in solver_paths) == {f"settings_{version}.zip"}
-    shutil.rmtree(str(codegen_outdir))
-
-
 _expected_settings_api_output = '''#
 # This is an auto-generated file. DO NOT EDIT!
 #
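The deleted codegen tests compared against hand-maintained expected outputs; the new datamodel tests below instead register their rules in the running server and generate the root object on the fly via `util.create_datamodel_root_in_server` and `util.create_root_using_datamodelgen`. Judging from the `_create_datamodel_root` helper deleted from `tests/test_datamodel_service.py` further down, the first helper presumably works along these lines (sketch; the real implementation lives in `tests/util.py` and may differ):

    def create_datamodel_root_in_server(session, rules_str, app_name) -> None:
        # write the rules to a .fdl file inside the server and register a
        # state engine for it, then verify the root exists
        rules_file_name = f"{app_name}.fdl"
        session.scheme_eval.scheme_eval(
            f'(with-output-to-file "{rules_file_name}" (lambda () (format "~a" "{rules_str}")))'
        )
        session.scheme_eval.scheme_eval(
            f'(state/register-new-state-engine "{app_name}" "{rules_file_name}")'
        )
        session.scheme_eval.scheme_eval(f'(remove-file "{rules_file_name}")')
        assert session.scheme_eval.scheme_eval(f'(state/find-root "{app_name}")') > 0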
diff --git a/tests/test_datamodel_api.py b/tests/test_datamodel_api.py
new file mode 100644
index 00000000000..a78b4fca9fa
--- /dev/null
+++ b/tests/test_datamodel_api.py
@@ -0,0 +1,449 @@
+import time
+
+import pytest
+from util import create_datamodel_root_in_server, create_root_using_datamodelgen
+
+from ansys.fluent.core.services.datamodel_se import (
+    SubscribeEventError,
+    convert_path_to_se_path,
+)
+from ansys.fluent.core.utils.execution import timeout_loop
+
+rule_str = (
+    "RULES:\n"
+    "  STRING: X\n"
+    "    default = ijk\n"
+    "  END\n"
+    "  SINGLETON: ROOT\n"
+    "    members = A, B, D, G\n"
+    "    commands = C\n"
+    "    SINGLETON: A\n"
+    "      members = X\n"
+    "      x = $./X\n"
+    "    END\n"
+    "    OBJECT: B\n"
+    "      members = X\n"
+    "    END\n"
+    "    SINGLETON: D\n"
+    "      members = E, F, X\n"
+    "      SINGLETON: E\n"
+    "        members = X\n"
+    "      END\n"
+    "      SINGLETON: F\n"
+    "        members = X\n"
+    "      END\n"
+    "    END\n"
+    "    SINGLETON: G\n"
+    "      members = H\n"
+    "      DICT: H\n"
+    "      END\n"
+    "    END\n"
+    "    COMMAND: C\n"
+    "      arguments = X\n"
+    "      x = $/A/X\n"
+    "    END\n"
+    "  END\n"
+    "END\n"
+)
+
+
+@pytest.mark.fluent_version(">=25.2")
+def test_env_var_setting(datamodel_api_version_all, request, new_solver_session):
+    solver = new_solver_session
+    test_name = request.node.name
+    for var in ["REMOTING_NEW_DM_API", "REMOTING_MAPPED_NEW_DM_API"]:
+        # TODO: It might be possible to check the param value in the fixture
+        # instead of checking the test name here.
+        if test_name.endswith("[old]"):
+            assert solver.scheme_eval.scheme_eval(f'(getenv "{var}")') is None
+        elif test_name.endswith("[new]"):
+            assert solver.scheme_eval.scheme_eval(f'(getenv "{var}")') == "1"
+
+
+@pytest.mark.fluent_version(">=25.2")
+def test_datamodel_api_on_child_created(datamodel_api_version_all, new_solver_session):
+    solver = new_solver_session
+    app_name = "test"
+    create_datamodel_root_in_server(solver, rule_str, app_name)
+    service = solver._se_service
+    root = create_root_using_datamodelgen(service, app_name)
+
+    called = 0
+    created = []
+
+    def cb(obj):
+        nonlocal called
+        nonlocal created
+        called += 1
+        created.append(convert_path_to_se_path(obj.path))
+
+    subscription = service.add_on_child_created(app_name, "/", "B", root, cb)
+    assert called == 0
+    assert created == []
+    service.set_state(app_name, "/", {"B:b": {"_name_": "b"}})
+    timeout_loop(lambda: called == 1, timeout=5)
+    assert called == 1
+    assert created == ["/B:b"]
+    subscription.unsubscribe()
+
+
+@pytest.mark.fluent_version(">=25.2")
+def test_datamodel_api_on_changed(datamodel_api_version_all, new_solver_session):
+    solver = new_solver_session
+    app_name = "test"
+    create_datamodel_root_in_server(solver, rule_str, app_name)
+    service = solver._se_service
+    root = create_root_using_datamodelgen(service, app_name)
+    called = 0
+    state = None
+    called_obj = 0
+    state_obj = None
+
+    def cb(obj):
+        nonlocal called
+        nonlocal state
+        state = obj()
+        called += 1
+
+    def cb_obj(obj):
+        nonlocal called_obj
+        nonlocal state_obj
+        state_obj = obj()
+        called_obj += 1
+
+    subscription = service.add_on_changed(app_name, "/A/X", root.A.X, cb)
+    subscription_obj = service.add_on_changed(app_name, "/A", root.A, cb_obj)
+    assert called == 0
+    assert state is None
+    assert called_obj == 0
+    assert state_obj is None
+    service.set_state(app_name, "/A/X", "lmn")
+    timeout_loop(lambda: called == 1, timeout=5)
+    assert called == 1
+    assert state == "lmn"
+    assert called_obj == 1
+    assert state_obj == {"X": "lmn"}
+    service.set_state(app_name, "/A/X", "abc")
+    timeout_loop(lambda: called == 2, timeout=5)
+    assert called == 2
+    assert state == "abc"
+    assert called_obj == 2
+    assert state_obj == {"X": "abc"}
+    subscription.unsubscribe()
+    subscription_obj.unsubscribe()
+    service.set_state(app_name, "/A/X", "xyz")
+    time.sleep(5)
+    assert called == 2
+    assert state == "abc"
+    assert called_obj == 2
+    assert state_obj == {"X": "abc"}
+
+
+@pytest.mark.fluent_version(">=25.2")
+def test_datamodel_api_on_affected(datamodel_api_version_all, new_solver_session):
+    solver = new_solver_session
+    app_name = "test"
+    create_datamodel_root_in_server(solver, rule_str, app_name)
+    service = solver._se_service
+    root = create_root_using_datamodelgen(service, app_name)
+    called = 0
+
+    def cb(obj):
+        nonlocal called
+        called += 1
+
+    subscription = service.add_on_affected(app_name, "/D", root.D, cb)
+    assert called == 0
+    service.set_state(app_name, "/D/X", "lmn")
+    timeout_loop(lambda: called == 1, timeout=5)
+    assert called == 1
+    service.set_state(app_name, "/D/E/X", "lmn")
+    timeout_loop(lambda: called == 2, timeout=5)
+    assert called == 2
+    service.set_state(app_name, "/A/X", "lmn")
+    time.sleep(5)
+    assert called == 2
+    subscription.unsubscribe()
+    service.set_state(app_name, "/D/E/X", "pqr")
+    time.sleep(5)
+    assert called == 2
+
+
+@pytest.mark.fluent_version(">=25.2")
+def test_datamodel_api_on_affected_at_type_path(
+    datamodel_api_version_all, new_solver_session
+):
+    solver = new_solver_session
+    app_name = "test"
+    create_datamodel_root_in_server(solver, rule_str, app_name)
+    service = solver._se_service
+    root = create_root_using_datamodelgen(service, app_name)
+    called = 0
+
+    def cb(obj):
+        nonlocal called
+        called += 1
+
+    subscription = service.add_on_affected_at_type_path(
+        app_name, "/D", "E", root.D.E, cb
+    )
+    assert called == 0
+    service.set_state(app_name, "/D/X", "lmn")
+    time.sleep(5)
+    assert called == 0
+    service.set_state(app_name, "/D/E/X", "lmn")
+    timeout_loop(lambda: called == 1, timeout=5)
+    assert called == 1
+    service.set_state(app_name, "/D/F/X", "lmn")
+    time.sleep(5)
+    assert called == 1
+    subscription.unsubscribe()
+    service.set_state(app_name, "/D/E/X", "pqr")
+    time.sleep(5)
+    assert called == 1
+
+
+@pytest.mark.fluent_version(">=25.2")
+def test_datamodel_api_on_deleted(
+    datamodel_api_version_all, request, new_solver_session
+):
+    solver = new_solver_session
+    app_name = "test"
+    create_datamodel_root_in_server(solver, rule_str, app_name)
+    service = solver._se_service
+    root = create_root_using_datamodelgen(service, app_name)
+    called = False
+    called_obj = False
+
+    def cb():
+        nonlocal called
+        called = True
+
+    def cb_obj():
+        nonlocal called_obj
+        called_obj = True
+
+    service.set_state(app_name, "/", {"B:b": {"_name_": "b"}})
+    subscription = service.add_on_deleted(app_name, "/B:b/X", root.B["b"].X, cb)
+    subscription_obj = service.add_on_deleted(app_name, "/B:b", root.B["b"], cb_obj)
+    assert not called
+    assert not called_obj
+    service.delete_object(app_name, "/B:b")
+    time.sleep(5)
+    test_name = request.node.name
+    # TODO: Note comment in StateEngine test testDataModelAPIOnDeleted
+    if test_name.endswith("[old]"):
+        assert called
+    elif test_name.endswith("[new]"):
+        assert not called
+    assert called_obj
+    subscription.unsubscribe()
+    subscription_obj.unsubscribe()
+
+
+@pytest.mark.fluent_version(">=25.2")
+def test_datamodel_api_on_attribute_changed(
+    datamodel_api_version_all, new_solver_session
+):
+    solver = new_solver_session
+    app_name = "test"
+    create_datamodel_root_in_server(solver, rule_str, app_name)
+    service = solver._se_service
+    root = create_root_using_datamodelgen(service, app_name)
+    called = 0
+    value = None
+
+    def cb(val):
+        nonlocal called
+        nonlocal value
+        value = val
+        called += 1
+
+    subscription = service.add_on_attribute_changed(app_name, "/A", "x", root.A, cb)
+    assert called == 0
+    assert value is None
+    service.set_state(app_name, "/A/X", "cde")
+    timeout_loop(lambda: called == 1, timeout=5)
+    assert called == 1
+    assert value == "cde"
+    service.set_state(app_name, "/A/X", "xyz")
+    timeout_loop(lambda: called == 2, timeout=5)
+    assert called == 2
+    assert value == "xyz"
+    subscription.unsubscribe()
+    service.set_state(app_name, "/A/X", "abc")
+    time.sleep(5)
+    assert called == 2
+    assert value == "xyz"
+
+
+@pytest.mark.fluent_version(">=25.2")
+def test_datamodel_api_on_command_attribute_changed(
+    datamodel_api_version_all, new_solver_session
+):
+    solver = new_solver_session
+    app_name = "test"
+    create_datamodel_root_in_server(solver, rule_str, app_name)
+    service = solver._se_service
+    root = create_root_using_datamodelgen(service, app_name)
+    called = 0
+    value = None
+
+    def cb(val):
+        nonlocal called
+        nonlocal value
+        value = val
+        called += 1
+
+    subscription = service.add_on_command_attribute_changed(
+        app_name, "/", "C", "x", root.C, cb
+    )
+    assert called == 0
+    assert value is None
+    service.set_state(app_name, "/A/X", "cde")
+    timeout_loop(lambda: called == 1, timeout=5)
+    assert called == 1
+    assert value == "cde"
+    service.set_state(app_name, "/A/X", "xyz")
+    timeout_loop(lambda: called == 2, timeout=5)
+    assert called == 2
+    # TODO: value is still "cde" in both old and new API
+    # assert value == "xyz"
+    subscription.unsubscribe()
+    service.set_state(app_name, "/A/X", "abc")
+    time.sleep(5)
+    assert called == 2
+    # Commented out because of the issue above
+    # assert value == "xyz"
+
+
+@pytest.mark.fluent_version(">=25.2")
+def test_datamodel_api_on_command_executed(
+    datamodel_api_version_all, new_solver_session
+):
+    solver = new_solver_session
+    app_name = "test"
+    create_datamodel_root_in_server(solver, rule_str, app_name)
+    service = solver._se_service
+    root = create_root_using_datamodelgen(service, app_name)
+    executed = 0
+    command = None
+    arguments = None
+
+    def cb(obj, cmd, args):
+        nonlocal executed
+        nonlocal command
+        nonlocal arguments
+        command = cmd
+        arguments = args
+        executed += 1
+
+    # TODO: In C++ API, we don't need to pass the command name
+    subscription = service.add_on_command_executed(app_name, "/", "C", root, cb)
+    assert executed == 0
+    assert command is None
+    assert arguments is None
+    service.execute_command(app_name, "/", "C", dict(X="abc"))
+    timeout_loop(lambda: executed == 1, timeout=5)
+    assert executed == 1
+    assert command == "C"
+    assert arguments == {"X": "abc"}
+    subscription.unsubscribe()
+    service.execute_command(app_name, "/", "C", dict(X="uvw"))
+    time.sleep(5)
+    assert executed == 1
+    assert command == "C"
+    assert arguments == {"X": "abc"}
+
+
+@pytest.mark.fluent_version(">=25.2")
+def test_datamodel_api_get_state(datamodel_api_version_all, new_solver_session):
+    solver = new_solver_session
+    app_name = "test"
+    create_datamodel_root_in_server(solver, rule_str, app_name)
+    service = solver._se_service
+    assert service.get_state(app_name, "/A/X") == "ijk"
+
+
+@pytest.mark.fluent_version(">=25.2")
+def test_datamodel_api_set_state(datamodel_api_version_all, new_solver_session):
+    solver = new_solver_session
+    app_name = "test"
+    create_datamodel_root_in_server(solver, rule_str, app_name)
+    service = solver._se_service
+    service.set_state(app_name, "/A/X", "new_val")
+    assert service.get_state(app_name, "/A/X") == "new_val"
+
+
+@pytest.mark.fluent_version(">=25.2")
+def test_datamodel_api_update_dict(datamodel_api_version_all, new_solver_session):
+    solver = new_solver_session
+    app_name = "test"
+    create_datamodel_root_in_server(solver, rule_str, app_name)
+    service = solver._se_service
+    service.update_dict(app_name, "/G/H", {"X": "abc"})
+    assert service.get_state(app_name, "/G/H") == {"X": "abc"}
+
+
+@pytest.mark.fluent_version(">=25.2")
+def test_datamodel_api_on_bad_input(
+    datamodel_api_version_all, request, new_solver_session
+):
+    solver = new_solver_session
+    app_name = "test"
+    create_datamodel_root_in_server(solver, rule_str, app_name)
+    service = solver._se_service
+    root = create_root_using_datamodelgen(service, app_name)
+    test_name = request.node.name
+    new_api = test_name.endswith("[new]")
+    with pytest.raises(SubscribeEventError):
+        service.add_on_child_created(app_name, "", "", root, lambda _: None)
+    with pytest.raises(RuntimeError if new_api else SubscribeEventError):  # TODO: issue
+        service.add_on_child_created(app_name, "/BB", "B", root, lambda _: None)
+    with pytest.raises(SubscribeEventError):
+        service.add_on_child_created(app_name, "/", "A", root, lambda _: None)
+    with pytest.raises(SubscribeEventError):
+        service.add_on_child_created(app_name, "/", "BB", root, lambda _: None)
+    with pytest.raises(RuntimeError if new_api else SubscribeEventError):  # TODO: issue
+        service.add_on_changed(app_name, "/BB", root, lambda _: None)
+    with pytest.raises(RuntimeError if new_api else SubscribeEventError):  # TODO: issue
+        service.add_on_deleted(app_name, "/BB", root, lambda: None)
+    with pytest.raises(RuntimeError if new_api else SubscribeEventError):  # TODO: issue
+        service.add_on_affected(app_name, "/BB", root, lambda _: None)
+    with pytest.raises(RuntimeError if new_api else SubscribeEventError):  # TODO: issue
+        service.add_on_affected_at_type_path(app_name, "/BB", "B", root, lambda: None)
+    # TODO: not raised in the old API - issue
+    if new_api:
+        with pytest.raises(SubscribeEventError):
+            service.add_on_affected_at_type_path(
+                app_name, "/", "BB", root, lambda: None
+            )
+    with pytest.raises(RuntimeError if new_api else SubscribeEventError):  # TODO: issue
+        service.add_on_attribute_changed(
+            app_name, "/BB", "isActive", root, lambda _: None
+        )
+    with pytest.raises(SubscribeEventError):
+        service.add_on_attribute_changed(app_name, "/A", "", root, lambda _: None)
+    with pytest.raises(RuntimeError if new_api else SubscribeEventError):  # TODO: issue
+        service.add_on_command_attribute_changed(
+            app_name, "/BB", "C", "isActive", root, lambda _: None
+        )
+    with pytest.raises(SubscribeEventError):
+        service.add_on_command_attribute_changed(
+            app_name, "/A", "CC", "", root, lambda _: None
+        )
+    with pytest.raises(SubscribeEventError):
+        service.add_on_command_attribute_changed(
+            app_name, "/", "CC", "isActive", root, lambda _: None
+        )
+    with pytest.raises(RuntimeError if new_api else SubscribeEventError):  # TODO: issue
+        service.add_on_command_executed(app_name, "/BB", "C", root, lambda _: None)
+
+
+@pytest.mark.fluent_version(">=25.2")
+def test_datamodel_api_static_info(datamodel_api_version_all, new_solver_session):
+    solver = new_solver_session
+    app_name = "test"
+    create_datamodel_root_in_server(solver, rule_str, app_name)
+    service = solver._se_service
+    assert service.get_static_info(app_name)
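These event tests poll with `timeout_loop` for positive checks and fall back to a fixed `time.sleep(5)` for negative ones, since there is no event to wait for when asserting that a callback was *not* fired. The behavior relied on here is, roughly, to re-evaluate the predicate until it returns a truthy value or the timeout expires (sketch only; see `ansys.fluent.core.utils.execution.timeout_loop` for the real signature):

    import time

    def timeout_loop_sketch(predicate, timeout, idle_period=0.2):
        # poll the predicate until it is truthy or the timeout elapses
        deadline = time.time() + timeout
        while time.time() < deadline:
            result = predicate()
            if result:
                return result
            time.sleep(idle_period)
        return predicate()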
diff --git a/tests/test_datamodel_service.py b/tests/test_datamodel_service.py
index 074bc925d24..289c5bc8a0e 100644
--- a/tests/test_datamodel_service.py
+++ b/tests/test_datamodel_service.py
@@ -3,17 +3,14 @@
 
 from google.protobuf.json_format import MessageToDict
 import pytest
+from util import create_datamodel_root_in_server, create_root_using_datamodelgen
 
 from ansys.api.fluent.v0 import datamodel_se_pb2
 from ansys.api.fluent.v0.variant_pb2 import Variant
 import ansys.fluent.core as pyfluent
 from ansys.fluent.core import examples
 from ansys.fluent.core.services.datamodel_se import (
-    PyCommand,
-    PyMenu,
     PyMenuGeneric,
-    PyNamedObjectContainer,
-    PyTextual,
     ReadOnlyObjectError,
     _convert_value_to_variant,
     _convert_variant_to_value,
@@ -125,7 +122,7 @@ def test_add_on_deleted(new_meshing_session):
     meshing.workflow.InitializeWorkflow(WorkflowType="Watertight Geometry")
     data = []
     _ = meshing.workflow.TaskObject["Import Geometry"].add_on_deleted(
-        lambda obj: data.append(convert_path_to_se_path(obj.path))
+        lambda: data.append(True)
     )
     assert data == []
     meshing.workflow.InitializeWorkflow(WorkflowType="Fault-tolerant Meshing")
@@ -543,47 +540,12 @@ def test_read_only_set_state(new_meshing_session):
     )
 
 
-class test_root(PyMenu):
-    def __init__(self, service, rules, path):
-        self.A = self.__class__.A(service, rules, path + [("A", "")])
-        super().__init__(service, rules, path)
-
-    class A(PyNamedObjectContainer):
-        class _A(PyMenu):
-            def __init__(self, service, rules, path):
-                self.B = self.__class__.B(service, rules, path + [("B", "")])
-                self.X = self.__class__.X(service, rules, path + [("X", "")])
-                self.C = self.__class__.C(service, rules, "C", path)
-                super().__init__(service, rules, path)
-
-            class B(PyNamedObjectContainer):
-                class _B(PyMenu):
-                    pass
-
-            class X(PyTextual):
-                pass
-
-            class C(PyCommand):
-                pass
-
-
-def _create_datamodel_root(session, rules_str) -> PyMenu:
-    rules_file_name = "test.fdl"
-    session.scheme_eval.scheme_eval(
-        f'(with-output-to-file "{rules_file_name}" (lambda () (format "~a" "{rules_str}")))'
-    )
-    session.scheme_eval.scheme_eval(
-        '(state/register-new-state-engine "test" "test.fdl")'
-    )
-    session.scheme_eval.scheme_eval(f'(remove-file "{rules_file_name}")')
-    assert session.scheme_eval.scheme_eval('(state/find-root "test")') > 0
-    return test_root(session._se_service, "test", [])
-
-
 @pytest.mark.fluent_version(">=24.2")
 def test_on_child_created_lifetime(new_solver_session):
     solver = new_solver_session
-    root = _create_datamodel_root(solver, test_rules)
+    app_name = "test"
+    create_datamodel_root_in_server(solver, test_rules, app_name)
+    root = create_root_using_datamodelgen(solver._se_service, app_name)
     root.A["A1"] = {}
     data = []
     _ = root.A["A1"].add_on_child_created("B", lambda _: data.append(1))
@@ -601,11 +563,13 @@ def test_on_child_created_lifetime(new_solver_session):
 @pytest.mark.fluent_version(">=24.2")
 def test_on_deleted_lifetime(new_solver_session):
     solver = new_solver_session
-    root = _create_datamodel_root(solver, test_rules)
+    app_name = "test"
+    create_datamodel_root_in_server(solver, test_rules, app_name)
+    root = create_root_using_datamodelgen(solver._se_service, app_name)
     root.A["A1"] = {}
     data = []
-    _ = root.A["A1"].add_on_deleted(lambda _: data.append(1))
-    root.A["A1"].add_on_deleted(lambda _: data.append(2))
+    _ = root.A["A1"].add_on_deleted(lambda: data.append(1))
+    root.A["A1"].add_on_deleted(lambda: data.append(2))
     gc.collect()
     assert "/test/deleted/A:A1" in solver._se_service.subscriptions
     assert "/test/deleted/A:A1-1" in solver._se_service.subscriptions
@@ -622,7 +586,9 @@ def test_on_deleted_lifetime(new_solver_session):
 @pytest.mark.fluent_version(">=24.2")
 def test_on_changed_lifetime(new_solver_session):
     solver = new_solver_session
-    root = _create_datamodel_root(solver, test_rules)
+    app_name = "test"
+    create_datamodel_root_in_server(solver, test_rules, app_name)
+    root = create_root_using_datamodelgen(solver._se_service, app_name)
     root.A["A1"] = {}
     data = []
     _ = root.A["A1"].X.add_on_changed(lambda _: data.append(1))
@@ -640,7 +606,9 @@ def test_on_changed_lifetime(new_solver_session):
 @pytest.mark.fluent_version(">=24.2")
 def test_on_affected_lifetime(new_solver_session):
     solver = new_solver_session
-    root = _create_datamodel_root(solver, test_rules)
+    app_name = "test"
+    create_datamodel_root_in_server(solver, test_rules, app_name)
+    root = create_root_using_datamodelgen(solver._se_service, app_name)
     root.A["A1"] = {}
     data = []
     _ = root.A["A1"].add_on_affected(lambda _: data.append(1))
@@ -658,7 +626,9 @@ def test_on_affected_lifetime(new_solver_session):
 @pytest.mark.fluent_version(">=24.2")
 def test_on_affected_at_type_path_lifetime(new_solver_session):
     solver = new_solver_session
-    root = _create_datamodel_root(solver, test_rules)
+    app_name = "test"
+    create_datamodel_root_in_server(solver, test_rules, app_name)
+    root = create_root_using_datamodelgen(solver._se_service, app_name)
     root.A["A1"] = {}
     data = []
     _ = root.A["A1"].add_on_affected_at_type_path("B", lambda _: data.append(1))
@@ -676,7 +646,9 @@ def test_on_affected_at_type_path_lifetime(new_solver_session):
 @pytest.mark.fluent_version(">=24.2")
 def test_on_command_executed_lifetime(new_solver_session):
     solver = new_solver_session
-    root = _create_datamodel_root(solver, test_rules)
+    app_name = "test"
+    create_datamodel_root_in_server(solver, test_rules, app_name)
+    root = create_root_using_datamodelgen(solver._se_service, app_name)
     root.A["A1"] = {}
     data = []
     _ = root.A["A1"].add_on_command_executed("C", lambda *args: data.append(1))
@@ -694,7 +666,9 @@ def test_on_command_executed_lifetime(new_solver_session):
 @pytest.mark.fluent_version(">=24.2")
 def test_on_attribute_changed_lifetime(new_solver_session):
     solver = new_solver_session
-    root = _create_datamodel_root(solver, test_rules)
+    app_name = "test"
+    create_datamodel_root_in_server(solver, test_rules, app_name)
+    root = create_root_using_datamodelgen(solver._se_service, app_name)
     root.A["A1"] = {}
     data = []
     _ = root.A["A1"].add_on_attribute_changed("isABC", lambda _: data.append(1))
@@ -714,7 +688,9 @@ def test_on_attribute_changed_lifetime(new_solver_session):
 @pytest.mark.fluent_version(">=24.2")
 def test_on_command_attribute_changed_lifetime(new_solver_session):
     solver = new_solver_session
-    root = _create_datamodel_root(solver, test_rules)
+    app_name = "test"
+    create_datamodel_root_in_server(solver, test_rules, app_name)
+    root = create_root_using_datamodelgen(solver._se_service, app_name)
     root.A["A1"] = {}
     data = []
     _ = root.A["A1"].add_on_command_attribute_changed(
@@ -748,7 +724,9 @@ def test_on_command_attribute_changed_lifetime(new_solver_session):
 @pytest.mark.fluent_version(">=24.2")
 def test_on_affected_lifetime_with_delete_child_objects(new_solver_session):
     solver = new_solver_session
-    root = _create_datamodel_root(solver, test_rules)
+    app_name = "test"
+    create_datamodel_root_in_server(solver, test_rules, app_name)
+    root = create_root_using_datamodelgen(solver._se_service, app_name)
     pyfluent.logging.enable()
     root.A["A1"] = {}
     data = []
@@ -767,7 +745,9 @@ def test_on_affected_lifetime_with_delete_child_objects(new_solver_session):
 @pytest.mark.fluent_version(">=24.2")
 def test_on_affected_lifetime_with_delete_all_child_objects(new_solver_session):
     solver = new_solver_session
-    root = _create_datamodel_root(solver, test_rules)
+    app_name = "test"
+    create_datamodel_root_in_server(solver, test_rules, app_name)
+    root = create_root_using_datamodelgen(solver._se_service, app_name)
     pyfluent.logging.enable()
     root.A["A1"] = {}
     data = []
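Note the behavioral change exercised above: `add_on_deleted` callbacks now take no arguments (the object is already gone when the event fires), so `lambda obj: ...` becomes `lambda: ...`. Existing callers registering, say,

    meshing.workflow.TaskObject["Import Geometry"].add_on_deleted(lambda: print("deleted"))

need the same adjustment.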
diff --git a/tests/test_events_manager.py b/tests/test_events_manager.py
index 11bd0ca1b27..fea1b35b067 100644
--- a/tests/test_events_manager.py
+++ b/tests/test_events_manager.py
@@ -1,7 +1,10 @@
+from pathlib import Path
+
 import pytest
 
 import ansys.fluent.core as pyfluent
-from ansys.fluent.core import MeshingEvent, SolverEvent, examples
+from ansys.fluent.core import FluentVersion, MeshingEvent, SolverEvent, examples
+from ansys.fluent.core.warnings import PyFluentDeprecationWarning
 
 
 def test_receive_events_on_case_loaded(new_solver_session) -> None:
@@ -18,10 +21,19 @@ def on_case_loaded_old_with_args(x, y, session_id, event_info):
 
     def on_case_loaded(session, event_info):
         on_case_loaded.loaded = True
+        if session.get_fluent_version() >= FluentVersion.v232:
+            assert Path(event_info.case_file_name).name == Path(case_file_name).name
+            with pytest.warns(PyFluentDeprecationWarning):
+                assert Path(event_info.casefilepath).name == Path(case_file_name).name
 
     on_case_loaded.loaded = False
 
-    def on_case_loaded_with_args(x, y, session, event_info):
+    def on_case_loaded_with_args_optional_first(x, y, session, event_info):
+        on_case_loaded_with_args_optional_first.state = dict(x=x, y=y)
+
+    on_case_loaded_with_args_optional_first.state = None
+
+    def on_case_loaded_with_args(session, event_info, x, y):
         on_case_loaded_with_args.state = dict(x=x, y=y)
 
     on_case_loaded_with_args.state = None
@@ -36,6 +48,10 @@ def on_case_loaded_with_args(x, y, session, event_info):
 
     solver.events.register_callback(SolverEvent.CASE_LOADED, on_case_loaded)
 
+    solver.events.register_callback(
+        SolverEvent.CASE_LOADED, on_case_loaded_with_args_optional_first, 12, y=42
+    )
+
     solver.events.register_callback(
         SolverEvent.CASE_LOADED, on_case_loaded_with_args, 12, y=42
     )
@@ -47,6 +63,7 @@ def on_case_loaded_with_args(x, y, session, event_info):
     assert not on_case_loaded_old.loaded
     assert not on_case_loaded.loaded
     assert not on_case_loaded_old_with_args.state
+    assert not on_case_loaded_with_args_optional_first.state
     assert not on_case_loaded_with_args.state
 
     try:
@@ -57,11 +74,16 @@ def on_case_loaded_with_args(x, y, session, event_info):
     assert on_case_loaded_old.loaded
     assert on_case_loaded.loaded
     assert on_case_loaded_old_with_args.state == dict(x=12, y=42)
+    assert on_case_loaded_with_args_optional_first.state == dict(x=12, y=42)
     assert on_case_loaded_with_args.state == dict(x=12, y=42)
 
 
 def test_receive_meshing_events_on_case_loaded(new_meshing_session) -> None:
+    case_file_name = examples.download_file(
+        "mixing_elbow.cas.h5", "pyfluent/mixing_elbow"
+    )
+
     def on_case_loaded(session, event_info):
         on_case_loaded.loaded = True
 
@@ -71,10 +93,6 @@ def on_case_loaded(session, event_info):
 
     meshing.events.register_callback(MeshingEvent.CASE_LOADED, on_case_loaded)
 
-    case_file_name = examples.download_file(
-        "mixing_elbow.cas.h5", "pyfluent/mixing_elbow"
-    )
-
     assert not on_case_loaded.loaded
 
     meshing.tui.file.read_case(case_file_name)
diff --git a/tests/test_field_data.py b/tests/test_field_data.py
index 782b06f1ecf..87e337f003a 100644
--- a/tests/test_field_data.py
+++ b/tests/test_field_data.py
@@ -432,3 +432,35 @@ def test_field_data_does_not_modify_case(new_solver_session):
         field_name="absolute-pressure", surfaces=["cold-inlet"]
     )
     assert not solver.scheme_eval.scheme_eval("(case-modified?)")
+
+
+@pytest.mark.fluent_version(">=24.1")
+def test_field_data_streaming_in_meshing_mode(new_meshing_session):
+    meshing = new_meshing_session
+    import_file_name = examples.download_file(
+        "mixing_elbow.pmdb", "pyfluent/mixing_elbow"
+    )
+
+    mesh_data = {}
+
+    def plot_mesh(index, field_name, data):
+        if data is not None:
+            if index in mesh_data:
+                mesh_data[index].update({field_name: data})
+            else:
+                mesh_data[index] = {field_name: data}
+
+    meshing.fields.field_data_streaming.register_callback(plot_mesh)
+    meshing.fields.field_data_streaming.start(provideBytesStream=True, chunkSize=1024)
+
+    meshing.workflow.InitializeWorkflow(WorkflowType="Watertight Geometry")
+    meshing.workflow.TaskObject["Import Geometry"].Arguments = {
+        "FileName": import_file_name,
+        "LengthUnit": "in",
+    }
+    meshing.workflow.TaskObject["Import Geometry"].Execute()
+
+    assert len(mesh_data[5]["vertices"]) == 66
+    assert len(mesh_data[5]["faces"]) == 80
+
+    assert list(mesh_data[12].keys()) == ["vertices", "faces"]
diff --git a/tests/test_flobject.py b/tests/test_flobject.py
index b3ac63799ee..6e18db0b350 100644
--- a/tests/test_flobject.py
+++ b/tests/test_flobject.py
@@ -11,7 +11,6 @@
 from ansys.fluent.core.solver import flobject
 from ansys.fluent.core.solver.flobject import (
     InactiveObjectError,
-    _combine_set_states,
     _gethash,
     find_children,
 )
@@ -361,10 +360,6 @@ def cb(self, a1, a2):
 }
 
 
-class _SchemeEval:
-    version = "25.1.0"
-
-
 class Proxy:
     """Proxy class."""
 
@@ -372,7 +367,6 @@ class Proxy:
 
     def __init__(self):
         self.r = self.root(None)
-        self._scheme_eval = _SchemeEval()
 
     def get_obj(self, path):
         if not path:
@@ -1239,39 +1233,3 @@ def test_default_argument_names_for_commands(static_mixer_settings_session):
     assert solver.results.graphics.contour.delete.argument_names == ["name_list"]
     # The following is the default behavior when no arguments are associated with the command.
     assert solver.results.graphics.contour.list.argument_names == []
-
-
-def test_combine_set_states():
-    assert _combine_set_states(
-        [
-            ("A/B/C", 1),
-        ]
-    ) == ("A/B/C", 1)
-
-    assert _combine_set_states(
-        [
-            ("A/B/C", 1),
-            ("A/B/C", 2),
-        ]
-    ) == ("A/B/C", 2)
-
-    assert _combine_set_states(
-        [
-            ("A/B/C", 1),
-            ("A/B/C", {"X": 2}),
-        ]
-    ) == ("A/B/C", {"X": 2})
-
-    assert _combine_set_states(
-        [
-            ("A/B/C", 1),
-            ("A/B/D", 2),
-        ]
-    ) == ("A/B", {"C": 1, "D": 2})
-
-    assert _combine_set_states(
-        [
-            ("A/B/C", {"X": 1}),
-            ("A/B/D/E", 2),
-        ]
-    ) == ("A/B", {"C": {"X": 1}, "D": {"E": 2}})
diff --git a/tests/test_launcher.py b/tests/test_launcher.py
index 114bfdd4de5..31e34c774cb 100644
--- a/tests/test_launcher.py
+++ b/tests/test_launcher.py
@@ -1,6 +1,7 @@
 import os
 from pathlib import Path
 import platform
+import tempfile
 from tempfile import TemporaryDirectory
 
 import pytest
@@ -412,9 +413,8 @@ def test_build_journal_argument(topy, journal_file_names, result, raises):
         assert _build_journal_argument(topy, journal_file_names) == result
 
 
-@pytest.mark.filterwarnings("error::FutureWarning")
 def test_show_gui_raises_warning():
-    with pytest.raises(PyFluentDeprecationWarning):
+    with pytest.warns(PyFluentDeprecationWarning):
         pyfluent.launch_fluent(show_gui=True)
 
 
@@ -499,3 +499,31 @@ def test_fluent_automatic_transcript(monkeypatch):
     with TemporaryDirectory(dir=pyfluent.EXAMPLES_PATH) as tmp_dir:
         with pyfluent.launch_fluent(container_dict=dict(working_dir=tmp_dir)):
             assert not list(Path(tmp_dir).glob("*.trn"))
+
+
+def test_standalone_launcher_dry_run(monkeypatch):
+    monkeypatch.setenv("PYFLUENT_LAUNCH_CONTAINER", "0")
+    fluent_path = r"\x\y\z\fluent.exe"
+    fluent_launch_string, server_info_file_name = pyfluent.launch_fluent(
+        fluent_path=fluent_path, dry_run=True, ui_mode="no_gui"
+    )
+    assert str(Path(server_info_file_name).parent) == tempfile.gettempdir()
+    assert (
+        fluent_launch_string
+        == f"{fluent_path} 3ddp -gu -sifile={server_info_file_name} -nm"
+    )
+
+
+def test_standalone_launcher_dry_run_with_server_info_dir(monkeypatch):
+    monkeypatch.setenv("PYFLUENT_LAUNCH_CONTAINER", "0")
+    with tempfile.TemporaryDirectory() as tmp_dir:
+        monkeypatch.setenv("SERVER_INFO_DIR", tmp_dir)
+        fluent_path = r"\x\y\z\fluent.exe"
+        fluent_launch_string, server_info_file_name = pyfluent.launch_fluent(
+            fluent_path=fluent_path, dry_run=True, ui_mode="no_gui"
+        )
+        assert str(Path(server_info_file_name).parent) == tmp_dir
+        assert (
+            fluent_launch_string
+            == f"{fluent_path} 3ddp -gu -sifile={Path(server_info_file_name).name} -nm"
+        )
diff --git a/tests/test_launcher_remote.py b/tests/test_launcher_remote.py
index 7815a741bb6..21d9d7a1949 100644
--- a/tests/test_launcher_remote.py
+++ b/tests/test_launcher_remote.py
@@ -2,6 +2,8 @@
 
 from concurrent import futures
 import os
+from pathlib import Path
+import shutil
 from unittest.mock import create_autospec
 import uuid
 
@@ -12,7 +14,7 @@
 from ansys.api.fluent.v0 import scheme_eval_pb2_grpc
 import ansys.fluent.core as pyfluent
-from ansys.fluent.core import examples
+from ansys.fluent.core import EXAMPLES_PATH, examples
 from ansys.fluent.core.fluent_connection import (
     FluentConnection,
     UnsupportedRemoteFluentInstance,
@@ -26,7 +28,6 @@
 from ansys.fluent.core.utils.fluent_version import FluentVersion
 from ansys.fluent.core.utils.networking import get_free_port
 import ansys.platform.instancemanagement as pypim
-from tests.util import rename_downloaded_file
 
 
 def test_launch_remote_instance(monkeypatch, new_solver_session):
@@ -143,6 +144,81 @@ def is_configured(self):
         return True
 
 
+def rename_downloaded_file(file_path: str, suffix: str) -> str:
+    """Rename downloaded file by appending a suffix to the file name.
+
+    Parameters
+    ----------
+    file_path : str
+        Downloaded file path. Can be absolute or relative.
+    suffix : str
+        Suffix to append to the file name.
+
+    Returns
+    -------
+    str
+        New file path with the suffix appended to the file name.
+    """
+    ext = "".join(Path(file_path).suffixes)
+    orig_path = Path(file_path)
+    file_path = file_path.removesuffix(ext)
+    file_path = Path(file_path)
+    if file_path.is_absolute():
+        new_stem = f"{file_path.stem}{suffix}"
+        new_path = file_path.with_stem(new_stem)
+        new_path = new_path.with_suffix(ext)
+        orig_path.rename(new_path)
+        return str(new_path)
+    else:
+        orig_abs_path = Path(EXAMPLES_PATH) / orig_path
+        abs_path = Path(EXAMPLES_PATH) / file_path
+        new_stem = f"{file_path.stem}{suffix}"
+        new_path = abs_path.with_stem(new_stem)
+        new_path = new_path.with_suffix(ext)
+        orig_abs_path.rename(new_path)
+        return str(file_path.with_stem(new_stem).with_suffix(ext))
+
+
+@pytest.mark.parametrize(
+    "ext,a,b,c,d",
+    [(".cas", "a1", "b1", "c1", "d1"), (".cas.gz", "a2", "b2", "c2", "d2")],
+)
+def test_rename_downloaded_file(ext, a, b, c, d):
+    try:
+        file_path = Path(EXAMPLES_PATH) / f"{a}{ext}"
+        file_path.touch()
+        file_path = str(file_path)
+        new_file_path = rename_downloaded_file(file_path, "_1")
+        assert new_file_path == str(Path(EXAMPLES_PATH) / f"{a}_1{ext}")
+    except Exception:
+        raise
+    finally:
+        Path(new_file_path).unlink(missing_ok=True)
+
+    try:
+        file_path = f"{b}{ext}"
+        (Path(EXAMPLES_PATH) / file_path).touch()
+        new_file_path = rename_downloaded_file(file_path, "_1")
+        assert new_file_path == f"{b}_1{ext}"
+    except Exception:
+        raise
+    finally:
+        (Path(EXAMPLES_PATH) / new_file_path).unlink(missing_ok=True)
+
+    try:
+        dir_path = Path(EXAMPLES_PATH) / c
+        dir_path.mkdir()
+        file_path = dir_path / f"{d}{ext}"
+        file_path.touch()
+        file_path = str(Path(c) / f"{d}{ext}")
+        new_file_path = rename_downloaded_file(file_path, "_1")
+        assert new_file_path == str(Path(c) / f"{d}_1{ext}")
+    except Exception:
+        raise
+    finally:
+        shutil.rmtree(dir_path, ignore_errors=True)
+
+
 @pytest.mark.codegen_required
 @pytest.mark.fluent_version(">=24.2")
 def test_file_purpose_on_remote_instance(
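Since `rename_downloaded_file` joins *all* suffixes, the inserted suffix lands before the full compound extension; worked examples (values follow from the parametrized test above, with `/examples` as an illustrative stand-in for `EXAMPLES_PATH`):

    rename_downloaded_file("/examples/a2.cas.gz", "_1")  # -> "/examples/a2_1.cas.gz"
    rename_downloaded_file("b1.cas", "_1")               # -> "b1_1.cas", resolved under EXAMPLES_PATH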
\\"3\\"\n' + ' default = \\"2\\"\n' + " isNumerical = True\n" + " END\n" + " INTEGER: Z\n" + " END\n" + " SINGLETON: ROOT\n" + " members = A\n" + " commands = C, D\n" + " SINGLETON: A\n" + " members = X, Y, Z\n" + " END\n" + " COMMAND: C\n" + " arguments = X\n" + " functionName = CFunc\n" + " END\n" + " COMMAND: D\n" + " arguments = X\n" + " functionName = CFunc\n" + " APIName = dd\n" + " END\n" + " END\n" + "END\n" +) + + +rules_str_caps = ( + "RULES:\n" + " STRING: X\n" + " allowedValues = Yes, No\n" + " default = No\n" + " logicalMapping = True, False\n" + " END\n" + " SINGLETON: ROOT\n" + " members = A\n" + " SINGLETON: A\n" + " members = X\n" + " END\n" + " END\n" + "END\n" +) + + +def get_static_info_value(static_info, type_path): + for p in type_path.removeprefix("/").split("/"): + static_info = static_info[p] + return static_info + + +def get_state_from_remote_app(session, app_name, type_path): + return session.scheme_eval.scheme_eval( + f'(state/object/get-state (state/object/find-child (state/find-root "{app_name}") "{type_path}"))' + ) + + +def get_error_state_message_from_remote_app(session, app_name, type_path): + return session.scheme_eval.scheme_eval( + f'(state/object/get-error-state-message (state/object/find-child (state/find-root "{app_name}") "{type_path}"))' + ) + + +@pytest.mark.fluent_version(">=25.2") +def test_datamodel_api_bool_for_str_has_correct_type( + datamodel_api_version_new, new_solver_session +): + solver = new_solver_session + app_name = "test" + create_datamodel_root_in_server(solver, rules_str, app_name) + service = solver._se_service + static_info = service.get_static_info("test") + assert ( + get_static_info_value(static_info, "/singletons/A/parameters/X/type") + == "Logical" + ) + cmd_args = get_static_info_value(static_info, "/commands/C/commandinfo/args") + arg0 = cmd_args[0] + assert arg0["type"] == "Logical" + + +@pytest.mark.fluent_version(">=25.2") +def test_datamodel_api_set_bool_for_str(datamodel_api_version_new, new_solver_session): + solver = new_solver_session + app_name = "test" + create_datamodel_root_in_server(solver, rules_str, app_name) + service = solver._se_service + service.set_state(app_name, "/A/X", "yes") + assert service.get_state(app_name, "/A/X") is True + assert get_state_from_remote_app(solver, app_name, "/A/X") == "yes" + + +@pytest.mark.fluent_version(">=25.2") +def test_datamodel_api_set_bool_nested_for_str( + datamodel_api_version_new, new_solver_session +): + solver = new_solver_session + app_name = "test" + create_datamodel_root_in_server(solver, rules_str, app_name) + service = solver._se_service + service.set_state(app_name, "/A", {"X": True}) + assert service.get_state(app_name, "/A/X") is True + assert get_error_state_message_from_remote_app(solver, app_name, "/A/X") is None + + +@pytest.mark.fluent_version(">=25.2") +def test_datamodel_api_get_set_bool_for_str_with_flexible_strs_no_errors( + datamodel_api_version_new, new_solver_session +): + solver = new_solver_session + app_name = "test" + create_datamodel_root_in_server(solver, rules_str_caps, app_name) + service = solver._se_service + service.set_state(app_name, "/A/X", True) + assert service.get_state(app_name, "/A/X") is True + assert get_error_state_message_from_remote_app(solver, app_name, "/A/X") is None + + +@pytest.mark.fluent_version(">=25.2") +def test_datamodel_api_get_attrs_bool_for_str( + datamodel_api_version_new, new_solver_session +): + solver = new_solver_session + app_name = "test" + create_datamodel_root_in_server(solver, rules_str, 
app_name) + service = solver._se_service + assert service.get_attribute_value(app_name, "/A/Z", "allowedValues") is None + assert service.get_attribute_value(app_name, "/A/X", "allowedValues") is None + + +@pytest.mark.fluent_version(">=25.2") +def test_datamodel_api_get_and_set_int_for_str( + datamodel_api_version_new, new_solver_session +): + solver = new_solver_session + app_name = "test" + create_datamodel_root_in_server(solver, rules_str, app_name) + service = solver._se_service + service.set_state(app_name, "/A/Y", 1) + assert service.get_state(app_name, "/A/Y") == 1 + assert get_error_state_message_from_remote_app(solver, app_name, "/A/Y") is None + + +# TODO: what are the equivalent of following tests in Python? +# testPopulateMappingAttrTablePaths +# testMapAPIStateToDM +# testMapDMStateToAPI +# testMapNestedAPIStateToDM +# testUpdateStateDictWithMapping + + +@pytest.mark.fluent_version(">=25.2") +def test_state_of_command_args_with_mapping( + datamodel_api_version_new, new_solver_session +): + solver = new_solver_session + app_name = "test" + create_datamodel_root_in_server(solver, rules_str, app_name) + service = solver._se_service + c_name = service.create_command_arguments(app_name, "/", "C") + with pytest.raises(RuntimeError): + service.set_state(app_name, f"/C:{c_name}/X", False) + assert service.get_state(app_name, f"/C:{c_name}") == {"X": None} + service.set_state(app_name, f"/C:{c_name}", {"X": False}) + assert service.get_state(app_name, f"/C:{c_name}") == {"X": False} + service.set_state(app_name, f"/C:{c_name}", {"X": True}) + assert service.get_state(app_name, f"/C:{c_name}") == {"X": True} + + +def register_external_function_in_remote_app(session, app_name, func_name): + session.scheme_eval.scheme_eval( + f'(state/register-external-fn "{app_name}" "{func_name}" (lambda (obj . 
args) (car args)) (cons "Variant" (list "ModelObject" "Variant")))' + ) + + +@pytest.mark.fluent_version(">=25.2") +def test_execute_command_with_args_mapping( + datamodel_api_version_new, new_solver_session +): + solver = new_solver_session + app_name = "test" + create_datamodel_root_in_server(solver, rules_str, app_name) + service = solver._se_service + register_external_function_in_remote_app(solver, app_name, "CFunc") + result = service.execute_command(app_name, "/", "C", {"X": True}) + assert result == "yes" + + +@pytest.mark.fluent_version(">=25.2") +def test_execute_command_with_args_and_path_mapping( + datamodel_api_version_new, new_solver_session +): + solver = new_solver_session + app_name = "test" + create_datamodel_root_in_server(solver, rules_str, app_name) + service = solver._se_service + register_external_function_in_remote_app(solver, app_name, "CFunc") + result = service.execute_command(app_name, "/", "dd", {"X": True}) + assert result == "yes" + + +@pytest.mark.fluent_version(">=25.2") +def test_execute_query_with_args_mapping(datamodel_api_version_new, new_solver_session): + rules_str = ( + "RULES:\n" + " STRING: X\n" + " allowedValues = yes, no\n" + " logicalMapping = True, False\n" + " END\n" + " SINGLETON: ROOT\n" + " queries = Q\n" + " QUERY: Q\n" + " arguments = X\n" + " functionName = QFunc\n" + " END\n" + " END\n" + "END\n" + ) + solver = new_solver_session + app_name = "test" + create_datamodel_root_in_server(solver, rules_str, app_name) + service = solver._se_service + register_external_function_in_remote_app(solver, app_name, "QFunc") + result = service.execute_query(app_name, "/", "Q", {"X": True}) + assert result == "yes" + + +@pytest.mark.fluent_version(">=25.2") +def test_get_mapped_attr(datamodel_api_version_new, new_solver_session): + solver = new_solver_session + app_name = "test" + create_datamodel_root_in_server(solver, rules_str, app_name) + service = solver._se_service + assert service.get_attribute_value(app_name, "/A/X", "allowedValues") is None + assert service.get_attribute_value(app_name, "/A/Y", "allowedValues") is None + assert service.get_attribute_value(app_name, "/A/Y", "min") == 1 + assert service.get_attribute_value(app_name, "/A/Y", "max") == 3 + assert service.get_attribute_value(app_name, "/A/Y", "default") == 2 + + +@pytest.mark.fluent_version(">=25.2") +def test_get_mapped_attr_defaults(datamodel_api_version_new, new_solver_session): + rules_str = ( + "RULES:\n" + " STRING: X\n" + " allowedValues = yes, no\n" + " default = no\n" + " logicalMapping = True, False\n" + " END\n" + " STRING: Y\n" + ' allowedValues = \\"1\\", \\"2\\", \\"3\\"\n' + ' default = \\"2\\"\n' + " isNumerical = True\n" + " END\n" + " INTEGER: Z\n" + " default = 42\n" + " END\n" + " SINGLETON: ROOT\n" + " members = A\n" + " SINGLETON: A\n" + " members = X, Y, Z\n" + " END\n" + " END\n" + "END\n" + ) + solver = new_solver_session + app_name = "test" + create_datamodel_root_in_server(solver, rules_str, app_name) + service = solver._se_service + assert service.get_attribute_value(app_name, "/A/X", "default") is False + assert service.get_attribute_value(app_name, "/A/Y", "default") == 2 + assert service.get_attribute_value(app_name, "/A/Z", "default") == 42 + + +@pytest.mark.fluent_version(">=25.2") +def test_get_mapped_enum_attr(datamodel_api_version_new, new_solver_session): + rules_str = ( + "RULES:\n" + " STRING: X\n" + " allowedValues = ijk, lmn\n" + " default = lmn\n" + " enum = green, yellow\n" + " END\n" + " SINGLETON: ROOT\n" + " members = A\n" + " 
+@pytest.mark.fluent_version(">=25.2")
+def test_get_mapped_enum_attr(datamodel_api_version_new, new_solver_session):
+    rules_str = (
+        "RULES:\n"
+        "  STRING: X\n"
+        "    allowedValues = ijk, lmn\n"
+        "    default = lmn\n"
+        "    enum = green, yellow\n"
+        "  END\n"
+        "  SINGLETON: ROOT\n"
+        "    members = A\n"
+        "    SINGLETON: A\n"
+        "      members = X\n"
+        "    END\n"
+        "  END\n"
+        "END\n"
+    )
+    solver = new_solver_session
+    app_name = "test"
+    create_datamodel_root_in_server(solver, rules_str, app_name)
+    service = solver._se_service
+    assert service.get_attribute_value(app_name, "/A/X", "allowedValues") == [
+        "green",
+        "yellow",
+    ]
+    assert service.get_attribute_value(app_name, "/A/X", "default") == "yellow"
+
+
+@pytest.mark.fluent_version(">=25.2")
+def test_get_mapped_dynamic_enum_attr(datamodel_api_version_new, new_solver_session):
+    rules_str = (
+        "RULES:\n"
+        "  LOGICAL: B\n"
+        "    default = True\n"
+        "  END\n"
+        "  STRING: X\n"
+        '    allowedValues = IF($../B, (\\"ijk\\", \\"lmn\\"), (\\"ijk\\", \\"lmn\\", \\"opq\\"))\n'
+        "    default = lmn\n"
+        '    enum = IF($../B, (\\"green\\", \\"yellow\\"), (\\"green\\", \\"yellow\\", \\"blue\\"))\n'
+        "  END\n"
+        "  SINGLETON: ROOT\n"
+        "    members = A\n"
+        "    SINGLETON: A\n"
+        "      members = B, X\n"
+        "    END\n"
+        "  END\n"
+        "END\n"
+    )
+    solver = new_solver_session
+    app_name = "test"
+    create_datamodel_root_in_server(solver, rules_str, app_name)
+    service = solver._se_service
+    assert service.get_attribute_value(app_name, "/A/X", "allowedValues") == [
+        "green",
+        "yellow",
+    ]
+    assert service.get_attribute_value(app_name, "/A/X", "default") == "yellow"
+
+
+@pytest.mark.fluent_version(">=25.2")
+def test_get_mapped_command_attr(datamodel_api_version_new, new_solver_session):
+    rules_str = (
+        "RULES:\n"
+        "  STRING: X\n"
+        "    allowedValues = yes, no\n"
+        "    default = no\n"
+        "    logicalMapping = True, False\n"
+        "  END\n"
+        "  STRING: Y\n"
+        '    allowedValues = \\"1\\", \\"2\\", \\"3\\"\n'
+        '    default = \\"2\\"\n'
+        "    isNumerical = True\n"
+        "  END\n"
+        "  INTEGER: Z\n"
+        "    default = 42\n"
+        "  END\n"
+        "  SINGLETON: ROOT\n"
+        "    commands = C\n"
+        "    COMMAND: C\n"
+        "      arguments = X, Y, Z\n"
+        "    END\n"
+        "  END\n"
+        "END\n"
+    )
+    solver = new_solver_session
+    app_name = "test"
+    create_datamodel_root_in_server(solver, rules_str, app_name)
+    service = solver._se_service
+    c_name = service.create_command_arguments(app_name, "/", "C")
+    # TODO: Attribute query at command argument level is not working
+    assert (
+        service.get_attribute_value(app_name, f"/C:{c_name}", "X/allowedValues") is None
+    )
+    assert (
+        service.get_attribute_value(app_name, f"/C:{c_name}", "Y/allowedValues") is None
+    )
+    assert service.get_attribute_value(app_name, f"/C:{c_name}", "Y/min") == 1
+    assert service.get_attribute_value(app_name, f"/C:{c_name}", "Y/max") == 3
+    assert service.get_attribute_value(app_name, f"/C:{c_name}", "X/default") is False
+    assert service.get_attribute_value(app_name, f"/C:{c_name}", "Y/default") == 2
+    assert service.get_attribute_value(app_name, f"/C:{c_name}", "Z/default") == 42
+
+
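+# Change notifications should also be delivered in mapped form: the /A/X
+# callback below is expected to see True/False, never the raw yes/no strings.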
+@pytest.mark.fluent_version(">=25.2")
+def test_on_changed_is_mapped(datamodel_api_version_new, new_solver_session):
+    solver = new_solver_session
+    app_name = "test"
+    create_datamodel_root_in_server(solver, rules_str, app_name)
+    service = solver._se_service
+    root = create_root_using_datamodelgen(service, app_name)
+
+    called = 0
+    state = None
+    called_obj = 0
+    state_obj = None
+
+    def on_changed(value):
+        nonlocal called
+        nonlocal state
+        state = value()
+        called += 1
+
+    def on_changed_obj(value):
+        nonlocal called_obj
+        nonlocal state_obj
+        state_obj = value()
+        called_obj += 1
+
+    subscription = service.add_on_changed(app_name, "/A/X", root.A.X, on_changed)
+    subscription_obj = service.add_on_changed(app_name, "/A", root.A, on_changed_obj)
+
+    assert called == 0
+    assert state is None
+    assert called_obj == 0
+    assert state_obj is None
+
+    service.set_state(app_name, "/A/X", True)
+    timeout_loop(lambda: called == 1, timeout=5)
+    assert called == 1
+    assert state is True
+    assert called_obj == 1
+    assert state_obj == {"X": True, "Y": 2, "Z": None}
+
+    service.set_state(app_name, "/A/X", False)
+    timeout_loop(lambda: called == 2, timeout=5)
+    assert called == 2
+    assert state is False
+    assert called_obj == 2
+    assert state_obj == {"X": False, "Y": 2, "Z": None}
+
+    subscription.unsubscribe()
+    subscription_obj.unsubscribe()
+
+    service.set_state(app_name, "/A/X", True)
+    time.sleep(5)
+    assert called == 2
+    assert state is False
+    assert called_obj == 2
+    assert state_obj == {"X": False, "Y": 2, "Z": None}
+
+
+@pytest.mark.fluent_version(">=25.2")
+def test_mapped_on_attribute_changed(datamodel_api_version_new, new_solver_session):
+    rules_str = (
+        "RULES:\n"
+        "  STRING: X\n"
+        "    allowedValues = yes, no\n"
+        "    default = $../Y\n"
+        "    logicalMapping = True, False\n"
+        "  END\n"
+        "  STRING: Y\n"
+        "  END\n"
+        "  SINGLETON: ROOT\n"
+        "    members = A\n"
+        "    commands = C\n"
+        "    SINGLETON: A\n"
+        "      members = X, Y\n"
+        "    END\n"
+        "    COMMAND: C\n"
+        "      arguments = X, Y\n"
+        "    END\n"
+        "  END\n"
+        "END\n"
+    )
+
+    solver = new_solver_session
+    app_name = "test"
+    create_datamodel_root_in_server(solver, rules_str, app_name)
+    service = solver._se_service
+    root = create_root_using_datamodelgen(service, app_name)
+    called = 0
+    value = None
+
+    def cb(val):
+        nonlocal called
+        nonlocal value
+        value = val
+        called += 1
+
+    subscription = service.add_on_attribute_changed(
+        app_name, "/A/X", "default", root.A.X, cb
+    )
+    assert called == 0
+    assert value is None
+
+    service.set_state(app_name, "/A/Y", "no")
+    timeout_loop(lambda: called == 1, timeout=5)
+    assert called == 1
+    assert value is False
+
+    service.set_state(app_name, "/A/Y", "yes")
+    timeout_loop(lambda: called == 2, timeout=5)
+    assert called == 2
+    assert value is True
+
+    subscription.unsubscribe()
+    service.set_state(app_name, "/A/Y", "no")
+    time.sleep(5)
+    assert called == 2
+    assert value is True
+
+
+@pytest.mark.fluent_version(">=25.2")
+def test_datamodel_api_on_command_executed_mapped_args(
+    datamodel_api_version_new, new_solver_session
+):
+    solver = new_solver_session
+    app_name = "test"
+    create_datamodel_root_in_server(solver, rules_str, app_name)
+    service = solver._se_service
+    root = create_root_using_datamodelgen(service, app_name)
+    register_external_function_in_remote_app(solver, app_name, "CFunc")
+    executed = False
+    command = None
+    arguments = None
+
+    def cb(obj, cmd, args):
+        nonlocal executed
+        nonlocal command
+        nonlocal arguments
+        command = cmd
+        arguments = args
+        executed = True
+
+    subscription = service.add_on_command_executed(app_name, "/", "C", root, cb)
+    assert not executed
+    assert command is None
+    assert arguments is None
+
+    service.execute_command(app_name, "/", "C", {"X": True})
+    timeout_loop(lambda: executed, timeout=5)
+    assert executed
+    assert command == "C"
+    assert arguments == {"X": True}
+
+    executed = False
+    command = None
+    arguments = None
+
+    subscription.unsubscribe()
+    service.execute_command(app_name, "/", "C", {"X": False})
+    time.sleep(5)
+    assert not executed
+    assert command is None
+    assert arguments is None
+
+
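+# Rules with internal (dunder-prefixed) names that carry an APIName attribute:
+# the client-facing API is expected to see only the mapped names
+# (aaa, xxx, yyy, ccc, eee), never the dunder originals.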
\\"2\\"\n' + " isNumerical = True\n" + " APIName = yyy\n" + " END\n" + " INTEGER: Z\n" + " END\n" + " SINGLETON: ROOT\n" + " members = __A, B, __E\n" + " commands = __C, D\n" + " SINGLETON: __A\n" + " members = __X\n" + " APIName = aaa\n" + " END\n" + " OBJECT: B\n" + " members = __Y, Z\n" + " END\n" + " OBJECT: __E\n" + " members = __Y\n" + " APIName = eee\n" + " END\n" + " COMMAND: __C\n" + " arguments = __X\n" + " functionName = CFunc\n" + " APIName = ccc\n" + " END\n" + " COMMAND: D\n" + " arguments = __X\n" + " functionName = CFunc\n" + " END\n" + " END\n" + "END\n" +) + + +@pytest.mark.fluent_version(">=25.2") +def test_datamodel_api_with_mapped_names(datamodel_api_version_new, new_solver_session): + solver = new_solver_session + app_name = "test" + create_datamodel_root_in_server(solver, api_name_rules_str, app_name) + service = solver._se_service + static_info = service.get_static_info(app_name) + assert ( + get_static_info_value(static_info, "/singletons/aaa/parameters/xxx/type") + == "Logical" + ) + assert ( + get_static_info_value(static_info, "/namedobjects/B/parameters/yyy/type") + == "Integer" + ) + assert ( + get_static_info_value(static_info, "/namedobjects/B/parameters/Z/type") + == "Integer" + ) + + command_args = [ + { + "helpstring": "", + "name": "xxx", + "type": "Logical", + } + ] + command_args = [sorted(x.items()) for x in command_args] + ccc_args = get_static_info_value( # noqa: F841 + static_info, "/commands/ccc/commandinfo/args" + ) + # TODO: helpstring is not being set + # assert command_args == [sorted(x.items()) for x in ccc_args] + d_args = get_static_info_value( # noqa: F841 + static_info, "/commands/D/commandinfo/args" + ) + # TODO: helpstring is not being returned + # assert command_args == [sorted(x.items()) for x in d_args] + + +# TODO: what are the equivalent of following tests in Python? 
+# TODO: what are the equivalents of the following tests in Python?
+# testMapperMapDataModelPathToAPIPath
+# testMapperMapAPIPathToDataModelPath
+# testMapperMapDMValueToAPI
+
+
+@pytest.mark.fluent_version(">=25.2")
+def test_datamodel_api_root_get_and_set_state_with_mapped_names(
+    datamodel_api_version_new, new_solver_session
+):
+    solver = new_solver_session
+    app_name = "test"
+    create_datamodel_root_in_server(solver, api_name_rules_str, app_name)
+    service = solver._se_service
+    assert service.get_state(app_name, "/") == {"aaa": {"xxx": None}}
+    service.set_state(app_name, "/__A/__X", "yes")
+    assert service.get_state(app_name, "/") == {"aaa": {"xxx": True}}
+    service.set_state(app_name, "/", {"aaa": {"xxx": False}})
+    assert service.get_state(app_name, "/") == {"aaa": {"xxx": False}}
+
+
+@pytest.mark.fluent_version(">=25.2")
+def test_datamodel_api_root_get_attrs_with_mapped_names(
+    datamodel_api_version_new, new_solver_session
+):
+    solver = new_solver_session
+    app_name = "test"
+    create_datamodel_root_in_server(solver, api_name_rules_str, app_name)
+    service = solver._se_service
+    assert service.get_attribute_value(app_name, "/aaa/xxx", "attr1") == 42.0
+    service.set_state(app_name, "/", {"B:b": {}})
+    assert service.get_attribute_value(app_name, "/B:b/yyy", "default") == 2
+
+
+@pytest.mark.fluent_version(">=25.2")
+def test_datamodel_api_cmd_args_op_with_mapped_names(
+    datamodel_api_version_new, new_solver_session
+):
+    solver = new_solver_session
+    app_name = "test"
+    create_datamodel_root_in_server(solver, api_name_rules_str, app_name)
+    service = solver._se_service
+    c_name = service.create_command_arguments(app_name, "/", "ccc")
+    x_path_str = f"/__C:{c_name}/xxx"  # noqa: F841
+    # TODO: issue
+    # service.set_state(app_name, x_path_str, True)
+    service.set_state(app_name, f"/__C:{c_name}", {"xxx": True})
+    assert service.get_state(app_name, f"/__C:{c_name}") == {"xxx": True}
+    assert service.get_attribute_value(app_name, f"/__C:{c_name}", "xxx/attr1") == 42.0
+
+
+@pytest.mark.fluent_version(">=25.2")
+def test_datamodel_api_rename_with_mapped_names(
+    datamodel_api_version_new, new_solver_session
+):
+    solver = new_solver_session
+    app_name = "test"
+    create_datamodel_root_in_server(solver, api_name_rules_str, app_name)
+    service = solver._se_service
+    service.set_state(app_name, "/", {"B:b": {}})
+    service.rename(app_name, "/B:b", "c")
+    service.set_state(app_name, "/", {"eee:e": {}})
+    assert service.get_state(app_name, "/B:c/yyy") == 2
+    service.rename(app_name, "/eee:e", "x")
+    assert service.get_state(app_name, "/eee:x/yyy") == 2
+
+
+@pytest.mark.fluent_version(">=25.2")
+def test_datamodel_api_delete_object_with_mapped_names(
+    datamodel_api_version_new, new_solver_session
+):
+    solver = new_solver_session
+    app_name = "test"
+    create_datamodel_root_in_server(solver, api_name_rules_str, app_name)
+    service = solver._se_service
+    service.set_state(app_name, "/", {"B:b": {}})
+    service.delete_object(app_name, "/B:b")
+
+
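+# The two skipped tests below poke dunder members (root.__E, .__Y) directly;
+# as the in-test TODOs note, name mangling of dunder members prevents this
+# from working yet.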
+@pytest.mark.skip
+@pytest.mark.fluent_version(">=25.2")
+def test_datamodel_api_on_created_on_changed_on_deleted_with_mapped_names(
+    datamodel_api_version_new, new_solver_session
+):
+    solver = new_solver_session
+    app_name = "test"
+    create_datamodel_root_in_server(solver, api_name_rules_str, app_name)
+    service = solver._se_service
+    root = create_root_using_datamodelgen(service, app_name)
+    called_paths = []
+    delete_count = 0
+    changes = []
+
+    def create_cb(obj):
+        called_paths.append(convert_path_to_se_path(obj.path))
+
+    def delete_cb():
+        nonlocal delete_count
+        delete_count += 1
+
+    def changed_cb(value):
+        changes.append(value())
+
+    service.add_on_child_created(app_name, "/", "eee", root, create_cb)
+    # TODO: fails at the event streaming callback of on_child_created,
+    # as the name "eee" is not available on the PyFluent side.
+    service.set_state(app_name, "/", {"eee:b": {}})
+    service.set_state(app_name, "/", {"eee:c": {}})
+    service.set_state(app_name, "/", {"B:d": {}})
+    service.add_on_deleted(app_name, "/eee:b", root, delete_cb)
+    service.add_on_deleted(app_name, "/eee:c", root, delete_cb)
+    # TODO: Affected by name mangling of dunder members
+    service.add_on_changed(app_name, "/eee:b/yyy", root.__E["b"].__Y, changed_cb)
+    service.delete_object(app_name, "/eee:c")
+    service.set_state(app_name, "/", {"eee:b": {"yyy": 42}})
+    assert called_paths == ["/eee:b", "/eee:c"]
+    assert delete_count == 1
+    assert changes == [42]
+
+
+@pytest.mark.skip
+@pytest.mark.fluent_version(">=25.2")
+def test_datamodel_api_on_changed_with_mapped_names(
+    datamodel_api_version_new, new_solver_session
+):
+    solver = new_solver_session
+    app_name = "test"
+    create_datamodel_root_in_server(solver, api_name_rules_str, app_name)
+    service = solver._se_service
+    root = create_root_using_datamodelgen(service, app_name)
+    changes = []
+
+    def changed_cb(value):
+        changes.append(value())
+
+    service.set_state(app_name, "/", {"eee:b": {}})
+    # TODO: Can't get this working due to name mangling of dunder members
+    service.add_on_changed(app_name, "/eee:b/yyy", root.__E["b"].__Y, changed_cb)
+    service.set_state(app_name, "/", {"eee:b": {"yyy": 42}})
+    assert changes == [42]
+
+
+# TODO: what are the equivalents of the following tests in Python?
+# testDataModelAPIWithNullCustomNameMapper
+# testDataModelAPIWithAppendingCustomNameMapper
+# testDataModelAPIWithSnakeyCustomNameMapper
+# testDataModelAPIWithSnakeyCustomNameMapperAndMoreCamels
diff --git a/tests/test_new_meshing_workflow.py b/tests/test_new_meshing_workflow.py
index 8e7b83d781f..83ebae8ce2b 100644
--- a/tests/test_new_meshing_workflow.py
+++ b/tests/test_new_meshing_workflow.py
@@ -5,7 +5,6 @@
 from ansys.fluent.core import FluentVersion, examples
 from ansys.fluent.core.workflow import camel_to_snake_case
-from tests.conftest import new_meshing_session
 
 
 @pytest.mark.nightly
@@ -1468,7 +1467,9 @@ def test_created_workflow(new_meshing_session):
     )
 
 
-new_meshing_session2 = new_meshing_session
+@pytest.fixture
+def new_meshing_session2(new_meshing_session):
+    return new_meshing_session
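+# A thin wrapper fixture replaces the old module-level alias, which required
+# importing the fixture from tests.conftest (import removed above).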
 
 
 @pytest.mark.codegen_required
diff --git a/tests/test_reduction.py b/tests/test_reduction.py
index 06924b8146d..405174801ad 100644
--- a/tests/test_reduction.py
+++ b/tests/test_reduction.py
@@ -2,7 +2,9 @@
 
 import pytest
 
+from ansys.fluent.core.examples import download_file
 from ansys.fluent.core.services.reduction import _locn_names_and_objs
+from ansys.fluent.core.solver.function import reduction
 
 
 def _test_locn_extraction(solver1, solver2):
@@ -70,29 +72,22 @@ def _test_area_average(solver):
 
 
 def _test_min(solver1, solver2):
-    solver1.solution.initialization.hybrid_initialize()
-    solver2.solution.initialization.hybrid_initialize()
-    solver1_named_expr = solver1.setup.named_expressions
-    solver1_named_expr["test_expr_1"] = {}
-    test_expr1 = solver1_named_expr["test_expr_1"]
-    test_expr1.definition = "sqrt(VelocityMagnitude)"
-    solver2_named_expr = solver2.setup.named_expressions
-    solver2_named_expr["test_expr_2"] = {}
-    test_expr2 = solver2_named_expr["test_expr_2"]
-    test_expr2.definition = "minimum(test_expr_2, ['outlet'])"
-    # (MK) Is the expression definition valid?
-    # expected_result = test_expr2.get_value()
-    solver1.fields.reduction.minimum(
-        expression=test_expr1.definition(),
+    s1_min = solver1.fields.reduction.minimum(
+        expression="AbsolutePressure",
+        locations=[solver1.setup.boundary_conditions.velocity_inlet],
+    )
+    s2_min = solver2.fields.reduction.minimum(
+        expression="AbsolutePressure",
+        locations=[solver2.setup.boundary_conditions.velocity_inlet],
+    )
+    result = reduction.minimum(
+        expression="AbsolutePressure",
         locations=[
-            solver1.setup.boundary_conditions["outlet"],
-            solver2.setup.boundary_conditions["outlet"],
+            solver1.setup.boundary_conditions.velocity_inlet,
+            solver2.setup.boundary_conditions.velocity_inlet,
         ],
     )
-
-    # assert result == expected_result
-    solver1.setup.named_expressions.pop(key="test_expr_1")
-    solver1.setup.named_expressions.pop(key="test_expr_2")
+    assert result == min(s1_min, s2_min)
 
 
 def _test_count(solver):
@@ -367,6 +362,25 @@ def _test_sum_if(solver):
     solver.setup.named_expressions.pop(key="test_expr_1")
 
 
+def _test_centroid_2_sources(solver1, solver2):
+    s1_cent = solver1.fields.reduction.centroid(
+        locations=[solver1.setup.boundary_conditions.velocity_inlet]
+    )
+    s2_cent = solver2.fields.reduction.centroid(
+        locations=[solver2.setup.boundary_conditions.velocity_inlet]
+    )
+
+    result = reduction.centroid(
+        locations=[
+            solver1.setup.boundary_conditions.velocity_inlet,
+            solver2.setup.boundary_conditions.velocity_inlet,
+        ]
+    )
+    assert [round(x, 5) for x in result] == [
+        (round(x, 5) + round(y, 5)) / 2 for x, y in zip(*[s1_cent, s2_cent])
+    ]
+
+
 @pytest.fixture
 def static_mixer_case_session2(static_mixer_case_session: Any):
     return static_mixer_case_session
@@ -382,7 +396,6 @@ def test_reductions(
     _test_context(solver1)
     _test_locn_extraction(solver1, solver2)
    _test_area_average(solver1)
-    _test_min(solver1, solver2)
     _test_count(solver1)
     _test_count_if(solver1)
     _test_centroid(solver1)
@@ -392,6 +405,19 @@
     _test_moment(solver1)
     _test_sum(solver1)
     _test_sum_if(solver1)
+    # The case and data are changed after this point to check the functional
+    # reduction with multiple solvers.
+    case_path = download_file(
+        file_name="exhaust_system.cas.h5", directory="pyfluent/exhaust_system"
+    )
+    download_file(
+        file_name="exhaust_system.dat.h5", directory="pyfluent/exhaust_system"
+    )
+    solver1.file.read_case_data(file_name=case_path)
+    case_path1 = download_file("elbow1.cas.h5", "pyfluent/file_session")
+    download_file("elbow1.dat.h5", "pyfluent/file_session")
+    solver2.file.read_case_data(file_name=case_path1)
+    _test_min(solver1, solver2)
+    _test_centroid_2_sources(solver1, solver2)
 
 
 @pytest.mark.fluent_version(">=24.2")
@@ -426,3 +452,33 @@ def test_fix_for_invalid_location_inputs(static_mixer_case_session: Any):
 
     with pytest.raises(ValueError):
         assert solver.fields.reduction.area(locations=["inlet-1"])
+
+
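+# Empty location lists should fail fast with RuntimeError, both through the
+# functional reduction API (reduction.*, ctxt=solver) and through the session
+# API (solver.fields.reduction.*).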
+@pytest.mark.fluent_version(">=25.2")
+def test_fix_for_empty_location_inputs(static_mixer_case_session: Any):
+    solver = static_mixer_case_session
+    solver.solution.initialization.hybrid_initialize()
+
+    assert solver.fields.reduction.area(locations=["inlet1"])
+
+    with pytest.raises(RuntimeError):
+        assert reduction.area(locations=[], ctxt=solver)
+
+    with pytest.raises(RuntimeError):
+        assert reduction.area_average(
+            expression="AbsolutePressure", locations=[], ctxt=solver
+        )
+
+    with pytest.raises(RuntimeError):
+        assert reduction.centroid(locations=[], ctxt=solver)
+
+    with pytest.raises(RuntimeError):
+        assert solver.fields.reduction.area(locations=[])
+
+    with pytest.raises(RuntimeError):
+        assert solver.fields.reduction.area_average(
+            expression="AbsolutePressure", locations=[]
+        )
+
+    with pytest.raises(RuntimeError):
+        assert solver.fields.reduction.centroid(locations=[])
diff --git a/tests/test_search.py b/tests/test_search.py
index ac541d1c38d..da1894682e7 100644
--- a/tests/test_search.py
+++ b/tests/test_search.py
@@ -1,3 +1,5 @@
+import sys
+
 import pytest
 
 import ansys.fluent.core as pyfluent
@@ -7,9 +9,7 @@
     _get_close_matches_for_word_from_names,
     _get_exact_match_for_word_from_names,
     _get_match_case_for_word_from_names,
-    _get_version_path_prefix_from_obj,
     _get_wildcard_matches_for_word_from_names,
-    _search,
     _search_semantic,
     _search_whole_word,
     _search_wildcard,
@@ -33,7 +33,7 @@ def test_nltk_data_download():
 @pytest.mark.codegen_required
 def test_get_exact_match_for_word_from_names():
     api_tree_data = _get_api_tree_data()
-    api_object_names = list(api_tree_data["all_api_object_name_synsets"].keys())
+    api_object_names = api_tree_data["all_api_object_names"]
     exact_match = _get_exact_match_for_word_from_names(
         "VideoResoutionY",
         names=api_object_names,
@@ -46,7 +46,7 @@ def test_get_capitalize_match_for_word_from_names():
     api_tree_data = _get_api_tree_data()
-    api_object_names = list(api_tree_data["all_api_object_name_synsets"].keys())
+    api_object_names = api_tree_data["all_api_object_names"]
     capitalize_match_cases = _get_capitalize_match_for_word_from_names(
         "font",
         names=api_object_names,
@@ -73,7 +73,7 @@ def test_get_match_case_for_word_from_names():
     api_tree_data = _get_api_tree_data()
-    api_object_names = list(api_tree_data["all_api_object_name_synsets"].keys())
+    api_object_names = api_tree_data["all_api_object_names"]
     match_cases = _get_match_case_for_word_from_names(
         "font",
         names=api_object_names,
@@ -106,7 +106,7 @@ def test_get_wildcard_matches_for_word_from_names():
     api_tree_data = _get_api_tree_data()
-    api_object_names = list(api_tree_data["all_api_object_name_synsets"].keys())
+    api_object_names = api_tree_data["all_api_object_names"]
     wildcard_matches = _get_wildcard_matches_for_word_from_names(
         "iter*",
         names=api_object_names,
@@ -134,7 +134,7 @@ def test_get_close_matches_for_word_from_names():
     api_tree_data = _get_api_tree_data()
-    api_object_names = list(api_tree_data["all_api_object_name_synsets"].keys())
+    api_object_names = api_tree_data["all_api_object_names"]
     close_matches = _get_close_matches_for_word_from_names(
         "font",
         names=api_object_names,
@@ -229,10 +229,7 @@ def test_whole_word_search(capsys):
     lines = capsys.readouterr().out.splitlines()
     assert "font" not in lines
     assert ".preferences.Appearance.Charts.Font (Object)" in lines
-    assert (
-        ".preferences.Graphics.ColormapSettings.TextFontAutomaticUnits (Parameter)"
-        in lines
-    )
+    assert ".preferences.Appearance.Charts.Font (Object)" in lines
@@ -243,14 +240,8 @@ def test_match_case_search(capsys):
     for line in lines:
         assert "Font" not in line
         assert "font" in line
-    assert (
-        '.results.graphics.pathline[""].color_map.font_name (Parameter)'
-        in lines
-    )
-    assert (
-        '.results.graphics.vector[""].color_map.font_automatic (Parameter)'
-        in lines
-    )
(Object)" in lines + assert ".tui.preferences.appearance.charts.font (Object)" in lines @pytest.mark.fluent_version("==24.2") @@ -266,10 +257,7 @@ def test_match_whole_word_and_case_search(capsys): ".preferences.Graphics.ColormapSettings.TextFontAutomaticUnits (Parameter)" not in lines ) - assert ( - '.results.graphics.lic[""].color_map.font_name (Parameter)' - in lines - ) + assert ".tui.display.set_grid.label_font (Command)" in lines @pytest.mark.fluent_version("==24.2") @@ -312,182 +300,45 @@ def test_japanese_semantic_search(capsys): assert ".tui.preferences.appearance.charts.font (Object)" in lines -@pytest.mark.codegen_required -def test_search(): - results = _search("display") - assert ".tui.display (Object)" in results - assert ".tui.display.update_scene.display (Command)" in results - assert ( - ".preferences.Graphics.MeshingMode.GraphicsWindowDisplayTimeout (Parameter)" - in results - ) - assert ( - '.results.graphics.mesh[""].display (Command)' in results - ) - assert ( - '.results.graphics.mesh[""].display_state_name (Parameter)' - in results - ) - - results = _search("display", match_whole_word=True) - assert ( - '.results.graphics.mesh[""].display (Command)' in results - ) - assert ( - '.results.graphics.mesh[""].display_state_name (Parameter)' - not in results - ) - - results = _search("Display", match_case=True) - assert ".tui.display (Object)" not in results - assert ( - ".preferences.Graphics.MeshingMode.GraphicsWindowDisplayTimeout (Parameter)" - in results - ) - - results = _search( - "GraphicsWindowDisplayTimeout", match_whole_word=True, match_case=True - ) - assert ( - ".preferences.Graphics.MeshingMode.GraphicsWindowDisplayTimeout (Parameter)" - in results - ) - assert ( - ".preferences.Graphics.MeshingMode.GraphicsWindowDisplayTimeoutValue (Parameter)" - not in results - ) - - -@pytest.mark.codegen_required -@pytest.mark.fluent_version("latest") -def test_get_version_path_prefix_from_obj( - watertight_workflow_session, new_solver_session -): - meshing = watertight_workflow_session - solver = new_solver_session - version = solver._version - assert _get_version_path_prefix_from_obj(meshing) == ( - version, - [""], - "", - ) - assert _get_version_path_prefix_from_obj(solver) == ( - version, - [""], - "", - ) - assert _get_version_path_prefix_from_obj(meshing.tui.file.import_) == ( - version, - ["", "tui", "file", "import_"], - "", - ) - assert _get_version_path_prefix_from_obj(meshing.tui.file.read_case) == ( - None, - None, - None, - ) - assert _get_version_path_prefix_from_obj(meshing.meshing) == ( - version, - ["", "meshing"], - "", - ) - assert _get_version_path_prefix_from_obj(meshing.workflow) == ( - version, - ["", "workflow"], - "", - ) - assert _get_version_path_prefix_from_obj(solver.workflow) == ( - version, - ["", "workflow"], - "", - ) - assert _get_version_path_prefix_from_obj(meshing.workflow.TaskObject) == ( - version, - ["", "workflow", "TaskObject:"], - '[""]', - ) - assert _get_version_path_prefix_from_obj( - meshing.workflow.TaskObject["Import Geometry"] - ) == ( - version, - ["", "workflow", "TaskObject:"], - "", - ) - assert _get_version_path_prefix_from_obj(meshing.preferences.Appearance.Charts) == ( - version, - ["", "preferences", "Appearance", "Charts"], - "", - ) - assert _get_version_path_prefix_from_obj(solver.setup.models) == ( - version, - [""], - "", - ) - assert _get_version_path_prefix_from_obj(solver.file.cff_files) == ( - None, - None, - None, - ) - - -@pytest.mark.codegen_required -@pytest.mark.fluent_version("latest") -def 
-def test_search_from_root(watertight_workflow_session):
-    meshing = watertight_workflow_session
-    results = _search("display", search_root=meshing)
-    assert ".tui.display (Object)" in results
-    results = _search("display", search_root=meshing.tui)
-    assert ".display (Object)" in results
-    results = _search("display", search_root=meshing.tui.display)
-    assert ".update_scene.display (Command)" in results
-    assert ".display_states (Object)" in results
-    results = _search("cad", search_root=meshing.meshing)
-    assert ".GlobalSettings.EnableCleanCAD (Parameter)" in results
-    assert ".LoadCADGeometry (Command)" in results
-    results = _search("next", search_root=meshing.workflow)
-    assert '.TaskObject[""].InsertNextTask (Command)' in results
-    results = _search("next", search_root=meshing.workflow.TaskObject)
-    assert '[""].InsertNextTask (Command)' in results
-    results = _search(
-        "next", search_root=meshing.workflow.TaskObject["Import Geometry"]
-    )
-    assert ".InsertNextTask (Command)" in results
-    results = _search("timeout", search_root=meshing.preferences)
-    assert ".General.IdleTimeout (Parameter)" in results
-    results = _search("timeout", search_root=meshing.preferences.General)
-    assert ".IdleTimeout (Parameter)" in results
-
-
-@pytest.mark.codegen_required
-@pytest.mark.fluent_version(">=25.1")
-def test_search_settings_from_root(capsys, static_mixer_settings_session):
-    solver = static_mixer_settings_session
-    results = _search("conduction", search_root=solver)
-    assert ".tui.define.models.shell_conduction (Object)" in results
-    assert (
-        '.setup.boundary_conditions.wall[""].phase[""].thermal.enable_shell_conduction (Parameter)'
-        in results
-    )
-    results = _search("conduction", search_root=solver.setup.boundary_conditions)
-    assert (
-        '.wall[""].phase[""].thermal.conduction_layers[] (Object)'
-        in results
-    )
-    results = _search("conduction", search_root=solver.setup.boundary_conditions.wall)
-    assert (
-        '[""].phase[""].thermal.conduction_layers[] (Object)'
-        in results
-    )
-    results = _search(
-        "conduction", search_root=solver.setup.boundary_conditions.wall["wall"]
-    )
-    assert (
-        '.phase[""].thermal.conduction_layers[] (Object)'
-        in results
-    )
-    results = _search(
-        "conduction", search_root=solver.setup.boundary_conditions.wall["wall"].phase
-    )
-    assert (
-        '[""].thermal.conduction_layers[] (Object)' in results
-    )
+def test_match_whole_word(monkeypatch):
+    monkeypatch.setattr(pyfluent, "PRINT_SEARCH_RESULTS", False)
+    api_tree_data = {
+        "api_objects": [
+            ".parent (Object)",
+            ".parent.child (Parameter)",
+            ".first_last (Object)",
+            ".none (Object)",
+        ],
+        "api_tui_objects": [],
+        "all_api_object_name_synsets": {
+            "parent": ["parent"],
+            "child": ["child"],
+            "first_last": ["first_last"],
+            "none": ["none"],
+        },
+        "all_api_object_names": ["parent", "child", "first_last", "none"],
+    }
+
+    search_module = sys.modules["ansys.fluent.core.search"]
+    monkeypatch.setattr(search_module, "_get_api_tree_data", lambda: api_tree_data)
+
+    assert _search_whole_word("parent", api_tree_data=api_tree_data) == [
+        ".parent (Object)"
+    ]
+    assert _search_whole_word("child", api_tree_data=api_tree_data) == [
+        ".parent.child (Parameter)"
+    ]
+    assert pyfluent.search("parent", match_whole_word=True) == [
+        ".parent (Object)"
+    ]
+
+    assert pyfluent.search("first", match_whole_word=True) == [
+        ".first_last (Object)"
+    ]
+    assert pyfluent.search("last", match_whole_word=True) == [
+        ".first_last (Object)"
+    ]
+
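+    # Underscore-separated components count as whole words, so "first" and
+    # "last" each match ".first_last"; the exact name matches as well.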
(Object)" + ] diff --git a/tests/test_session.py b/tests/test_session.py index 5e55e72a40c..35e6bf34195 100644 --- a/tests/test_session.py +++ b/tests/test_session.py @@ -25,7 +25,6 @@ from ansys.fluent.core.utils.fluent_version import FluentVersion from ansys.fluent.core.utils.networking import get_free_port from ansys.fluent.core.warnings import PyFluentDeprecationWarning -from tests.conftest import new_solver_session class MockSettingsServicer(settings_pb2_grpc.SettingsServicer): @@ -307,7 +306,7 @@ def test_journal_creation(file_format, new_meshing_session): if session.connection_properties.inside_container: session.journal.start(file_name.name) else: - session.journal.start(file_name) + session.journal.start(str(file_name)) session = session.switch_to_solver() session.journal.stop() new_stat = file_name.stat() @@ -391,7 +390,9 @@ def test_help_does_not_throw(new_solver_session): help(new_solver_session.file.read) -new_solver_session2 = new_solver_session +@pytest.fixture +def new_solver_session2(new_solver_session): + return new_solver_session def test_build_from_fluent_connection(new_solver_session, new_solver_session2): @@ -410,7 +411,11 @@ def test_build_from_fluent_connection(new_solver_session, new_solver_session2): ) assert solver1.health_check.is_serving assert solver2.health_check.is_serving - assert not health_check_service1.is_serving + timeout_loop( + not health_check_service1.is_serving, + timeout=60, + idle_period=1, + ) assert solver1._fluent_connection.connection_properties.cortex_pid == cortex_pid2 assert solver2._fluent_connection.connection_properties.cortex_pid == cortex_pid2 @@ -572,3 +577,18 @@ def test_general_exception_behaviour_in_session(new_solver_session): # This appears to be a surface mesh.\nSurface meshes cannot be read under the /file/read-case functionality. 
     assert solver1._fluent_connection.connection_properties.cortex_pid == cortex_pid2
     assert solver2._fluent_connection.connection_properties.cortex_pid == cortex_pid2
@@ -572,3 +577,18 @@ def test_general_exception_behaviour_in_session(new_solver_session):
     # This appears to be a surface mesh.\nSurface meshes cannot be read under the /file/read-case functionality.
     # with pytest.raises(RuntimeError):
     #     solver.settings.file.read(file_type='case', file_name=mesh_file_2d)
+
+
+@pytest.mark.fluent_version(">=23.2")
+def test_app_utilities_new_and_old(mixing_elbow_settings_session):
+    solver = mixing_elbow_settings_session
+
+    assert solver._app_utilities.get_app_mode() == pyfluent.FluentMode.SOLVER
+
+    assert not solver._app_utilities.is_beta_enabled()
+
+    assert not solver._app_utilities.is_wildcard("no")
+
+    assert solver._app_utilities.is_wildcard("yes*")
+
+    assert not solver._app_utilities.is_solution_data_available()
diff --git a/tests/test_settings_api.py b/tests/test_settings_api.py
index 44a57e5359f..92535218cca 100644
--- a/tests/test_settings_api.py
+++ b/tests/test_settings_api.py
@@ -226,13 +226,13 @@ def test_deprecated_settings_with_custom_aliases(new_solver_session):
     solver = new_solver_session
     case_path = download_file("mixing_elbow.cas.h5", "pyfluent/mixing_elbow")
     download_file("mixing_elbow.dat.h5", "pyfluent/mixing_elbow")
-    solver.file._setattr("_child_aliases", {"rcd": "read_case_data"})
+    solver.file._setattr("_child_aliases", {"rcd": ("read_case_data", "rcd")})
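+    # Alias values appear to be (target, deprecated-name) tuples now, rather
+    # than bare target strings.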
     with pytest.warns(DeprecatedSettingWarning):
         solver.file.rcd(file_name=case_path)
 
     solver.setup.boundary_conditions.velocity_inlet.child_object_type._child_aliases[
         "mom"
-    ] = "momentum"
+    ] = ("momentum", "mom")
     with pytest.warns(DeprecatedSettingWarning):
         solver.setup.boundary_conditions.velocity_inlet["hot-inlet"].mom.velocity = 20
     assert (
@@ -261,12 +261,9 @@ def test_deprecated_settings_with_custom_aliases(new_solver_session):
         )
         > 0
     )
-    assert (
-        solver.setup.boundary_conditions.wall[
-            "wall-inlet"
-        ].thermal.temperature._child_aliases["constant"]
-        == "value"
-    )
+    assert solver.setup.boundary_conditions.wall[
+        "wall-inlet"
+    ].thermal.temperature._child_aliases["constant"] == ("value", "constant")
     with pytest.warns(DeprecatedSettingWarning):
         solver.setup.boundary_conditions.wall[
             "wall-inlet"
@@ -298,7 +295,7 @@ def test_deprecated_settings_with_custom_aliases(new_solver_session):
         == 410
     )
 
-    solver.setup.boundary_conditions._setattr("_child_aliases", {"w": "wall"})
+    solver.setup.boundary_conditions._setattr("_child_aliases", {"w": ("wall", "w")})
     with pytest.warns(DeprecatedSettingWarning):
         solver.setup.boundary_conditions.w["wall-inlet"].thermal.temperature.value = 420
 
@@ -307,7 +304,7 @@ def test_deprecated_settings_with_custom_aliases(new_solver_session):
         == 420
     )
 
-    solver.setup._setattr("_child_aliases", {"bc": "boundary_conditions"})
+    solver.setup._setattr("_child_aliases", {"bc": ("boundary_conditions", "bc")})
     with pytest.warns(DeprecatedSettingWarning):
         solver.setup.bc.wall["wall-inlet"].thermal.temperature.value = 430
 
@@ -326,7 +323,7 @@ def test_deprecated_settings_with_custom_aliases(new_solver_session):
        == 400
     )
 
-    solver.results._setattr("_child_aliases", {"gr": "graphics"})
+    solver.results._setattr("_child_aliases", {"gr": ("graphics", "gr")})
     with pytest.warns(DeprecatedSettingWarning):
         solver.results.gr.contour.create("c1")
 
@@ -342,7 +339,10 @@ def test_deprecated_settings_with_custom_aliases(new_solver_session):
     solver.setup.boundary_conditions.velocity_inlet[
         "hot-inlet"
-    ].momentum.velocity._child_aliases["hd"] = "../../turbulence/hydraulic_diameter"
+    ].momentum.velocity._child_aliases["hd"] = (
+        "../../turbulence/hydraulic_diameter",
+        "hd",
+    )
     with pytest.warns(DeprecatedSettingWarning):
         solver.setup.boundary_conditions.velocity_inlet[
             "hot-inlet"
@@ -377,6 +377,44 @@ def test_deprecated_settings_with_settings_api_aliases(mixing_elbow_case_data_se
         "minimum": -0.0001,
         "maximum": 0.0001,
     }
+    solver.settings.results.graphics.contour["temperature"] = {}
+    solver.settings.results.graphics.contour["temperature"] = {
+        "field": "temperature",
+        "surfaces_list": "wall*",
+        "color_map": {
+            "visible": True,
+            "size": 100,
+            "color": "field-velocity",
+            "log_scale": False,
+            "format": "%0.1f",
+            "user_skip": 9,
+            "show_all": True,
+            "position": 1,
+            "font_name": "Helvetica",
+            "font_automatic": True,
+            "font_size": 0.032,
+            "length": 0.54,
+            "width": 6,
+            "bground_transparent": True,
+            "bground_color": "#CCD3E2",
+            "title_elements": "Variable and Object Name",
+        },
+        "range_option": {
+            "option": "auto-range-off",
+            "auto_range_off": {
+                "maximum": 400.0,
+                "minimum": 300,
+                "clip_to_range": False,
+            },
+        },
+    }
+    assert solver.settings.results.graphics.contour["temperature"].range_options() == {
+        "global_range": True,
+        "auto_range": False,
+        "clip_to_range": False,
+        "minimum": 300,
+        "maximum": 400.0,
+    }
 
 
 @pytest.mark.fluent_version(">=23.1")
@@ -457,6 +495,7 @@ def test_generated_code_special_cases(new_solver_session):
     assert _OutputFile in write_file_bases
 
 
+@pytest.mark.skip("https://github.com/ansys/pyfluent/issues/3591")
 @pytest.mark.fluent_version(">=25.1")
 def test_child_alias_with_parent_path(mixing_elbow_settings_session):
     solver = mixing_elbow_settings_session
@@ -481,7 +520,10 @@ def test_child_alias_with_parent_path(mixing_elbow_settings_session):
     solver.settings.solution.initialization.hybrid_initialize()
     assert (
         solver.settings.setup.models.discrete_phase.numerics.node_based_averaging.kernel._child_aliases
-        == {"gaussian_factor": "../gaussian_factor", "option": "../kernel_type"}
+        == {
+            "gaussian_factor": ("../gaussian_factor", "gaussian-factor"),
+            "option": ("../kernel_type", "option"),
+        }
     )
     solver.settings.setup.models.discrete_phase.numerics.node_based_averaging.enabled = (
         True
diff --git a/tests/test_solution_variables.py b/tests/test_solution_variables.py
index 68da304e901..2c7d3d5d24e 100644
--- a/tests/test_solution_variables.py
+++ b/tests/test_solution_variables.py
@@ -212,3 +212,30 @@ def test_solution_variable_does_not_modify_case(new_solver_session):
         domain_name="mixture",
     )
     assert not solver.scheme_eval.scheme_eval("(case-modified?)")
+
+
+@pytest.mark.fluent_version(">=25.2")
+def test_solution_variable_udm_data(mixing_elbow_case_session_t4):
+    solver = mixing_elbow_case_session_t4
+    solver.tui.define.user_defined.user_defined_memory("2")
+    solver.settings.solution.initialization.hybrid_initialize()
+    solver.settings.solution.run_calculation.iterate(iter_count=1)
+    udm_data = solver.fields.solution_variable_data.get_data(
+        solution_variable_name="SV_UDM_I",
+        domain_name="mixture",
+        zone_names=["wall-elbow"],
+    )["wall-elbow"]
+    np.testing.assert_array_equal(udm_data, np.zeros(4336))
+    udm_data[:2168] = 5
+    udm_data[2168:] = 10
+    solver.fields.solution_variable_data.set_data(
+        solution_variable_name="SV_UDM_I",
+        domain_name="mixture",
+        zone_names_to_solution_variable_data={"wall-elbow": udm_data},
+    )
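+    # Reading the zone back should return exactly the array that was written.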
+    new_array = solver.fields.solution_variable_data.get_data(
+        solution_variable_name="SV_UDM_I",
+        domain_name="mixture",
+        zone_names=["wall-elbow"],
+    )["wall-elbow"]
+    np.testing.assert_array_equal(new_array, udm_data)
diff --git a/tests/test_tests_util.py b/tests/test_tests_util.py
deleted file mode 100644
index 64e3a75615e..00000000000
--- a/tests/test_tests_util.py
+++ /dev/null
@@ -1,47 +0,0 @@
-from pathlib import Path
-import shutil
-
-import pytest
-
-from ansys.fluent.core import EXAMPLES_PATH
-from tests.util import rename_downloaded_file
-
-
-@pytest.mark.parametrize(
-    "ext,a,b,c,d",
-    [(".cas", "a1", "b1", "c1", "d1"), (".cas.gz", "a2", "b2", "c2", "d2")],
-)
-def test_rename_downloaded_file(ext, a, b, c, d):
-    try:
-        file_path = Path(EXAMPLES_PATH) / f"{a}{ext}"
-        file_path.touch()
-        file_path = str(file_path)
-        new_file_path = rename_downloaded_file(file_path, "_1")
-        assert new_file_path == str(Path(EXAMPLES_PATH) / f"{a}_1{ext}")
-    except Exception:
-        raise
-    finally:
-        Path(new_file_path).unlink(missing_ok=True)
-
-    try:
-        file_path = f"{b}{ext}"
-        (Path(EXAMPLES_PATH) / file_path).touch()
-        new_file_path = rename_downloaded_file(file_path, "_1")
-        assert new_file_path == f"{b}_1{ext}"
-    except Exception:
-        raise
-    finally:
-        (Path(EXAMPLES_PATH) / new_file_path).unlink(missing_ok=True)
-
-    try:
-        dir_path = Path(EXAMPLES_PATH) / c
-        dir_path.mkdir()
-        file_path = dir_path / f"{d}{ext}"
-        file_path.touch()
-        file_path = str(Path(c) / f"{d}{ext}")
-        new_file_path = rename_downloaded_file(file_path, "_1")
-        assert new_file_path == str(Path(c) / f"{d}_1{ext}")
-    except Exception:
-        raise
-    finally:
-        shutil.rmtree(dir_path, ignore_errors=True)
diff --git a/tests/util/__init__.py b/tests/util/__init__.py
index 2a95cf4894f..2be1c68ced9 100644
--- a/tests/util/__init__.py
+++ b/tests/util/__init__.py
@@ -1,38 +1,36 @@
 from pathlib import Path
+from tempfile import TemporaryDirectory
+import uuid
 
-from ansys.fluent.core import EXAMPLES_PATH
+from pytest import MonkeyPatch
 
+import ansys.fluent.core as pyfluent
+from ansys.fluent.core.codegen import StaticInfoType, datamodelgen
+from ansys.fluent.core.utils import load_module
 
 
-def rename_downloaded_file(file_path: str, suffix: str) -> str:
-    """Rename downloaded file by appending a suffix to the file name.
-
-    Parameters
-    ----------
-    file_path : str
-        Downloaded file path. Can be absolute or relative.
-
-    suffix : str
-        Suffix to append to the file name.
+def create_datamodel_root_in_server(session, rules_str, app_name) -> None:
+    rules_file_name = f"{uuid.uuid4()}.fdl"
+    session.scheme_eval.scheme_eval(
+        f'(with-output-to-file "{rules_file_name}" (lambda () (format "~a" "{rules_str}")))',
+    )
+    session.scheme_eval.scheme_eval(
+        f'(state/register-new-state-engine "{app_name}" "{rules_file_name}")'
+    )
+    session.scheme_eval.scheme_eval(f'(remove-file "{rules_file_name}")')
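+    # A positive root id confirms the rules were registered before the
+    # temporary rules file was removed.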
- """ - ext = "".join(Path(file_path).suffixes) - orig_path = Path(file_path) - file_path = file_path.removesuffix(ext) - file_path = Path(file_path) - if file_path.is_absolute(): - new_stem = f"{file_path.stem}{suffix}" - new_path = file_path.with_stem(new_stem) - new_path = new_path.with_suffix(ext) - orig_path.rename(new_path) - return str(new_path) - else: - orig_abs_path = Path(EXAMPLES_PATH) / orig_path - abs_path = Path(EXAMPLES_PATH) / file_path - new_stem = f"{file_path.stem}{suffix}" - new_path = abs_path.with_stem(new_stem) - new_path = new_path.with_suffix(ext) - orig_abs_path.rename(new_path) - return str(file_path.with_stem(new_stem).with_suffix(ext)) + +def create_root_using_datamodelgen(service, app_name): + version = "252" + static_info = service.get_static_info(app_name) + with TemporaryDirectory() as temp_dir: + with MonkeyPatch.context() as m: + m.setattr(pyfluent, "CODEGEN_OUTDIR", Path(temp_dir)) + # TODO: Refactor datamdodelgen so we don't need to hardcode StaticInfoType + datamodelgen.generate( + version, static_infos={StaticInfoType.DATAMODEL_WORKFLOW: static_info} + ) + gen_file = Path(temp_dir) / f"datamodel_{version}" / "workflow.py" + module = load_module("datamodel", gen_file) + return module.Root(service, app_name, [])