diff --git a/.ci/build_wheel.py b/.ci/build_wheel.py index c3d43099ee..6acac20032 100644 --- a/.ci/build_wheel.py +++ b/.ci/build_wheel.py @@ -3,6 +3,7 @@ import argparse import subprocess +from pathlib import Path import os import sys import shutil @@ -39,15 +40,13 @@ print("Created temporary directory: ", tmpdirname) # Create the temporary build-opts.cfg - build_opts_path = os.path.join(tmpdirname, "build-opts.cfg") - with open(build_opts_path, "w") as build_opts_file: - build_opts_file.write(f"[bdist_wheel]\nplat-name={requested_platform}") - os.environ["DIST_EXTRA_CONFIG"] = build_opts_path + build_opts_path = Path(tmpdirname) / "build-opts.cfg" + + build_opts_path.write_text(f"[bdist_wheel]\nplat-name={requested_platform}", encoding="utf-8") + os.environ["DIST_EXTRA_CONFIG"] = str(build_opts_path) # Move the binaries - gatebin_folder_path = os.path.join( - os.path.curdir, os.path.join("src", "ansys", "dpf", "gatebin") - ) + gatebin_folder_path = Path.cwd() / "src" / "ansys" / "dpf" / "gatebin" binaries_to_move = [] moved = [] if "win" in requested_platform or "any" == requested_platform: @@ -60,15 +59,15 @@ binaries_to_move.extend(["_version.py"]) for binary_name in binaries_to_move: - src = os.path.join(gatebin_folder_path, binary_name) - dst = os.path.join(tmpdirname, binary_name) + src = gatebin_folder_path / binary_name + dst = Path(tmpdirname) / binary_name print(f"Moving {src} to {dst}") shutil.move(src=src, dst=dst) moved.append([dst, src]) if "any" == requested_platform: # Also remove the gatebin folder - os.rmdir(gatebin_folder_path) + gatebin_folder_path.rmdir() # Call the build if not args.wheelhouse: @@ -83,7 +82,7 @@ if "any" == requested_platform: # Recreate the gatebin folder - os.mkdir(gatebin_folder_path) + gatebin_folder_path.mkdir() # Move binaries back for move_back in moved: diff --git a/.ci/code_generation.py b/.ci/code_generation.py index cb372e324a..98c3a9dd85 100644 --- a/.ci/code_generation.py +++ b/.ci/code_generation.py @@ -8,21 
+8,22 @@ import shutil -local_dir = os.path.dirname(os.path.abspath(__file__)) -TARGET_PATH = os.path.join(local_dir, os.pardir, "src", "ansys", "dpf", "core", "operators") -files = glob.glob(os.path.join(TARGET_PATH, "*")) +local_dir = Path(__file__).parent +TARGET_PATH = local_dir.parent / "src" / "ansys" / "dpf" / "core" / "operators" +files = glob.glob(str(TARGET_PATH / "*")) for f in files: - if Path(f).stem == "specification": + file_path = Path(f) + if file_path.stem == "specification": continue - if Path(f).name == "build.py": + if file_path.name == "build.py": continue - if Path(f).name == "operator.mustache": + if file_path.name == "operator.mustache": continue try: - if os.path.isdir(f): - shutil.rmtree(f) + if file_path.is_dir(): + shutil.rmtree(file_path) else: - os.remove(f) + file_path.unlink() except: pass diff --git a/.ci/run_examples.py b/.ci/run_examples.py index 5a3da2b7bd..922ace00c3 100644 --- a/.ci/run_examples.py +++ b/.ci/run_examples.py @@ -1,6 +1,6 @@ import os import glob -import pathlib +from pathlib import Path import subprocess import sys @@ -11,8 +11,9 @@ os.environ["PYVISTA_OFF_SCREEN"] = "true" os.environ["MPLBACKEND"] = "Agg" -actual_path = pathlib.Path(__file__).parent.absolute() -print(os.path.join(actual_path, os.path.pardir, "examples")) +actual_path = Path(__file__).parent.absolute() +examples_path = actual_path.parent / "examples" +print(examples_path) # Get the DPF server version server = dpf.server.get_or_create_server(None) @@ -20,10 +21,10 @@ server.shutdown() print(f"Server version: {server_version}") -for root, subdirectories, files in os.walk(os.path.join(actual_path, os.path.pardir, "examples")): +for root, subdirectories, files in os.walk(examples_path): for subdirectory in subdirectories: - subdir = os.path.join(root, subdirectory) - for file in glob.iglob(os.path.join(subdir, "*.py")): + subdir = Path(root) / subdirectory + for file in glob.iglob(str(subdir / "*.py")): if sys.platform == "linux" and 
"08-python-operators" in file: continue elif "win" in sys.platform and "06-distributed_stress_averaging" in file: diff --git a/.ci/run_non_regression_examples.py b/.ci/run_non_regression_examples.py index 247e074531..e492492fa8 100644 --- a/.ci/run_non_regression_examples.py +++ b/.ci/run_non_regression_examples.py @@ -9,45 +9,26 @@ os.environ["MPLBACKEND"] = "Agg" actual_path = pathlib.Path(__file__).parent.absolute() -print(os.path.join(actual_path, os.path.pardir, "examples")) +examples_path = actual_path.parent / "examples" +print(examples_path) list_tests = [ - os.path.join(actual_path, os.path.pardir, "examples", "00-basic"), - os.path.join(actual_path, os.path.pardir, "examples", "01-transient_analyses"), - os.path.join(actual_path, os.path.pardir, "examples", "02-modal_analyses"), - os.path.join(actual_path, os.path.pardir, "examples", "03-harmonic_analyses"), - os.path.join(actual_path, os.path.pardir, "examples", "06-plotting", "00-basic_plotting.py"), - os.path.join( - actual_path, - os.path.pardir, - "examples", - "06-plotting", - "05-plot_on_warped_mesh.py", - ), - os.path.join( - actual_path, - os.path.pardir, - "examples", - "07-distributed-post", - "00-distributed_total_disp.py", - ), + examples_path / "00-basic", + examples_path / "01-transient_analyses", + examples_path / "02-modal_analyses", + examples_path / "03-harmonic_analyses", + examples_path / "06-plotting" / "00-basic_plotting.py", + examples_path / "06-plotting" / "05-plot_on_warped_mesh.py", + examples_path / "07-distributed-post" / "00-distributed_total_disp.py", ] if core.SERVER_CONFIGURATION != core.AvailableServerConfigs.InProcessServer: - list_tests.append( - os.path.join( - actual_path, - os.path.pardir, - "examples", - "08-python-operators", - "00-wrapping_numpy_capabilities.py", - ) - ) + list_tests.append(examples_path / "08-python-operators" / "00-wrapping_numpy_capabilities.py") for path in list_tests: - if os.path.isdir(path): - for file in glob.iglob(os.path.join(path, 
"*.py")): + if path.is_dir(): + for file in glob.iglob(str(path / "*.py")): print("\n--------------------------------------------------") print(file) try: diff --git a/.ci/update_dpf_dependencies.py b/.ci/update_dpf_dependencies.py index a6f9d72d13..8b201da3fb 100644 --- a/.ci/update_dpf_dependencies.py +++ b/.ci/update_dpf_dependencies.py @@ -15,7 +15,7 @@ import os import glob -import pathlib +from pathlib import Path import platform import shutil import zipfile @@ -23,21 +23,21 @@ grpc_path_key = "DPFDV_ROOT" gate_path_key = "ANSYSDPFPYGATE_ROOT" -core_path = pathlib.Path(__file__).parent.parent.resolve() +core_path = Path(__file__).parent.parent if "ANSYSDPFCORE_ROOT" in os.environ: core_path = os.environ["ANSYSDPFCORE_ROOT"] grpc_path = os.getenv(grpc_path_key, None) gate_path = os.getenv(gate_path_key, None) -if grpc_path is not None: +if grpc_path: # Update ansys-grpc-dpf with latest in proto/dist print("Updating ansys.grpc.dpf") - dist_path = os.path.join(grpc_path, "proto", "dist", "*") + dist_path = Path(grpc_path) / "proto" / "dist" / "*" print(f"from {dist_path}") - destination = os.path.join(core_path, "src") + destination = Path(core_path) / "src" print(f"into {destination}") - latest_wheel = max(glob.glob(dist_path), key=os.path.getctime) + latest_wheel = max(glob.glob(str(dist_path)), key=os.path.getctime) with zipfile.ZipFile(latest_wheel, "r") as wheel: for file in wheel.namelist(): # print(file) @@ -50,40 +50,34 @@ else: print(f"{grpc_path_key} environment variable is not defined. 
" "Cannot update ansys-grpc-dpf.") -if gate_path is not None: +if gate_path: # Update ansys-dpf-gate print("Updating ansys.dpf.gate generated code") - dist_path = os.path.join(gate_path, "ansys-dpf-gate", "ansys", "dpf", "gate", "generated") + dist_path = Path(gate_path) / "ansys-dpf-gate" / "ansys" / "dpf" / "gate" / "generated" print(f"from {dist_path}") - destination = os.path.join(core_path, "src", "ansys", "dpf", "gate", "generated") + destination = Path(core_path) / "src" / "ansys" / "dpf" / "gate" / "generated" print(f"into {destination}") shutil.copytree( src=dist_path, dst=destination, dirs_exist_ok=True, - ignore=lambda directory, contents: ["__pycache__"] if directory[-5:] == "gate" else [], + ignore=lambda directory, contents: ["__pycache__"] if str(directory)[-5:] == "gate" else [], ) - dist_path = os.path.join(gate_path, "ansys-dpf-gate", "ansys", "dpf", "gate", "__init__.py") + + dist_path = Path(gate_path) / "ansys-dpf-gate" / "ansys" / "dpf" / "gate" / "__init__.py" print(f"from {dist_path}") - destination = os.path.join(core_path, "src", "ansys", "dpf", "gate", "__init__.py") + destination = Path(core_path) / "src" / "ansys" / "dpf" / "gate" / "__init__.py" print(f"into {destination}") - shutil.copy( - src=dist_path, - dst=destination, - ) + shutil.copy(src=dist_path, dst=destination) print("Done updating ansys.dpf.gate generated code") # Update ansys-dpf-gatebin print("Updating ansys.dpf.gatebin") - dist_path = os.path.join(gate_path, "ansys-dpf-gatebin", "ansys") + dist_path = Path(gate_path) / "ansys-dpf-gatebin" / "ansys" print(f"from {dist_path}") - destination = os.path.join(core_path, "src", "ansys") + destination = Path(core_path) / "src" / "ansys" print(f"into {destination}") - shutil.copytree( - src=dist_path, - dst=destination, - dirs_exist_ok=True, - ) + shutil.copytree(src=dist_path, dst=destination, dirs_exist_ok=True) print(f"Done updating ansys.dpf.gatebin for {platform.system()}") else: print( diff --git 
a/examples/05-file-IO/00-hdf5_double_float_comparison.py b/examples/05-file-IO/00-hdf5_double_float_comparison.py index 730b00a72a..43cde7f7a5 100644 --- a/examples/05-file-IO/00-hdf5_double_float_comparison.py +++ b/examples/05-file-IO/00-hdf5_double_float_comparison.py @@ -36,7 +36,7 @@ # Import the ``dpf-core`` module and its examples files, and then create a # temporary directory. -import os +from pathlib import Path from ansys.dpf import core as dpf from ansys.dpf.core import examples @@ -78,8 +78,8 @@ # Define a temporary folder for outputs tmpdir = dpf.core.make_tmp_dir_server(dpf.SERVER) files = [ - dpf.path_utilities.join(tmpdir, "dpf_float.h5"), - dpf.path_utilities.join(tmpdir, "dpf_double.h5"), + Path(dpf.path_utilities.join(tmpdir, "dpf_float.h5")), + Path(dpf.path_utilities.join(tmpdir, "dpf_double.h5")), ] ############################################################################### # Export with simple precision. @@ -98,8 +98,8 @@ # Download the resulting .h5 files if necessary if not dpf.SERVER.local_server: - float_file_path = os.path.join(os.getcwd(), "dpf_float.h5") - double_file_path = os.path.join(os.getcwd(), "dpf_double.h5") + float_file_path = Path.cwd() / "dpf_float.h5" + double_file_path = Path.cwd() / "dpf_double.h5" dpf.download_file(files[0], float_file_path) dpf.download_file(files[1], double_file_path) else: @@ -109,8 +109,8 @@ ############################################################################### # Compare simple precision versus double precision. 
-float_precision = os.stat(float_file_path).st_size -double_precision = os.stat(double_file_path).st_size +float_precision = float_file_path.stat().st_size +double_precision = double_file_path.stat().st_size print( f"size with float precision: {float_precision}\n" f"size with double precision: {double_precision}" diff --git a/examples/05-file-IO/04-basic-load-file.py b/examples/05-file-IO/04-basic-load-file.py index 47b8fe14ea..adc3467780 100644 --- a/examples/05-file-IO/04-basic-load-file.py +++ b/examples/05-file-IO/04-basic-load-file.py @@ -58,16 +58,16 @@ # ~~~~~~~~~~~~~ # Export the fields container in the CSV format: -import os +from pathlib import Path csv_file_name = "simple_bar_fc.csv" # Define an output path for the resulting .csv file if not dpf.SERVER.local_server: # Define it server-side if using a remote server tmp_dir_path = dpf.core.make_tmp_dir_server(dpf.SERVER) - server_file_path = dpf.path_utilities.join(tmp_dir_path, csv_file_name) + server_file_path = Path(dpf.path_utilities.join(tmp_dir_path, csv_file_name)) else: - server_file_path = os.path.join(os.getcwd(), csv_file_name) + server_file_path = Path.cwd() / csv_file_name # Perform the export to csv on the server side export_csv_operator = dpf.operators.serialization.field_to_csv() @@ -81,7 +81,7 @@ # Download the file ``simple_bar_fc.csv``: if not dpf.SERVER.local_server: - downloaded_client_file_path = os.path.join(os.getcwd(), "simple_bar_fc_downloaded.csv") + downloaded_client_file_path = Path.cwd() / "simple_bar_fc_downloaded.csv" dpf.download_file(server_file_path, downloaded_client_file_path) else: downloaded_client_file_path = server_file_path @@ -98,7 +98,7 @@ mesh.plot(server_fc_out) # Remove file to avoid polluting. 
-os.remove(downloaded_client_file_path) +downloaded_client_file_path.unlink() ############################################################################### # Make operations over the fields container diff --git a/examples/08-python-operators/00-wrapping_numpy_capabilities.py b/examples/08-python-operators/00-wrapping_numpy_capabilities.py index 63d242ec39..90e0828b92 100644 --- a/examples/08-python-operators/00-wrapping_numpy_capabilities.py +++ b/examples/08-python-operators/00-wrapping_numpy_capabilities.py @@ -57,11 +57,12 @@ # Download and display the Python script. from ansys.dpf.core.examples import download_easy_statistics +from pathlib import Path -operator_file_path = download_easy_statistics() +operator_file_path = Path(download_easy_statistics()) -with open(operator_file_path, "r") as f: - for line in f.readlines(): +with operator_file_path.open() as file: + for line in file.readlines(): print("\t\t\t" + line) ############################################################################### @@ -76,15 +77,14 @@ # - The third argument is the name of the function used to record operators. # -import os from ansys.dpf import core as dpf from ansys.dpf.core import examples # Python plugins are not supported in process. dpf.start_local_server(config=dpf.AvailableServerConfigs.GrpcServer) -operator_server_file_path = dpf.upload_file_in_tmp_folder(operator_file_path) -dpf.load_library(os.path.dirname(operator_server_file_path), "py_easy_statistics", "load_operators") +operator_server_file_path = Path(dpf.upload_file_in_tmp_folder(operator_file_path)) +dpf.load_library(operator_server_file_path.parent, "py_easy_statistics", "load_operators") ############################################################################### # Instantiate the operator. 
diff --git a/examples/08-python-operators/01-package_python_operators.py b/examples/08-python-operators/01-package_python_operators.py index f6fd6cfc39..b2de42266c 100644 --- a/examples/08-python-operators/01-package_python_operators.py +++ b/examples/08-python-operators/01-package_python_operators.py @@ -72,8 +72,6 @@ # for the plug-in package that is used to record operators. # -import os - from ansys.dpf import core as dpf from ansys.dpf.core import examples @@ -83,7 +81,7 @@ tmp = dpf.make_tmp_dir_server() dpf.upload_files_in_folder(dpf.path_utilities.join(tmp, "average_filter_plugin"), plugin_folder) dpf.load_library( - os.path.join(dpf.path_utilities.join(tmp, "average_filter_plugin")), + dpf.path_utilities.join(tmp, "average_filter_plugin"), "py_average_filter", "load_operators", ) diff --git a/examples/08-python-operators/02-python_operators_with_dependencies.py b/examples/08-python-operators/02-python_operators_with_dependencies.py index d4f80e3199..08fc5a1189 100644 --- a/examples/08-python-operators/02-python_operators_with_dependencies.py +++ b/examples/08-python-operators/02-python_operators_with_dependencies.py @@ -58,11 +58,12 @@ # created for you. 
import os +from pathlib import Path from ansys.dpf.core import examples -plugin_path = examples.download_gltf_plugin() -folder_root = os.path.join(os.getcwd().rsplit("pydpf-core", 1)[0], "pydpf-core") +plugin_path = Path(examples.download_gltf_plugin()) +folder_root = Path(__file__).parent.parent.parent # %% # To add third-party modules as dependencies to a plug-in package, you must @@ -83,8 +84,9 @@ # To simplify this step, you can add a requirements file in the plug-in package: # print("\033[1m gltf_plugin/requirements.txt: \n \033[0m") -with open(os.path.join(plugin_path, "requirements.txt"), "r") as f: - for line in f.readlines(): +requirements_path = plugin_path / "requirements.txt" +with requirements_path.open("r") as file: + for line in file.readlines(): print("\t\t\t" + line) @@ -117,26 +119,21 @@ # # create_sites_for_python_operators.sh -pluginpath /path/to/plugin -zippath /path/to/plugin/assets/linx64.zip # noqa: E501 - -if os.name == "nt" and not os.path.exists( - os.path.join(plugin_path, "assets", "gltf_sites_winx64.zip") -): - cmd_file = os.path.join( - folder_root, - "doc", - "source", - "user_guide", - "create_sites_for_python_operators.ps1", +site_path = plugin_path / "assets" / "gltf_sites_winx64.zip" +if os.name == "nt" and not site_path.exists(): + cmd_file = ( + folder_root / "doc" / "source" / "user_guide" / "create_sites_for_python_operators.ps1" ) args = [ "powershell", - cmd_file, + str(cmd_file), "-pluginpath", - plugin_path, + str(plugin_path), "-zippath", - os.path.join(plugin_path, "assets", "gltf_sites_winx64.zip"), + str(plugin_path / "assets" / "gltf_sites_winx64.zip"), ] print(args) + import subprocess process = subprocess.run(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE) @@ -150,20 +147,15 @@ ) else: print("Installing pygltf in a virtual environment succeeded") -elif os.name == "posix" and not os.path.exists( - os.path.join(plugin_path, "assets", "gltf_sites_linx64.zip") -): - cmd_file = os.path.join( - folder_root, - 
"doc", - "source", - "user_guide", - "create_sites_for_python_operators.sh", + +elif os.name == "posix" and not site_path.exists(): + cmd_file = ( + folder_root / "doc" / "source" / "user_guide" / "create_sites_for_python_operators.sh" ) run_cmd = f"{cmd_file}" args = ( f' -pluginpath "{plugin_path}" ' - f"-zippath \"{os.path.join(plugin_path, 'assets', 'gltf_sites_linx64.zip')}\"" + f'-zippath \"{plugin_path / "assets" / "gltf_sites_winx64.zip"}\"' ) print(run_cmd + args) os.system(f"chmod u=rwx,o=x {cmd_file}") @@ -189,9 +181,9 @@ # Python plugins are not supported in process. dpf.start_local_server(config=dpf.AvailableServerConfigs.GrpcServer) -tmp = dpf.make_tmp_dir_server() -dpf.upload_files_in_folder(dpf.path_utilities.join(tmp, "plugins", "gltf_plugin"), plugin_path) -dpf.upload_file(plugin_path + ".xml", dpf.path_utilities.join(tmp, "plugins", "gltf_plugin.xml")) +tmp = Path(dpf.make_tmp_dir_server()) +dpf.upload_files_in_folder(dpf.path_utilities.join(str(tmp), "plugins", "gltf_plugin"), plugin_path) +dpf.upload_file(str(plugin_path) + ".xml", dpf.path_utilities.join(str(tmp), "plugins", "gltf_plugin.xml")) dpf.load_library( dpf.path_utilities.join(tmp, "plugins", "gltf_plugin"), @@ -235,8 +227,6 @@ # Use the custom operator # ----------------------- -import os - model = dpf.Model(dpf.upload_file_in_tmp_folder(examples.find_static_rst())) mesh = model.metadata.meshed_region @@ -245,14 +235,14 @@ displacement = model.results.displacement() displacement.inputs.mesh_scoping(skin_mesh) displacement.inputs.mesh(skin_mesh) -new_operator.inputs.path(os.path.join(tmp, "out")) +new_operator.inputs.path(str(tmp / "out")) new_operator.inputs.mesh(skin_mesh) new_operator.inputs.field(displacement.outputs.fields_container()[0]) new_operator.run() print("operator ran successfully") -dpf.download_file(os.path.join(tmp, "out.glb"), os.path.join(os.getcwd(), "out.glb")) +dpf.download_file(tmp / "out.glb", Path.cwd / "out.glb") # %% # You can download :download:`output ` 
from the ``gltf`` operator. diff --git a/pyproject.toml b/pyproject.toml index 02b4cb90c2..c0443473a7 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -70,7 +70,7 @@ select = [ # "F", # pyflakes, see https://beta.ruff.rs/docs/rules/#pyflakes-f # "I", # isort, see https://beta.ruff.rs/docs/rules/#isort-i # "N", # pep8-naming, see https://beta.ruff.rs/docs/rules/#pep8-naming-n -# "PTH", # flake9-use-pathlib, https://beta.ruff.rs/docs/rules/#flake8-use-pathlib-pth + "PTH", # flake9-use-pathlib, https://beta.ruff.rs/docs/rules/#flake8-use-pathlib-pth # "TD", # flake8-todos, https://docs.astral.sh/ruff/rules/#flake8-todos-td ] ignore = [ diff --git a/src/ansys/dpf/core/core.py b/src/ansys/dpf/core/core.py index 199f5295c4..e3b4cb7948 100644 --- a/src/ansys/dpf/core/core.py +++ b/src/ansys/dpf/core/core.py @@ -29,6 +29,7 @@ import logging import warnings import weakref +from pathlib import Path from ansys.dpf.core import errors, misc from ansys.dpf.core import server as server_module @@ -429,7 +430,6 @@ def load_library(self, file_path, name="", symbol="LoadOperators", generate_oper ) if generate_operators: # TODO: fix code generation upload posix - import os def __generate_code(TARGET_PATH, filename, name, symbol): from ansys.dpf.core.dpf_operator import Operator @@ -444,8 +444,8 @@ def __generate_code(TARGET_PATH, filename, name, symbol): except Exception as e: warnings.warn("Unable to generate the python code with error: " + str(e.args)) - local_dir = os.path.dirname(os.path.abspath(__file__)) - LOCAL_PATH = os.path.join(local_dir, "operators") + local_dir = Path(__file__).parent + LOCAL_PATH = local_dir / "operators" if not self._server().local_server: if self._server().os != "posix" or (not self._server().os and os.name != "posix"): # send local generated code @@ -762,23 +762,24 @@ def upload_files_in_folder( """ server_paths = [] for root, subdirectories, files in os.walk(client_folder_path): + root = Path(root) for subdirectory in subdirectories: - subdir = 
os.path.join(root, subdirectory) - for filename in os.listdir(subdir): - f = os.path.join(subdir, filename) + subdir = root / subdirectory + for filename in subdir.iterdir(): + f = subdir / filename server_paths = self._upload_and_get_server_path( specific_extension, - f, - filename, + str(f), + filename.name, server_paths, str(to_server_folder_path), subdirectory, ) for file in files: - f = os.path.join(root, file) + f = root / file server_paths = self._upload_and_get_server_path( specific_extension, - f, + str(f), file, server_paths, str(to_server_folder_path), @@ -836,7 +837,8 @@ def upload_file(self, file_path, to_server_file_path): """ server_file_path : str path generated server side """ - if os.stat(file_path).st_size == 0: + file_path = Path(file_path) + if file_path.stat().st_size == 0: - raise ValueError(file_path + " is empty") + raise ValueError(f"{file_path} is empty") if not self._server().has_client(): txt = """ @@ -868,11 +870,12 @@ def upload_file_in_tmp_folder(self, file_path, new_file_name=None): server_file_path : str path generated server side """ + file_path = Path(file_path) if new_file_name: file_name = new_file_name else: - file_name = os.path.basename(file_path) - if os.stat(file_path).st_size == 0: + file_name = file_path.name + if file_path.stat().st_size == 0: - raise ValueError(file_path + " is empty") + raise ValueError(f"{file_path} is empty") if not self._server().has_client(): txt = """ diff --git a/src/ansys/dpf/core/custom_operator.py b/src/ansys/dpf/core/custom_operator.py index f5e8941951..6060e20a8b 100644 --- a/src/ansys/dpf/core/custom_operator.py +++ b/src/ansys/dpf/core/custom_operator.py @@ -30,8 +30,7 @@ import abc import ctypes -import os -import pathlib +from pathlib import Path import re import shutil import tempfile @@ -85,23 +84,23 @@ def update_virtual_environment_for_custom_operators( raise NotImplementedError( "Updating the dpf-site.zip of a DPF Server is only available when InProcess." 
) - current_dpf_site_zip_path = os.path.join(server.ansys_path, "dpf", "python", "dpf-site.zip") + current_dpf_site_zip_path = Path(server.ansys_path) / "dpf" / "python" / "dpf-site.zip" # Get the path to where we store the original dpf-site.zip - original_dpf_site_zip_path = os.path.join( - server.ansys_path, "dpf", "python", "original", "dpf-site.zip" + original_dpf_site_zip_path = ( + Path(server.ansys_path) / "dpf" / "python" / "original" / "dpf-site.zip" ) # Restore the original dpf-site.zip if restore_original: - if os.path.exists(original_dpf_site_zip_path): + if original_dpf_site_zip_path.exists(): shutil.move(src=original_dpf_site_zip_path, dst=current_dpf_site_zip_path) - os.rmdir(os.path.dirname(original_dpf_site_zip_path)) + original_dpf_site_zip_path.parent.rmdir() else: warnings.warn("No original dpf-site.zip found. Current is most likely the original.") else: # Store original dpf-site.zip for this DPF Server if no original is stored - if not os.path.exists(os.path.dirname(original_dpf_site_zip_path)): - os.mkdir(os.path.dirname(original_dpf_site_zip_path)) - if not os.path.exists(original_dpf_site_zip_path): + if not original_dpf_site_zip_path.parent.exists(): + original_dpf_site_zip_path.parent.mkdir() + if not original_dpf_site_zip_path.exists(): shutil.move(src=current_dpf_site_zip_path, dst=original_dpf_site_zip_path) # Get the current paths to site_packages import site @@ -111,46 +110,47 @@ def update_virtual_environment_for_custom_operators( # Get the first one targeting an actual site-packages folder for path_to_site_packages in paths_to_current_site_packages: if path_to_site_packages[-13:] == "site-packages": - current_site_packages_path = pathlib.Path(path_to_site_packages) + current_site_packages_path = Path(path_to_site_packages) break if current_site_packages_path is None: warnings.warn("Could not find a currently loaded site-packages folder to update from.") return # If an ansys.dpf.core.path file exists, then the installation is 
editable - search_path = pathlib.Path(current_site_packages_path) + search_path = current_site_packages_path potential_editable = list(search_path.rglob("__editable__.ansys_dpf_core-*.pth")) if potential_editable: path_file = potential_editable[0] else: # Keep for older setuptools versions - path_file = os.path.join(current_site_packages_path, "ansys.dpf.core.pth") - if os.path.exists(path_file): + path_file = current_site_packages_path / "ansys.dpf.core.pth" + if path_file.exists(): # Treat editable installation of ansys-dpf-core - with open(path_file, "r") as f: + with path_file.open("r") as f: - current_site_packages_path = f.readline().strip() + current_site_packages_path = Path(f.readline().strip()) with tempfile.TemporaryDirectory() as tmpdir: - os.mkdir(os.path.join(tmpdir, "ansys_dpf_core")) - ansys_dir = os.path.join(tmpdir, "ansys_dpf_core") - os.mkdir(os.path.join(ansys_dir, "ansys")) - os.mkdir(os.path.join(ansys_dir, "ansys", "dpf")) - os.mkdir(os.path.join(ansys_dir, "ansys", "grpc")) + tmpdir = Path(tmpdir) + ansys_dir = tmpdir / "ansys_dpf_core" + ansys_dir.mkdir() + ansys_dir.joinpath("ansys").mkdir() + ansys_dir.joinpath("ansys", "dpf").mkdir() + ansys_dir.joinpath("ansys", "grpc").mkdir() shutil.copytree( - src=os.path.join(current_site_packages_path, "ansys", "dpf", "core"), - dst=os.path.join(ansys_dir, "ansys", "dpf", "core"), + src=current_site_packages_path / "ansys" / "dpf" / "core", + dst=ansys_dir / "ansys" / "dpf" / "core", ignore=lambda directory, contents: ["__pycache__", "result_files"], ) shutil.copytree( - src=os.path.join(current_site_packages_path, "ansys", "dpf", "gate"), - dst=os.path.join(ansys_dir, "ansys", "dpf", "gate"), + src=current_site_packages_path / "ansys" / "dpf" / "gate", + dst=ansys_dir / "ansys" / "dpf" / "gate", ignore=lambda directory, contents: ["__pycache__"], ) shutil.copytree( - src=os.path.join(current_site_packages_path, "ansys", "grpc", "dpf"), - dst=os.path.join(ansys_dir, "ansys", "grpc", "dpf"), + src=current_site_packages_path / "ansys" / "grpc" / "dpf", + 
dst=ansys_dir / "ansys" / "grpc" / "dpf", ignore=lambda directory, contents: ["__pycache__"], ) # Find the .dist_info folder pattern = re.compile(r"^ansys_dpf_core\S*") - for p in pathlib.Path(current_site_packages_path).iterdir(): + for p in current_site_packages_path.iterdir(): if p.is_dir(): # print(p.stem) if re.search(pattern, p.stem): @@ -158,12 +158,12 @@ def update_virtual_environment_for_custom_operators( break shutil.copytree( src=dist_info_path, - dst=os.path.join(ansys_dir, dist_info_path.name), + dst=ansys_dir / dist_info_path.name, ) # Zip the files as dpf-site.zip - base_name = os.path.join(tmpdir, "ansys_dpf_core_zip") + base_name = tmpdir / "ansys_dpf_core_zip" base_dir = "." - root_dir = os.path.join(tmpdir, "ansys_dpf_core") # OK + root_dir = tmpdir / "ansys_dpf_core" # OK shutil.make_archive( base_name=base_name, root_dir=root_dir, base_dir=base_dir, format="zip" ) @@ -173,7 +173,7 @@ def update_virtual_environment_for_custom_operators( for item in original.infolist(): if "ansys" not in item.filename: archive.writestr(item, original.read(item)) - with zipfile.ZipFile(base_name + ".zip", mode="r") as original: + with zipfile.ZipFile(str(base_name) + ".zip", mode="r") as original: for item in original.infolist(): archive.writestr(item, original.read(item)) diff --git a/src/ansys/dpf/core/data_sources.py b/src/ansys/dpf/core/data_sources.py index d694f54b5a..f547f9e700 100644 --- a/src/ansys/dpf/core/data_sources.py +++ b/src/ansys/dpf/core/data_sources.py @@ -28,6 +28,7 @@ """ import os +from pathlib import Path import warnings import traceback from typing import Union @@ -142,7 +143,7 @@ def set_result_file_path(self, filepath, key=""): ['/tmp/file.rst'] """ - extension = os.path.splitext(filepath)[1] + extension = Path(filepath).suffix # Handle .res files from CFX if key == "" and extension == ".res": key = "cas" @@ -162,7 +163,7 @@ def set_result_file_path(self, filepath, key=""): def guess_result_key(filepath: str) -> str: """Guess result key 
for files without a file extension.""" result_keys = ["d3plot", "binout"] - base_name = os.path.basename(filepath) + base_name = Path(filepath).name # Handle files without extension for result_key in result_keys: if result_key in base_name: @@ -172,14 +173,13 @@ def guess_second_key(filepath: str) -> str: @staticmethod def guess_second_key(filepath: str) -> str: """For files with an h5 or cff extension, look for another extension.""" + + # These files usually end with .cas.h5 or .dat.h5 accepted = ["cas", "dat"] - without_ext = os.path.splitext(filepath)[0] - new_split = os.path.splitext(without_ext) + new_split = Path(filepath).suffixes new_key = "" - if len(new_split) > 1: - key = new_split[1][1:] - if key in accepted: - new_key = key + if len(new_split) > 1 and new_split[-2].strip(".") in accepted: + new_key = new_split[-2].strip(".") return new_key def set_domain_result_file_path( @@ -241,9 +241,12 @@ def add_file_path(self, filepath, key="", is_domain: bool = False, domain_id=0): """ # The filename needs to be a fully qualified file name - if not os.path.dirname(filepath): + # if not os.path.dirname(filepath) + + filepath = Path(filepath) + if not filepath.parent.name: # append local path - filepath = os.path.join(os.getcwd(), os.path.basename(filepath)) + filepath = Path.cwd() / filepath.name if is_domain: if key == "": raise NotImplementedError("A key must be given when using is_domain=True.") @@ -280,9 +283,10 @@ def add_domain_file_path(self, filepath, key, domain_id): """ # The filename needs to be a fully qualified file name - if not os.path.dirname(filepath): + filepath = Path(filepath) + if not filepath.parent.name: # append local path - filepath = os.path.join(os.getcwd(), os.path.basename(filepath)) + filepath = Path.cwd() / filepath.name self._api.data_sources_add_domain_file_path_with_key_utf8( self, str(filepath), key, domain_id ) @@ -307,9 +311,10 @@ def add_file_path_for_specified_result(self, filepath, key="", result_key=""): The default is ``""``, in which case the 
key is found directly. """ # The filename needs to be a fully qualified file name - if not os.path.dirname(filepath): + filepath = Path(filepath) + if not filepath.parent.name: # append local path - filepath = os.path.join(os.getcwd(), os.path.basename(filepath)) + filepath = Path.cwd() / filepath.name self._api.data_sources_add_file_path_for_specified_result_utf8( self, str(filepath), key, result_key