diff --git a/examples/05-file-IO/00-hdf5_double_float_comparison.py b/examples/05-file-IO/00-hdf5_double_float_comparison.py
index 43cde7f7a5..730b00a72a 100644
--- a/examples/05-file-IO/00-hdf5_double_float_comparison.py
+++ b/examples/05-file-IO/00-hdf5_double_float_comparison.py
@@ -36,7 +36,7 @@
 # Import the ``dpf-core`` module and its examples files, and then create a
 # temporary directory.
 
-from pathlib import Path
+import os
 
 from ansys.dpf import core as dpf
 from ansys.dpf.core import examples
@@ -78,8 +78,8 @@
 # Define a temporary folder for outputs
 tmpdir = dpf.core.make_tmp_dir_server(dpf.SERVER)
 files = [
-    Path(dpf.path_utilities.join(tmpdir, "dpf_float.h5")),
-    Path(dpf.path_utilities.join(tmpdir, "dpf_double.h5")),
+    dpf.path_utilities.join(tmpdir, "dpf_float.h5"),
+    dpf.path_utilities.join(tmpdir, "dpf_double.h5"),
 ]
 ###############################################################################
 # Export with simple precision.
@@ -98,8 +98,8 @@
 
 # Download the resulting .h5 files if necessary
 if not dpf.SERVER.local_server:
-    float_file_path = Path.cwd() / "dpf_float.h5"
-    double_file_path = Path.cwd() / "dpf_double.h5"
+    float_file_path = os.path.join(os.getcwd(), "dpf_float.h5")
+    double_file_path = os.path.join(os.getcwd(), "dpf_double.h5")
     dpf.download_file(files[0], float_file_path)
     dpf.download_file(files[1], double_file_path)
 else:
@@ -109,8 +109,8 @@
 ###############################################################################
 # Compare simple precision versus double precision.
-float_precision = float_file_path.stat().st_size
-double_precision = double_file_path.stat().st_size
+float_precision = os.stat(float_file_path).st_size
+double_precision = os.stat(double_file_path).st_size
 print(
     f"size with float precision: {float_precision}\n"
     f"size with double precision: {double_precision}"
 )
diff --git a/examples/05-file-IO/04-basic-load-file.py b/examples/05-file-IO/04-basic-load-file.py
index adc3467780..47b8fe14ea 100644
--- a/examples/05-file-IO/04-basic-load-file.py
+++ b/examples/05-file-IO/04-basic-load-file.py
@@ -58,16 +58,16 @@
 # ~~~~~~~~~~~~~
 # Export the fields container in the CSV format:
 
-from pathlib import Path
+import os
 
 csv_file_name = "simple_bar_fc.csv"
 # Define an output path for the resulting .csv file
 if not dpf.SERVER.local_server:
     # Define it server-side if using a remote server
     tmp_dir_path = dpf.core.make_tmp_dir_server(dpf.SERVER)
-    server_file_path = Path(dpf.path_utilities.join(tmp_dir_path, csv_file_name))
+    server_file_path = dpf.path_utilities.join(tmp_dir_path, csv_file_name)
 else:
-    server_file_path = Path.cwd() / csv_file_name
+    server_file_path = os.path.join(os.getcwd(), csv_file_name)
 
 # Perform the export to csv on the server side
 export_csv_operator = dpf.operators.serialization.field_to_csv()
@@ -81,7 +81,7 @@
 # Download the file ``simple_bar_fc.csv``:
 
 if not dpf.SERVER.local_server:
-    downloaded_client_file_path = Path.cwd() / "simple_bar_fc_downloaded.csv"
+    downloaded_client_file_path = os.path.join(os.getcwd(), "simple_bar_fc_downloaded.csv")
     dpf.download_file(server_file_path, downloaded_client_file_path)
 else:
     downloaded_client_file_path = server_file_path
@@ -98,7 +98,7 @@
 mesh.plot(server_fc_out)
 
 # Remove file to avoid polluting.
-downloaded_client_file_path.unlink()
+os.remove(downloaded_client_file_path)
 
 ###############################################################################
 # Make operations over the fields container
diff --git a/examples/08-python-operators/00-wrapping_numpy_capabilities.py b/examples/08-python-operators/00-wrapping_numpy_capabilities.py
index 90e0828b92..63d242ec39 100644
--- a/examples/08-python-operators/00-wrapping_numpy_capabilities.py
+++ b/examples/08-python-operators/00-wrapping_numpy_capabilities.py
@@ -57,12 +57,11 @@
 # Download and display the Python script.
 
 from ansys.dpf.core.examples import download_easy_statistics
-from pathlib import Path
 
-operator_file_path = Path(download_easy_statistics())
+operator_file_path = download_easy_statistics()
 
-with operator_file_path.open() as file:
-    for line in file.readlines():
+with open(operator_file_path, "r") as f:
+    for line in f.readlines():
         print("\t\t\t" + line)
 
 ###############################################################################
@@ -77,14 +76,15 @@
 # - The third argument is the name of the function used to record operators.
 #
 
+import os
 from ansys.dpf import core as dpf
 from ansys.dpf.core import examples
 
 # Python plugins are not supported in process.
 dpf.start_local_server(config=dpf.AvailableServerConfigs.GrpcServer)
 
-operator_server_file_path = Path(dpf.upload_file_in_tmp_folder(operator_file_path))
-dpf.load_library(operator_server_file_path.parent, "py_easy_statistics", "load_operators")
+operator_server_file_path = dpf.upload_file_in_tmp_folder(operator_file_path)
+dpf.load_library(os.path.dirname(operator_server_file_path), "py_easy_statistics", "load_operators")
 
 ###############################################################################
 # Instantiate the operator.
diff --git a/examples/08-python-operators/01-package_python_operators.py b/examples/08-python-operators/01-package_python_operators.py
index b2de42266c..f6fd6cfc39 100644
--- a/examples/08-python-operators/01-package_python_operators.py
+++ b/examples/08-python-operators/01-package_python_operators.py
@@ -72,6 +72,8 @@
 # for the plug-in package that is used to record operators.
 #
 
+import os
+
 from ansys.dpf import core as dpf
 from ansys.dpf.core import examples
 
@@ -81,7 +83,7 @@
 tmp = dpf.make_tmp_dir_server()
 dpf.upload_files_in_folder(dpf.path_utilities.join(tmp, "average_filter_plugin"), plugin_folder)
 dpf.load_library(
-    dpf.path_utilities.join(tmp, "average_filter_plugin"),
+    os.path.join(dpf.path_utilities.join(tmp, "average_filter_plugin")),
     "py_average_filter",
     "load_operators",
 )
diff --git a/examples/08-python-operators/02-python_operators_with_dependencies.py b/examples/08-python-operators/02-python_operators_with_dependencies.py
index 5d769b305f..d4f80e3199 100644
--- a/examples/08-python-operators/02-python_operators_with_dependencies.py
+++ b/examples/08-python-operators/02-python_operators_with_dependencies.py
@@ -58,12 +58,11 @@
 # created for you.
 
 import os
-from pathlib import Path
 
 from ansys.dpf.core import examples
 
-plugin_path = Path(examples.download_gltf_plugin())
-folder_root = Path(__file__).parent.parent.parent
+plugin_path = examples.download_gltf_plugin()
+folder_root = os.path.join(os.getcwd().rsplit("pydpf-core", 1)[0], "pydpf-core")
 
 # %%
 # To add third-party modules as dependencies to a plug-in package, you must
@@ -84,9 +83,8 @@
 # To simplify this step, you can add a requirements file in the plug-in package:
 #
 print("\033[1m gltf_plugin/requirements.txt: \n \033[0m")
-requirements_path = plugin_path / "requirements.txt"
-with requirements_path.open("r") as file:
-    for line in file.readlines():
+with open(os.path.join(plugin_path, "requirements.txt"), "r") as f:
+    for line in f.readlines():
         print("\t\t\t" + line)
 
 
@@ -119,21 +117,26 @@
 #
 # create_sites_for_python_operators.sh -pluginpath /path/to/plugin -zippath /path/to/plugin/assets/linx64.zip # noqa: E501
 
-site_path = plugin_path / "assets" / "gltf_sites_winx64.zip"
-if os.name == "nt" and not site_path.exists():
-    cmd_file = (
-        folder_root / "doc" / "source" / "user_guide" / "create_sites_for_python_operators.ps1"
+
+if os.name == "nt" and not os.path.exists(
+    os.path.join(plugin_path, "assets", "gltf_sites_winx64.zip")
+):
+    cmd_file = os.path.join(
+        folder_root,
+        "doc",
+        "source",
+        "user_guide",
+        "create_sites_for_python_operators.ps1",
     )
     args = [
         "powershell",
-        str(cmd_file),
+        cmd_file,
         "-pluginpath",
-        str(plugin_path),
+        plugin_path,
         "-zippath",
-        str(plugin_path / "assets" / "gltf_sites_winx64.zip"),
+        os.path.join(plugin_path, "assets", "gltf_sites_winx64.zip"),
     ]
     print(args)
-
     import subprocess
 
     process = subprocess.run(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
@@ -147,15 +150,20 @@
         )
     else:
         print("Installing pygltf in a virtual environment succeeded")
-
-elif os.name == "posix" and not site_path.exists():
-    cmd_file = (
-        folder_root / "doc" / "source" / "user_guide" / "create_sites_for_python_operators.sh"
+elif os.name == "posix" and not os.path.exists(
+    os.path.join(plugin_path, "assets", "gltf_sites_linx64.zip")
+):
+    cmd_file = os.path.join(
+        folder_root,
+        "doc",
+        "source",
+        "user_guide",
+        "create_sites_for_python_operators.sh",
     )
     run_cmd = f"{cmd_file}"
     args = (
         f' -pluginpath "{plugin_path}" '
-        f"-zippath \"{plugin_path / "assets" / "gltf_sites_winx64.zip"}\""
+        f"-zippath \"{os.path.join(plugin_path, 'assets', 'gltf_sites_linx64.zip')}\""
     )
     print(run_cmd + args)
     os.system(f"chmod u=rwx,o=x {cmd_file}")
@@ -181,9 +189,9 @@
 
 # Python plugins are not supported in process.
 dpf.start_local_server(config=dpf.AvailableServerConfigs.GrpcServer)
-tmp = Path(dpf.make_tmp_dir_server())
-dpf.upload_files_in_folder(dpf.path_utilities.join(str(tmp), "plugins", "gltf_plugin"), plugin_path)
-dpf.upload_file(str(plugin_path) + ".xml", dpf.path_utilities.join(str(tmp), "plugins", "gltf_plugin.xml"))
+tmp = dpf.make_tmp_dir_server()
+dpf.upload_files_in_folder(dpf.path_utilities.join(tmp, "plugins", "gltf_plugin"), plugin_path)
+dpf.upload_file(plugin_path + ".xml", dpf.path_utilities.join(tmp, "plugins", "gltf_plugin.xml"))
 
 dpf.load_library(
     dpf.path_utilities.join(tmp, "plugins", "gltf_plugin"),
@@ -227,6 +235,8 @@
 # Use the custom operator
 # -----------------------
 
+import os
+
 model = dpf.Model(dpf.upload_file_in_tmp_folder(examples.find_static_rst()))
 mesh = model.metadata.meshed_region
 
@@ -235,14 +245,14 @@
 displacement = model.results.displacement()
 displacement.inputs.mesh_scoping(skin_mesh)
 displacement.inputs.mesh(skin_mesh)
-new_operator.inputs.path(str(tmp / "out"))
+new_operator.inputs.path(os.path.join(tmp, "out"))
 new_operator.inputs.mesh(skin_mesh)
 new_operator.inputs.field(displacement.outputs.fields_container()[0])
 
 new_operator.run()
 print("operator ran successfully")
 
-dpf.download_file(tmp / "out.glb", Path.cwd / "out.glb")
+dpf.download_file(os.path.join(tmp, "out.glb"), os.path.join(os.getcwd(), "out.glb"))
 
 # %%
 # You can download :download:`output ` from the ``gltf`` operator.
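
Below is a minimal standalone sketch of the os-based path handling pattern these examples share: build the output path in a server-side temporary directory when the server is remote, download the result to the client, then clean up. It reuses only calls that appear in the hunks above, elides the actual export step, assumes a DPF server is already running, and uses a hypothetical file name; it is an illustration, not part of any example file.

import os

from ansys.dpf import core as dpf

file_name = "output.csv"  # hypothetical output name

# Build the output path where the server can write it.
if not dpf.SERVER.local_server:
    tmp_dir_path = dpf.core.make_tmp_dir_server(dpf.SERVER)
    server_file_path = dpf.path_utilities.join(tmp_dir_path, file_name)
else:
    server_file_path = os.path.join(os.getcwd(), file_name)

# ... run a serialization operator that writes to server_file_path ...

# Bring the result back to the client when the server is remote.
if not dpf.SERVER.local_server:
    client_file_path = os.path.join(os.getcwd(), file_name)
    dpf.download_file(server_file_path, client_file_path)
else:
    client_file_path = server_file_path

print(f"size: {os.stat(client_file_path).st_size}")  # inspect the result
os.remove(client_file_path)  # remove the file to avoid polluting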