diff --git a/.ci/build_wheel.py b/.ci/build_wheel.py index c3d43099ee..6acac20032 100644 --- a/.ci/build_wheel.py +++ b/.ci/build_wheel.py @@ -3,6 +3,7 @@ import argparse import subprocess +from pathlib import Path import os import sys import shutil @@ -39,15 +40,13 @@ print("Created temporary directory: ", tmpdirname) # Create the temporary build-opts.cfg - build_opts_path = os.path.join(tmpdirname, "build-opts.cfg") - with open(build_opts_path, "w") as build_opts_file: - build_opts_file.write(f"[bdist_wheel]\nplat-name={requested_platform}") - os.environ["DIST_EXTRA_CONFIG"] = build_opts_path + build_opts_path = Path(tmpdirname) / "build-opts.cfg" + + build_opts_path.write_text(f"[bdist_wheel]\nplat-name={requested_platform}", encoding="utf-8") + os.environ["DIST_EXTRA_CONFIG"] = str(build_opts_path) # Move the binaries - gatebin_folder_path = os.path.join( - os.path.curdir, os.path.join("src", "ansys", "dpf", "gatebin") - ) + gatebin_folder_path = Path.cwd() / "src" / "ansys" / "dpf" / "gatebin" binaries_to_move = [] moved = [] if "win" in requested_platform or "any" == requested_platform: @@ -60,15 +59,15 @@ binaries_to_move.extend(["_version.py"]) for binary_name in binaries_to_move: - src = os.path.join(gatebin_folder_path, binary_name) - dst = os.path.join(tmpdirname, binary_name) + src = gatebin_folder_path / binary_name + dst = Path(tmpdirname) / binary_name print(f"Moving {src} to {dst}") shutil.move(src=src, dst=dst) moved.append([dst, src]) if "any" == requested_platform: # Also remove the gatebin folder - os.rmdir(gatebin_folder_path) + gatebin_folder_path.rmdir() # Call the build if not args.wheelhouse: @@ -83,7 +82,7 @@ if "any" == requested_platform: # Recreate the gatebin folder - os.mkdir(gatebin_folder_path) + gatebin_folder_path.mkdir() # Move binaries back for move_back in moved: diff --git a/.ci/code_generation.py b/.ci/code_generation.py index cb372e324a..e073929848 100644 --- a/.ci/code_generation.py +++ b/.ci/code_generation.py @@ -8,21 
+8,21 @@ import shutil -local_dir = os.path.dirname(os.path.abspath(__file__)) -TARGET_PATH = os.path.join(local_dir, os.pardir, "src", "ansys", "dpf", "core", "operators") -files = glob.glob(os.path.join(TARGET_PATH, "*")) -for f in files: - if Path(f).stem == "specification": +local_dir = Path(__file__).parent +TARGET_PATH = local_dir.parent / "src" / "ansys" / "dpf" / "core" / "operators" +files = TARGET_PATH.glob("*") +for file_path in files: + if file_path.stem == "specification": continue - if Path(f).name == "build.py": + if file_path.name == "build.py": continue - if Path(f).name == "operator.mustache": + if file_path.name == "operator.mustache": continue try: - if os.path.isdir(f): - shutil.rmtree(f) + if file_path.is_dir(): + shutil.rmtree(file_path) else: - os.remove(f) + file_path.unlink() except: pass diff --git a/.ci/run_examples.py b/.ci/run_examples.py index 5a3da2b7bd..91d9d56628 100644 --- a/.ci/run_examples.py +++ b/.ci/run_examples.py @@ -1,6 +1,6 @@ import os import glob -import pathlib +from pathlib import Path import subprocess import sys @@ -11,8 +11,9 @@ os.environ["PYVISTA_OFF_SCREEN"] = "true" os.environ["MPLBACKEND"] = "Agg" -actual_path = pathlib.Path(__file__).parent.absolute() -print(os.path.join(actual_path, os.path.pardir, "examples")) +actual_path = Path(__file__).parent.absolute() +examples_path = actual_path.parent / "examples" +print(examples_path) # Get the DPF server version server = dpf.server.get_or_create_server(None) @@ -20,13 +21,13 @@ server.shutdown() print(f"Server version: {server_version}") -for root, subdirectories, files in os.walk(os.path.join(actual_path, os.path.pardir, "examples")): +for root, subdirectories, files in os.walk(examples_path): for subdirectory in subdirectories: - subdir = os.path.join(root, subdirectory) - for file in glob.iglob(os.path.join(subdir, "*.py")): - if sys.platform == "linux" and "08-python-operators" in file: + subdir = Path(root) / subdirectory + for file in subdir.glob("*.py"): + 
if sys.platform == "linux" and "08-python-operators" in str(file): continue - elif "win" in sys.platform and "06-distributed_stress_averaging" in file: + elif "win" in sys.platform and "06-distributed_stress_averaging" in str(file): # Currently very unstable in the GH CI continue print("\n--------------------------------------------------") @@ -36,7 +37,7 @@ print(f"Example skipped as it requires DPF {minimum_version_str}.", flush=True) continue try: - out = subprocess.check_output([sys.executable, file]) + out = subprocess.check_output([sys.executable, str(file)]) except subprocess.CalledProcessError as e: sys.stderr.write(str(e.args)) if e.returncode != 3221225477: diff --git a/.ci/run_non_regression_examples.py b/.ci/run_non_regression_examples.py index 247e074531..fc3dc8fedc 100644 --- a/.ci/run_non_regression_examples.py +++ b/.ci/run_non_regression_examples.py @@ -9,49 +9,30 @@ os.environ["MPLBACKEND"] = "Agg" actual_path = pathlib.Path(__file__).parent.absolute() -print(os.path.join(actual_path, os.path.pardir, "examples")) +examples_path = actual_path.parent / "examples" +print(examples_path) list_tests = [ - os.path.join(actual_path, os.path.pardir, "examples", "00-basic"), - os.path.join(actual_path, os.path.pardir, "examples", "01-transient_analyses"), - os.path.join(actual_path, os.path.pardir, "examples", "02-modal_analyses"), - os.path.join(actual_path, os.path.pardir, "examples", "03-harmonic_analyses"), - os.path.join(actual_path, os.path.pardir, "examples", "06-plotting", "00-basic_plotting.py"), - os.path.join( - actual_path, - os.path.pardir, - "examples", - "06-plotting", - "05-plot_on_warped_mesh.py", - ), - os.path.join( - actual_path, - os.path.pardir, - "examples", - "07-distributed-post", - "00-distributed_total_disp.py", - ), + examples_path / "00-basic", + examples_path / "01-transient_analyses", + examples_path / "02-modal_analyses", + examples_path / "03-harmonic_analyses", + examples_path / "06-plotting" / "00-basic_plotting.py", + 
examples_path / "06-plotting" / "05-plot_on_warped_mesh.py", + examples_path / "07-distributed-post" / "00-distributed_total_disp.py", ] if core.SERVER_CONFIGURATION != core.AvailableServerConfigs.InProcessServer: - list_tests.append( - os.path.join( - actual_path, - os.path.pardir, - "examples", - "08-python-operators", - "00-wrapping_numpy_capabilities.py", - ) - ) + list_tests.append(examples_path / "08-python-operators" / "00-wrapping_numpy_capabilities.py") for path in list_tests: - if os.path.isdir(path): - for file in glob.iglob(os.path.join(path, "*.py")): + if path.is_dir(): + for file in path.glob("*.py"): print("\n--------------------------------------------------") print(file) try: - subprocess.check_call([sys.executable, file]) + subprocess.check_call([sys.executable, str(file)]) except subprocess.CalledProcessError as e: sys.stderr.write(str(e.args)) if e.returncode != 3221225477: @@ -61,7 +42,7 @@ print("\n--------------------------------------------------") print(path) try: - subprocess.check_call([sys.executable, file]) + subprocess.check_call([sys.executable, str(file)]) except subprocess.CalledProcessError as e: sys.stderr.write(str(e.args)) if e.returncode != 3221225477: diff --git a/.ci/update_dpf_dependencies.py b/.ci/update_dpf_dependencies.py index a6f9d72d13..c3426d8a9d 100644 --- a/.ci/update_dpf_dependencies.py +++ b/.ci/update_dpf_dependencies.py @@ -15,7 +15,7 @@ import os import glob -import pathlib +from pathlib import Path import platform import shutil import zipfile @@ -23,21 +23,21 @@ grpc_path_key = "DPFDV_ROOT" gate_path_key = "ANSYSDPFPYGATE_ROOT" -core_path = pathlib.Path(__file__).parent.parent.resolve() +core_path = Path(__file__).parent.parent if "ANSYSDPFCORE_ROOT" in os.environ: core_path = os.environ["ANSYSDPFCORE_ROOT"] grpc_path = os.getenv(grpc_path_key, None) gate_path = os.getenv(gate_path_key, None) -if grpc_path is not None: +if grpc_path: # Update ansys-grpc-dpf with latest in proto/dist print("Updating 
ansys.grpc.dpf") - dist_path = os.path.join(grpc_path, "proto", "dist", "*") + dist_path = Path(grpc_path) / "proto" / "dist" print(f"from {dist_path}") - destination = os.path.join(core_path, "src") + destination = Path(core_path) / "src" print(f"into {destination}") - latest_wheel = max(glob.glob(dist_path), key=os.path.getctime) + latest_wheel = max(dist_path.glob("*"), key=os.path.getctime) with zipfile.ZipFile(latest_wheel, "r") as wheel: for file in wheel.namelist(): # print(file) @@ -50,40 +50,34 @@ else: print(f"{grpc_path_key} environment variable is not defined. " "Cannot update ansys-grpc-dpf.") -if gate_path is not None: +if gate_path: # Update ansys-dpf-gate print("Updating ansys.dpf.gate generated code") - dist_path = os.path.join(gate_path, "ansys-dpf-gate", "ansys", "dpf", "gate", "generated") + dist_path = Path(gate_path) / "ansys-dpf-gate" / "ansys" / "dpf" / "gate" / "generated" print(f"from {dist_path}") - destination = os.path.join(core_path, "src", "ansys", "dpf", "gate", "generated") + destination = Path(core_path) / "src" / "ansys" / "dpf" / "gate" / "generated" print(f"into {destination}") shutil.copytree( src=dist_path, dst=destination, dirs_exist_ok=True, - ignore=lambda directory, contents: ["__pycache__"] if directory[-5:] == "gate" else [], + ignore=lambda directory, contents: ["__pycache__"] if str(directory)[-5:] == "gate" else [], ) - dist_path = os.path.join(gate_path, "ansys-dpf-gate", "ansys", "dpf", "gate", "__init__.py") + + dist_path = Path(gate_path) / "ansys-dpf-gate" / "ansys" / "dpf" / "gate" / "__init__.py" print(f"from {dist_path}") - destination = os.path.join(core_path, "src", "ansys", "dpf", "gate", "__init__.py") + destination = Path(core_path) / "src" / "ansys" / "dpf" / "gate" / "__init__.py" print(f"into {destination}") - shutil.copy( - src=dist_path, - dst=destination, - ) + shutil.copy(src=dist_path, dst=destination) print("Done updating ansys.dpf.gate generated code") # Update ansys-dpf-gatebin print("Updating 
ansys.dpf.gatebin") - dist_path = os.path.join(gate_path, "ansys-dpf-gatebin", "ansys") + dist_path = Path(gate_path) / "ansys-dpf-gatebin" / "ansys" print(f"from {dist_path}") - destination = os.path.join(core_path, "src", "ansys") + destination = Path(core_path) / "src" / "ansys" print(f"into {destination}") - shutil.copytree( - src=dist_path, - dst=destination, - dirs_exist_ok=True, - ) + shutil.copytree(src=dist_path, dst=destination, dirs_exist_ok=True) print(f"Done updating ansys.dpf.gatebin for {platform.system()}") else: print( diff --git a/examples/05-file-IO/00-hdf5_double_float_comparison.py b/examples/05-file-IO/00-hdf5_double_float_comparison.py index 730b00a72a..43cde7f7a5 100644 --- a/examples/05-file-IO/00-hdf5_double_float_comparison.py +++ b/examples/05-file-IO/00-hdf5_double_float_comparison.py @@ -36,7 +36,7 @@ # Import the ``dpf-core`` module and its examples files, and then create a # temporary directory. -import os +from pathlib import Path from ansys.dpf import core as dpf from ansys.dpf.core import examples @@ -78,8 +78,8 @@ # Define a temporary folder for outputs tmpdir = dpf.core.make_tmp_dir_server(dpf.SERVER) files = [ - dpf.path_utilities.join(tmpdir, "dpf_float.h5"), - dpf.path_utilities.join(tmpdir, "dpf_double.h5"), + Path(dpf.path_utilities.join(tmpdir, "dpf_float.h5")), + Path(dpf.path_utilities.join(tmpdir, "dpf_double.h5")), ] ############################################################################### # Export with simple precision. 
@@ -98,8 +98,8 @@ # Download the resulting .h5 files if necessary if not dpf.SERVER.local_server: - float_file_path = os.path.join(os.getcwd(), "dpf_float.h5") - double_file_path = os.path.join(os.getcwd(), "dpf_double.h5") + float_file_path = Path.cwd() / "dpf_float.h5" + double_file_path = Path.cwd() / "dpf_double.h5" dpf.download_file(files[0], float_file_path) dpf.download_file(files[1], double_file_path) else: @@ -109,8 +109,8 @@ ############################################################################### # Compare simple precision versus double precision. -float_precision = os.stat(float_file_path).st_size -double_precision = os.stat(double_file_path).st_size +float_precision = float_file_path.stat().st_size +double_precision = double_file_path.stat().st_size print( f"size with float precision: {float_precision}\n" f"size with double precision: {double_precision}" diff --git a/examples/05-file-IO/04-basic-load-file.py b/examples/05-file-IO/04-basic-load-file.py index 47b8fe14ea..adc3467780 100644 --- a/examples/05-file-IO/04-basic-load-file.py +++ b/examples/05-file-IO/04-basic-load-file.py @@ -58,16 +58,16 @@ # ~~~~~~~~~~~~~ # Export the fields container in the CSV format: -import os +from pathlib import Path csv_file_name = "simple_bar_fc.csv" # Define an output path for the resulting .csv file if not dpf.SERVER.local_server: # Define it server-side if using a remote server tmp_dir_path = dpf.core.make_tmp_dir_server(dpf.SERVER) - server_file_path = dpf.path_utilities.join(tmp_dir_path, csv_file_name) + server_file_path = Path(dpf.path_utilities.join(tmp_dir_path, csv_file_name)) else: - server_file_path = os.path.join(os.getcwd(), csv_file_name) + server_file_path = Path.cwd() / csv_file_name # Perform the export to csv on the server side export_csv_operator = dpf.operators.serialization.field_to_csv() @@ -81,7 +81,7 @@ # Download the file ``simple_bar_fc.csv``: if not dpf.SERVER.local_server: - downloaded_client_file_path = os.path.join(os.getcwd(), 
"simple_bar_fc_downloaded.csv") + downloaded_client_file_path = Path.cwd() / "simple_bar_fc_downloaded.csv" dpf.download_file(server_file_path, downloaded_client_file_path) else: downloaded_client_file_path = server_file_path @@ -98,7 +98,7 @@ mesh.plot(server_fc_out) # Remove file to avoid polluting. -os.remove(downloaded_client_file_path) +downloaded_client_file_path.unlink() ############################################################################### # Make operations over the fields container diff --git a/examples/08-python-operators/00-wrapping_numpy_capabilities.py b/examples/08-python-operators/00-wrapping_numpy_capabilities.py index 63d242ec39..90e0828b92 100644 --- a/examples/08-python-operators/00-wrapping_numpy_capabilities.py +++ b/examples/08-python-operators/00-wrapping_numpy_capabilities.py @@ -57,11 +57,12 @@ # Download and display the Python script. from ansys.dpf.core.examples import download_easy_statistics +from pathlib import Path -operator_file_path = download_easy_statistics() +operator_file_path = Path(download_easy_statistics()) -with open(operator_file_path, "r") as f: - for line in f.readlines(): +with operator_file_path.open() as file: + for line in file.readlines(): print("\t\t\t" + line) ############################################################################### @@ -76,15 +77,14 @@ # - The third argument is the name of the function used to record operators. # -import os from ansys.dpf import core as dpf from ansys.dpf.core import examples # Python plugins are not supported in process. 
dpf.start_local_server(config=dpf.AvailableServerConfigs.GrpcServer) -operator_server_file_path = dpf.upload_file_in_tmp_folder(operator_file_path) -dpf.load_library(os.path.dirname(operator_server_file_path), "py_easy_statistics", "load_operators") +operator_server_file_path = Path(dpf.upload_file_in_tmp_folder(operator_file_path)) +dpf.load_library(operator_server_file_path.parent, "py_easy_statistics", "load_operators") ############################################################################### # Instantiate the operator. diff --git a/examples/08-python-operators/01-package_python_operators.py b/examples/08-python-operators/01-package_python_operators.py index f6fd6cfc39..b2de42266c 100644 --- a/examples/08-python-operators/01-package_python_operators.py +++ b/examples/08-python-operators/01-package_python_operators.py @@ -72,8 +72,6 @@ # for the plug-in package that is used to record operators. # -import os - from ansys.dpf import core as dpf from ansys.dpf.core import examples @@ -83,7 +81,7 @@ tmp = dpf.make_tmp_dir_server() dpf.upload_files_in_folder(dpf.path_utilities.join(tmp, "average_filter_plugin"), plugin_folder) dpf.load_library( - os.path.join(dpf.path_utilities.join(tmp, "average_filter_plugin")), + dpf.path_utilities.join(tmp, "average_filter_plugin"), "py_average_filter", "load_operators", ) diff --git a/examples/08-python-operators/02-python_operators_with_dependencies.py b/examples/08-python-operators/02-python_operators_with_dependencies.py index d4f80e3199..e46147e7bd 100644 --- a/examples/08-python-operators/02-python_operators_with_dependencies.py +++ b/examples/08-python-operators/02-python_operators_with_dependencies.py @@ -58,11 +58,12 @@ # created for you. 
import os +from pathlib import Path from ansys.dpf.core import examples -plugin_path = examples.download_gltf_plugin() -folder_root = os.path.join(os.getcwd().rsplit("pydpf-core", 1)[0], "pydpf-core") +plugin_path = Path(examples.download_gltf_plugin()) +folder_root = Path(str(Path.cwd()).rsplit("pydpf-core", 1)[0]) / "pydpf-core" # %% # To add third-party modules as dependencies to a plug-in package, you must @@ -83,8 +84,9 @@ # To simplify this step, you can add a requirements file in the plug-in package: # print("\033[1m gltf_plugin/requirements.txt: \n \033[0m") -with open(os.path.join(plugin_path, "requirements.txt"), "r") as f: - for line in f.readlines(): +requirements_path = plugin_path / "requirements.txt" +with requirements_path.open("r") as file: + for line in file.readlines(): print("\t\t\t" + line) @@ -117,26 +119,21 @@ # # create_sites_for_python_operators.sh -pluginpath /path/to/plugin -zippath /path/to/plugin/assets/linx64.zip # noqa: E501 - -if os.name == "nt" and not os.path.exists( - os.path.join(plugin_path, "assets", "gltf_sites_winx64.zip") -): - cmd_file = os.path.join( - folder_root, - "doc", - "source", - "user_guide", - "create_sites_for_python_operators.ps1", +site_path = plugin_path / "assets" / "gltf_sites_winx64.zip" +if os.name == "nt" and not site_path.exists(): + cmd_file = ( + folder_root / "doc" / "source" / "user_guide" / "create_sites_for_python_operators.ps1" ) args = [ "powershell", - cmd_file, + str(cmd_file), "-pluginpath", - plugin_path, + str(plugin_path), "-zippath", - os.path.join(plugin_path, "assets", "gltf_sites_winx64.zip"), + str(plugin_path / "assets" / "gltf_sites_winx64.zip"), ] print(args) + import subprocess process = subprocess.run(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE) @@ -150,20 +147,15 @@ ) else: print("Installing pygltf in a virtual environment succeeded") -elif os.name == "posix" and not os.path.exists( - os.path.join(plugin_path, "assets", "gltf_sites_linx64.zip") -): - cmd_file = 
os.path.join( - folder_root, - "doc", - "source", - "user_guide", - "create_sites_for_python_operators.sh", + +elif os.name == "posix" and not (plugin_path / "assets" / "gltf_sites_linx64.zip").exists(): + cmd_file = ( + folder_root / "doc" / "source" / "user_guide" / "create_sites_for_python_operators.sh" ) run_cmd = f"{cmd_file}" args = ( f' -pluginpath "{plugin_path}" ' - f"-zippath \"{os.path.join(plugin_path, 'assets', 'gltf_sites_linx64.zip')}\"" + f'-zippath "{plugin_path / "assets" / "gltf_sites_linx64.zip"}"' ) print(run_cmd + args) os.system(f"chmod u=rwx,o=x {cmd_file}") @@ -189,12 +181,14 @@ # Python plugins are not supported in process. dpf.start_local_server(config=dpf.AvailableServerConfigs.GrpcServer) -tmp = dpf.make_tmp_dir_server() -dpf.upload_files_in_folder(dpf.path_utilities.join(tmp, "plugins", "gltf_plugin"), plugin_path) -dpf.upload_file(plugin_path + ".xml", dpf.path_utilities.join(tmp, "plugins", "gltf_plugin.xml")) +tmp = Path(dpf.make_tmp_dir_server()) +dpf.upload_files_in_folder(dpf.path_utilities.join(str(tmp), "plugins", "gltf_plugin"), plugin_path) +dpf.upload_file( + str(plugin_path) + ".xml", dpf.path_utilities.join(str(tmp), "plugins", "gltf_plugin.xml") +) dpf.load_library( - dpf.path_utilities.join(tmp, "plugins", "gltf_plugin"), + dpf.path_utilities.join(str(tmp), "plugins", "gltf_plugin"), "py_dpf_gltf", "load_operators", ) @@ -235,8 +229,6 @@ # Use the custom operator # ----------------------- -import os - model = dpf.Model(dpf.upload_file_in_tmp_folder(examples.find_static_rst())) mesh = model.metadata.meshed_region @@ -245,14 +237,14 @@ displacement = model.results.displacement() displacement.inputs.mesh_scoping(skin_mesh) displacement.inputs.mesh(skin_mesh) -new_operator.inputs.path(os.path.join(tmp, "out")) +new_operator.inputs.path(str(tmp / "out")) new_operator.inputs.mesh(skin_mesh) new_operator.inputs.field(displacement.outputs.fields_container()[0]) new_operator.run() print("operator ran successfully") -dpf.download_file(os.path.join(tmp, "out.glb"), 
os.path.join(os.getcwd(), "out.glb")) +dpf.download_file(tmp / "out.glb", Path.cwd() / "out.glb") # %% # You can download :download:`output ` from the ``gltf`` operator. diff --git a/pyproject.toml b/pyproject.toml index 02b4cb90c2..c0443473a7 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -70,7 +70,7 @@ select = [ # "F", # pyflakes, see https://beta.ruff.rs/docs/rules/#pyflakes-f # "I", # isort, see https://beta.ruff.rs/docs/rules/#isort-i # "N", # pep8-naming, see https://beta.ruff.rs/docs/rules/#pep8-naming-n -# "PTH", # flake9-use-pathlib, https://beta.ruff.rs/docs/rules/#flake8-use-pathlib-pth + "PTH", # flake9-use-pathlib, https://beta.ruff.rs/docs/rules/#flake8-use-pathlib-pth # "TD", # flake8-todos, https://docs.astral.sh/ruff/rules/#flake8-todos-td ] ignore = [ diff --git a/src/ansys/dpf/core/core.py b/src/ansys/dpf/core/core.py index 199f5295c4..e3b4cb7948 100644 --- a/src/ansys/dpf/core/core.py +++ b/src/ansys/dpf/core/core.py @@ -29,6 +29,7 @@ import logging import warnings import weakref +from pathlib import Path from ansys.dpf.core import errors, misc from ansys.dpf.core import server as server_module @@ -429,7 +430,6 @@ def load_library(self, file_path, name="", symbol="LoadOperators", generate_oper ) if generate_operators: # TODO: fix code generation upload posix - import os def __generate_code(TARGET_PATH, filename, name, symbol): from ansys.dpf.core.dpf_operator import Operator @@ -444,8 +444,8 @@ def __generate_code(TARGET_PATH, filename, name, symbol): except Exception as e: warnings.warn("Unable to generate the python code with error: " + str(e.args)) - local_dir = os.path.dirname(os.path.abspath(__file__)) - LOCAL_PATH = os.path.join(local_dir, "operators") + local_dir = Path(__file__).parent + LOCAL_PATH = local_dir / "operators" if not self._server().local_server: if self._server().os != "posix" or (not self._server().os and os.name != "posix"): # send local generated code @@ -762,23 +762,24 @@ def upload_files_in_folder( """ 
server_paths = [] for root, subdirectories, files in os.walk(client_folder_path): + root = Path(root) for subdirectory in subdirectories: - subdir = os.path.join(root, subdirectory) - for filename in os.listdir(subdir): - f = os.path.join(subdir, filename) + subdir = root / subdirectory + for filename in subdir.iterdir(): + f = subdir / filename server_paths = self._upload_and_get_server_path( specific_extension, - f, - filename, + str(f), + filename.name, server_paths, str(to_server_folder_path), subdirectory, ) for file in files: - f = os.path.join(root, file) + f = root / file server_paths = self._upload_and_get_server_path( specific_extension, - f, + str(f), file, server_paths, str(to_server_folder_path), @@ -836,7 +837,8 @@ def upload_file(self, file_path, to_server_file_path): server_file_path : str path generated server side """ - if os.stat(file_path).st_size == 0: + file_path = Path(file_path) + if file_path.stat().st_size == 0: - raise ValueError(file_path + " is empty") + raise ValueError(f"{file_path} is empty") if not self._server().has_client(): txt = """ @@ -868,11 +870,12 @@ def upload_file_in_tmp_folder(self, file_path, new_file_name=None): server_file_path : str path generated server side """ + file_path = Path(file_path) if new_file_name: file_name = new_file_name else: - file_name = os.path.basename(file_path) - if os.stat(file_path).st_size == 0: + file_name = file_path.name + if file_path.stat().st_size == 0: - raise ValueError(file_path + " is empty") + raise ValueError(f"{file_path} is empty") if not self._server().has_client(): txt = """ diff --git a/src/ansys/dpf/core/custom_operator.py b/src/ansys/dpf/core/custom_operator.py index f5e8941951..582cd047d0 100644 --- a/src/ansys/dpf/core/custom_operator.py +++ b/src/ansys/dpf/core/custom_operator.py @@ -30,8 +30,7 @@ import abc import ctypes -import os -import pathlib +from pathlib import Path import re import shutil import tempfile @@ -85,23 +84,23 @@ def update_virtual_environment_for_custom_operators( raise NotImplementedError( "Updating the dpf-site.zip of a DPF Server is 
only available when InProcess." ) - current_dpf_site_zip_path = os.path.join(server.ansys_path, "dpf", "python", "dpf-site.zip") + current_dpf_site_zip_path = Path(server.ansys_path) / "dpf" / "python" / "dpf-site.zip" # Get the path to where we store the original dpf-site.zip - original_dpf_site_zip_path = os.path.join( - server.ansys_path, "dpf", "python", "original", "dpf-site.zip" + original_dpf_site_zip_path = ( + Path(server.ansys_path) / "dpf" / "python" / "original" / "dpf-site.zip" ) # Restore the original dpf-site.zip if restore_original: - if os.path.exists(original_dpf_site_zip_path): + if original_dpf_site_zip_path.exists(): shutil.move(src=original_dpf_site_zip_path, dst=current_dpf_site_zip_path) - os.rmdir(os.path.dirname(original_dpf_site_zip_path)) + original_dpf_site_zip_path.parent.rmdir() else: warnings.warn("No original dpf-site.zip found. Current is most likely the original.") else: # Store original dpf-site.zip for this DPF Server if no original is stored - if not os.path.exists(os.path.dirname(original_dpf_site_zip_path)): - os.mkdir(os.path.dirname(original_dpf_site_zip_path)) - if not os.path.exists(original_dpf_site_zip_path): + if not original_dpf_site_zip_path.parent.exists(): + original_dpf_site_zip_path.parent.mkdir() + if not original_dpf_site_zip_path.exists(): shutil.move(src=current_dpf_site_zip_path, dst=original_dpf_site_zip_path) # Get the current paths to site_packages import site @@ -111,46 +110,47 @@ def update_virtual_environment_for_custom_operators( # Get the first one targeting an actual site-packages folder for path_to_site_packages in paths_to_current_site_packages: if path_to_site_packages[-13:] == "site-packages": - current_site_packages_path = pathlib.Path(path_to_site_packages) + current_site_packages_path = Path(path_to_site_packages) break if current_site_packages_path is None: warnings.warn("Could not find a currently loaded site-packages folder to update from.") return # If an ansys.dpf.core.path file exists, 
then the installation is editable - search_path = pathlib.Path(current_site_packages_path) + search_path = current_site_packages_path potential_editable = list(search_path.rglob("__editable__.ansys_dpf_core-*.pth")) if potential_editable: path_file = potential_editable[0] else: # Keep for older setuptools versions - path_file = os.path.join(current_site_packages_path, "ansys.dpf.core.pth") - if os.path.exists(path_file): + path_file = current_site_packages_path / "ansys.dpf.core.pth" + if path_file.exists(): # Treat editable installation of ansys-dpf-core - with open(path_file, "r") as f: - current_site_packages_path = f.readline().strip() + with path_file.open("r") as f: + current_site_packages_path = Path(f.readline().strip()) with tempfile.TemporaryDirectory() as tmpdir: - os.mkdir(os.path.join(tmpdir, "ansys_dpf_core")) - ansys_dir = os.path.join(tmpdir, "ansys_dpf_core") - os.mkdir(os.path.join(ansys_dir, "ansys")) - os.mkdir(os.path.join(ansys_dir, "ansys", "dpf")) - os.mkdir(os.path.join(ansys_dir, "ansys", "grpc")) + tmpdir = Path(tmpdir) + ansys_dir = tmpdir / "ansys_dpf_core" + ansys_dir.mkdir() + ansys_dir.joinpath("ansys").mkdir() + ansys_dir.joinpath("ansys", "dpf").mkdir() + ansys_dir.joinpath("ansys", "grpc").mkdir() shutil.copytree( - src=os.path.join(current_site_packages_path, "ansys", "dpf", "core"), - dst=os.path.join(ansys_dir, "ansys", "dpf", "core"), + src=current_site_packages_path / "ansys" / "dpf" / "core", + dst=ansys_dir / "ansys" / "dpf" / "core", ignore=lambda directory, contents: ["__pycache__", "result_files"], ) shutil.copytree( - src=os.path.join(current_site_packages_path, "ansys", "dpf", "gate"), - dst=os.path.join(ansys_dir, "ansys", "dpf", "gate"), + src=current_site_packages_path / "ansys" / "dpf" / "gate", + dst=ansys_dir / "ansys" / "dpf" / "gate", ignore=lambda directory, contents: ["__pycache__"], ) shutil.copytree( - src=os.path.join(current_site_packages_path, "ansys", "grpc", "dpf"), - dst=os.path.join(ansys_dir, 
"ansys", "grpc", "dpf"), + src=current_site_packages_path / "ansys" / "grpc" / "dpf", + dst=ansys_dir / "ansys" / "grpc" / "dpf", ignore=lambda directory, contents: ["__pycache__"], ) # Find the .dist_info folder pattern = re.compile(r"^ansys_dpf_core\S*") - for p in pathlib.Path(current_site_packages_path).iterdir(): + for p in current_site_packages_path.iterdir(): if p.is_dir(): # print(p.stem) if re.search(pattern, p.stem): @@ -158,12 +158,12 @@ def update_virtual_environment_for_custom_operators( break shutil.copytree( src=dist_info_path, - dst=os.path.join(ansys_dir, dist_info_path.name), + dst=ansys_dir / dist_info_path.name, ) # Zip the files as dpf-site.zip - base_name = os.path.join(tmpdir, "ansys_dpf_core_zip") + base_name = tmpdir / "ansys_dpf_core_zip" base_dir = "." - root_dir = os.path.join(tmpdir, "ansys_dpf_core") # OK + root_dir = tmpdir / "ansys_dpf_core" # OK shutil.make_archive( base_name=base_name, root_dir=root_dir, base_dir=base_dir, format="zip" ) @@ -173,7 +173,7 @@ def update_virtual_environment_for_custom_operators( for item in original.infolist(): if "ansys" not in item.filename: archive.writestr(item, original.read(item)) - with zipfile.ZipFile(base_name + ".zip", mode="r") as original: + with zipfile.ZipFile(str(base_name) + ".zip", mode="r") as original: for item in original.infolist(): archive.writestr(item, original.read(item)) diff --git a/src/ansys/dpf/core/data_sources.py b/src/ansys/dpf/core/data_sources.py index d694f54b5a..f547f9e700 100644 --- a/src/ansys/dpf/core/data_sources.py +++ b/src/ansys/dpf/core/data_sources.py @@ -28,6 +28,7 @@ """ import os +from pathlib import Path import warnings import traceback from typing import Union @@ -142,7 +143,7 @@ def set_result_file_path(self, filepath, key=""): ['/tmp/file.rst'] """ - extension = os.path.splitext(filepath)[1] + extension = Path(filepath).suffix # Handle .res files from CFX if key == "" and extension == ".res": key = "cas" @@ -162,7 +163,7 @@ def 
set_result_file_path(self, filepath, key=""): def guess_result_key(filepath: str) -> str: """Guess result key for files without a file extension.""" result_keys = ["d3plot", "binout"] - base_name = os.path.basename(filepath) + base_name = Path(filepath).name # Handle files without extension for result_key in result_keys: if result_key in base_name: @@ -172,14 +173,13 @@ def guess_result_key(filepath: str) -> str: @staticmethod def guess_second_key(filepath: str) -> str: """For files with an h5 or cff extension, look for another extension.""" + + # These files usually end with .cas.h5 or .dat.h5 accepted = ["cas", "dat"] - without_ext = os.path.splitext(filepath)[0] - new_split = os.path.splitext(without_ext) + new_split = Path(filepath).suffixes new_key = "" - if len(new_split) > 1: - key = new_split[1][1:] - if key in accepted: - new_key = key + if len(new_split) > 1 and new_split[-2].strip(".") in accepted: + new_key = new_split[-2].strip(".") return new_key def set_domain_result_file_path( @@ -241,9 +241,12 @@ def add_file_path(self, filepath, key="", is_domain: bool = False, domain_id=0): """ # The filename needs to be a fully qualified file name - if not os.path.dirname(filepath): + # if not os.path.dirname(filepath) + + filepath = Path(filepath) + if not filepath.parent.name: # append local path - filepath = os.path.join(os.getcwd(), os.path.basename(filepath)) + filepath = Path.cwd() / filepath.name if is_domain: if key == "": raise NotImplementedError("A key must be given when using is_domain=True.") @@ -280,9 +283,10 @@ def add_domain_file_path(self, filepath, key, domain_id): """ # The filename needs to be a fully qualified file name - if not os.path.dirname(filepath): + filepath = Path(filepath) + if not filepath.parent.name: # append local path - filepath = os.path.join(os.getcwd(), os.path.basename(filepath)) + filepath = Path.cwd() / filepath.name self._api.data_sources_add_domain_file_path_with_key_utf8( self, str(filepath), key, domain_id ) @@ -307,9 +311,10 @@ def 
add_file_path_for_specified_result(self, filepath, key="", result_key=""): The default is ``""``, in which case the key is found directly. """ # The filename needs to be a fully qualified file name - if not os.path.dirname(filepath): + filepath = Path(filepath) + if not filepath.parent.name: # append local path - filepath = os.path.join(os.getcwd(), os.path.basename(filepath)) + filepath = Path.cwd() / filepath.name self._api.data_sources_add_file_path_for_specified_result_utf8( self, str(filepath), key, result_key diff --git a/src/ansys/dpf/core/examples/downloads.py b/src/ansys/dpf/core/examples/downloads.py index 95fdb30fd0..53b168d145 100644 --- a/src/ansys/dpf/core/examples/downloads.py +++ b/src/ansys/dpf/core/examples/downloads.py @@ -26,6 +26,7 @@ Download example datasets from https://github.com/ansys/example-data""" import os +from pathlib import Path import urllib.request import warnings from typing import Union @@ -45,7 +46,7 @@ def delete_downloads(verbose=True): from ansys.dpf.core import LOCAL_DOWNLOADED_EXAMPLES_PATH, examples not_to_remove = [ - getattr(examples.examples, item) + Path(getattr(examples.examples, item)) for item in dir(examples.examples) if not item.startswith("_") and not item.endswith("_") @@ -53,27 +54,28 @@ def delete_downloads(verbose=True): ] not_to_remove.extend( [ - os.path.join(os.path.dirname(examples.__file__), "__init__.py"), - os.path.join(os.path.dirname(examples.__file__), "downloads.py"), - os.path.join(os.path.dirname(examples.__file__), "examples.py"), + Path(examples.__file__).parent / "__init__.py", + Path(examples.__file__).parent / "downloads.py", + Path(examples.__file__).parent / "examples.py", ] ) for root, dirs, files in os.walk(LOCAL_DOWNLOADED_EXAMPLES_PATH, topdown=False): + root = Path(root) if root not in not_to_remove: for name in files: - if not os.path.join(root, name) in not_to_remove: + file_path = root / name + if not file_path in not_to_remove: try: - os.remove(os.path.join(root, name)) + 
file_path.unlink() if verbose: - print(f"deleting {os.path.join(root, name)}") + print(f"deleting {file_path}") except Exception as e: - warnings.warn( - f"couldn't delete {os.path.join(root, name)} with error:\n {e.args}" - ) + warnings.warn(f"couldn't delete {file_path} with error:\n {e.args}") for root, dirs, files in os.walk(LOCAL_DOWNLOADED_EXAMPLES_PATH, topdown=False): if len(dirs) == 0 and len(files) == 0: try: - os.rmdir(root) + root = Path(root) + root.rmdir() if verbose: print(f"deleting {root}") except Exception as e: @@ -89,21 +91,22 @@ def _retrieve_file(url, filename, directory): from ansys.dpf.core import LOCAL_DOWNLOADED_EXAMPLES_PATH # First check if file has already been downloaded - local_path = os.path.join(LOCAL_DOWNLOADED_EXAMPLES_PATH, directory, filename) - local_path_no_zip = local_path.replace(".zip", "") - if os.path.isfile(local_path_no_zip) or os.path.isdir(local_path_no_zip): - return local_path_no_zip + local_examples_download_path = Path(LOCAL_DOWNLOADED_EXAMPLES_PATH) + local_path = local_examples_download_path / directory / filename + local_path_no_zip = Path(str(local_path).replace(".zip", "")) + if local_path_no_zip.is_file() or local_path_no_zip.is_dir(): + return str(local_path_no_zip) # grab the correct url retriever urlretrieve = urllib.request.urlretrieve - dirpath = os.path.dirname(local_path) - if not os.path.isdir(dirpath): - os.makedirs(dirpath, exist_ok=True) + dirpath = local_path.parent + if not dirpath.is_dir(): + dirpath.mkdir(parents=True, exist_ok=True) # Perform download _, resp = urlretrieve(url, local_path) - return local_path + return str(local_path) def _download_file(directory, filename, should_upload: bool, server, return_local_path): @@ -1999,7 +2002,7 @@ def find_distributed_msup_folder( server, return_local_path, ) - return os.path.dirname(path) + return str(Path(path).parent) def download_average_filter_plugin( @@ -2135,7 +2138,7 @@ def _retrieve_plugin( for file in file_list: EXAMPLE_FILE = 
GITHUB_SOURCE_URL + file operator_file_path = _retrieve_file(EXAMPLE_FILE, file, directory="python_plugins") - path = os.path.dirname( - find_files(operator_file_path, should_upload, server, return_local_path) + path = str( + Path(find_files(operator_file_path, should_upload, server, return_local_path)).parent ) return path diff --git a/src/ansys/dpf/core/examples/examples.py b/src/ansys/dpf/core/examples/examples.py index 31f1587a77..b6285df9bf 100644 --- a/src/ansys/dpf/core/examples/examples.py +++ b/src/ansys/dpf/core/examples/examples.py @@ -29,6 +29,7 @@ """ import os +from pathlib import Path from ansys.dpf.core import server as server_module from ansys.dpf.core.core import upload_file_in_tmp_folder @@ -55,7 +56,8 @@ def get_example_required_minimum_dpf_version(file: os.PathLike) -> str: in_header = False previous_line_is_note = False minimum_version_str = "0.0" - with open(file, "r") as f: + file = Path(file) + with file.open("r") as f: for line in f: if line[:3] == header_flag: if not in_header: diff --git a/src/ansys/dpf/core/misc.py b/src/ansys/dpf/core/misc.py index 2eafb2045a..c10c42a41b 100644 --- a/src/ansys/dpf/core/misc.py +++ b/src/ansys/dpf/core/misc.py @@ -26,6 +26,7 @@ import glob import os import re +from pathlib import Path from pkgutil import iter_modules from ansys.dpf.core import errors @@ -120,7 +121,7 @@ def get_ansys_path(ansys_path=None): '- or by setting it by default with the environment variable "ANSYS_DPF_PATH"' ) # parse the version to an int and check for supported - ansys_folder_name = str(ansys_path).split(os.sep)[-1] + ansys_folder_name = Path(ansys_path).parts[-1] reobj_vXYZ = re.compile("^v[0123456789]{3}$") if reobj_vXYZ.match(ansys_folder_name): # vXYZ Unified Install folder @@ -171,10 +172,10 @@ def find_ansys(): base_path = None if os.name == "nt": - base_path = os.path.join(os.environ["PROGRAMFILES"], "ANSYS INC") + base_path = Path(os.environ["PROGRAMFILES"]) / "ANSYS INC" elif os.name == "posix": - for path in 
["/usr/ansys_inc", "/ansys_inc"]: - if os.path.isdir(path): + for path in [Path("/usr/ansys_inc"), Path("/ansys_inc")]: + if path.is_dir(): base_path = path else: raise OSError(f"Unsupported OS {os.name}") @@ -182,16 +183,16 @@ if base_path is None: return base_path - paths = glob.glob(os.path.join(base_path, "v*")) + paths = list(base_path.glob("v*")) - if not paths: + if not paths: return None versions = {} for path in paths: - ver_str = path[-3:] + ver_str = str(path)[-3:] if is_float(ver_str): - versions[int(ver_str)] = path + versions[int(ver_str)] = str(path) return versions[max(versions.keys())] diff --git a/src/ansys/dpf/core/path_utilities.py b/src/ansys/dpf/core/path_utilities.py index 70d00ad450..d4b8f235e6 100644 --- a/src/ansys/dpf/core/path_utilities.py +++ b/src/ansys/dpf/core/path_utilities.py @@ -61,7 +61,7 @@ def join(*args, **kwargs): server = None parts = [] for a in args: - if isinstance(a, (str, Path)) and len(a) > 0: + if isinstance(a, (str, Path)) and len(str(a)) > 0: parts.append(str(a)) elif isinstance(a, ansys.dpf.core.server_types.LegacyGrpcServer): server = a @@ -73,7 +73,7 @@ def join(*args, **kwargs): if ansys.dpf.core.server_types.RUNNING_DOCKER.use_docker: current_os = "posix" else: - return os.path.join(*args) + return str(Path(args[0]).joinpath(*args[1:])) else: current_os = server.os diff --git a/src/ansys/dpf/core/plotter.py b/src/ansys/dpf/core/plotter.py index cdf4ad5457..7992358291 100644 --- a/src/ansys/dpf/core/plotter.py +++ b/src/ansys/dpf/core/plotter.py @@ -35,6 +35,7 @@ import sys import numpy as np import warnings +from pathlib import Path from typing import TYPE_CHECKING, List, Union from ansys import dpf @@ -1019,7 +1020,7 @@ def _plot_contour_using_vtk_file(self, fields_container, notebook=None): # mesh_provider.inputs.data_sources.connect(self._evaluator._model.metadata.data_sources) # create a temporary file at the default temp directory - path = os.path.join(tempfile.gettempdir(),
"dpf_temp_hokflb2j9s.vtk") + path = Path(tempfile.gettempdir()) / "dpf_temp_hokflb2j9s.vtk" vtk_export = dpf.core.Operator("vtk_export") vtk_export.inputs.mesh.connect(self._mesh) @@ -1028,8 +1029,8 @@ def _plot_contour_using_vtk_file(self, fields_container, notebook=None): vtk_export.run() grid = pv.read(path) - if os.path.exists(path): - os.remove(path) + if path.exists(): + path.unlink() names = grid.array_names field_name = fields_container[0].name diff --git a/src/ansys/dpf/core/plugins.py b/src/ansys/dpf/core/plugins.py index e7d728b948..4c2a125c63 100644 --- a/src/ansys/dpf/core/plugins.py +++ b/src/ansys/dpf/core/plugins.py @@ -28,6 +28,7 @@ """ import os.path +from pathlib import Path try: import importlib.metadata as importlib_metadata @@ -60,14 +61,14 @@ def load_plugin_on_server(plugin, server=None, symbol="load_operators", generate # Get the path to the plugin from the package installation if len([p for p in importlib_metadata.files(plugin) if "__init__.py" in str(p)]) > 0: file_path = [p for p in importlib_metadata.files(plugin) if "__init__.py" in str(p)][0] - plugin_path = str(os.path.dirname(file_path.locate())) + plugin_path = str(file_path.locate().parent) # For some reason the "locate()" function returns a path with src doubled - plugin_path = plugin_path.replace("src" + os.path.sep + "src", "src") + plugin_path = Path(plugin_path.replace("src" + os.path.sep + "src", "src")) elif len([p for p in importlib_metadata.files(plugin) if ".pth" in str(p)]) > 0: path_file = [p for p in importlib_metadata.files(plugin) if ".pth" in str(p)][0].locate() - with open(path_file, "r") as file: - plugin_path = file.readline()[:-1] - plugin_path = os.path.join(plugin_path, "ansys", "dpf", "plugins", plugin_name) + with path_file.open("r") as file: + plugin_path = Path(file.readline()[:-1]) + plugin_path = plugin_path / "ansys" / "dpf" / "plugins" / plugin_name else: raise ModuleNotFoundError(f"Could not locate files for plugin {plugin}") @@ -93,7 +94,7 @@ def 
load_plugin_on_server(plugin, server=None, symbol="load_operators", generate # Upload xml file for the plugin _ = dpf.upload_files_in_folder( target_xml_path, - os.path.join(plugin_path, os.pardir), + plugin_path.parent, specific_extension=".xml", server=server, ) diff --git a/src/ansys/dpf/core/server_types.py b/src/ansys/dpf/core/server_types.py index a3d1f7d904..c1901f7f45 100644 --- a/src/ansys/dpf/core/server_types.py +++ b/src/ansys/dpf/core/server_types.py @@ -41,6 +41,7 @@ from abc import ABC import ctypes from typing import TYPE_CHECKING, Union +from pathlib import Path import psutil @@ -68,14 +69,14 @@ def _get_dll_path(name, ansys_path=None): """Helper function to get the right dll path for Linux or Windows""" ISPOSIX = os.name == "posix" - ANSYS_INSTALL = core.misc.get_ansys_path(ansys_path) + ANSYS_INSTALL = Path(core.misc.get_ansys_path(ansys_path)) api_path = load_api._get_path_in_install() if api_path is None: raise ImportError(f"Could not find API path in install.") - SUB_FOLDERS = os.path.join(ANSYS_INSTALL, api_path) + SUB_FOLDERS = ANSYS_INSTALL / api_path if ISPOSIX: name = "lib" + name - return os.path.join(SUB_FOLDERS, name) + return SUB_FOLDERS / name def check_valid_ip(ip): @@ -92,18 +93,19 @@ def check_valid_ip(ip): def _verify_ansys_path_is_valid(ansys_path, executable, path_in_install=None): if path_in_install is None: path_in_install = load_api._get_path_in_install() - if os.path.isdir(f"{ansys_path}/{path_in_install}"): - dpf_run_dir = f"{ansys_path}/{path_in_install}" + ansys_path = Path(ansys_path) + if ansys_path.joinpath(path_in_install).is_dir(): + dpf_run_dir = ansys_path / path_in_install else: - dpf_run_dir = f"{ansys_path}" - if not os.path.isdir(dpf_run_dir): + dpf_run_dir = ansys_path + if not dpf_run_dir.is_dir(): raise NotADirectoryError( f'Invalid ansys path at "{ansys_path}". 
' "Unable to locate the directory containing DPF at " f'"{dpf_run_dir}"' ) else: - if not os.path.exists(os.path.join(dpf_run_dir, executable)): + if not dpf_run_dir.joinpath(executable).exists(): raise FileNotFoundError( f'DPF executable not found at "{dpf_run_dir}". ' f'Unable to locate the executable "{executable}"' @@ -117,7 +119,7 @@ def _run_launch_server_process( bShell = False if docker_config.use_docker: docker_server_port = int(os.environ.get("DOCKER_SERVER_PORT", port)) - dpf_run_dir = os.getcwd() + dpf_run_dir = Path.cwd() if os.name == "posix": bShell = True run_cmd = docker_config.docker_run_cmd_command(docker_server_port, port) @@ -135,7 +137,7 @@ def _run_launch_server_process( path_in_install = load_api._get_path_in_install(internal_folder="bin") dpf_run_dir = _verify_ansys_path_is_valid(ansys_path, executable, path_in_install) - old_dir = os.getcwd() + old_dir = Path.cwd() os.chdir(dpf_run_dir) if not bShell: process = subprocess.Popen(run_cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) @@ -940,11 +942,11 @@ def __init__( name = "DataProcessingCore" path = _get_dll_path(name, ansys_path) try: - data_processing_core_load_api(path, "common") + data_processing_core_load_api(str(path), "common") except Exception as e: - if not os.path.isdir(os.path.dirname(path)): + if not path.parent.is_dir(): raise NotADirectoryError( - f"DPF directory not found at {os.path.dirname(path)}" + f"DPF directory not found at {path.parent}" f"Unable to locate the following file: {path}" ) raise e diff --git a/src/ansys/dpf/core/workflow.py b/src/ansys/dpf/core/workflow.py index 634bbfe581..b0227362a4 100644 --- a/src/ansys/dpf/core/workflow.py +++ b/src/ansys/dpf/core/workflow.py @@ -31,6 +31,7 @@ import os import traceback import warnings +from pathlib import Path from enum import Enum from typing import Union @@ -942,11 +943,11 @@ def view( name = title if save_as: - dot_path = os.path.splitext(str(save_as))[0] + ".dot" - image_path = save_as + image_path = 
Path(save_as) + dot_path = image_path.with_suffix(".dot") else: - dot_path = os.path.join(os.getcwd(), f"{name}.dot") - image_path = os.path.join(os.getcwd(), f"{name}.png") + image_path = Path.cwd() / f"{name}.png" + dot_path = image_path.with_suffix(".dot") # Create graphviz file of workflow self.to_graphviz(dot_path) @@ -956,7 +957,7 @@ # View workflow graphviz.view(filepath=image_path) if not keep_dot_file: - os.remove(dot_path) + dot_path.unlink() return image_path def to_graphviz(self, path: Union[os.PathLike, str]): diff --git a/tests/conftest.py b/tests/conftest.py index c2e0f6ab95..6dc7353e4d 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -28,6 +28,7 @@ import os import functools +from pathlib import Path import psutil import pytest @@ -54,10 +55,10 @@ def _get_test_files_directory(): if local_test_repo is False: - test_path = os.path.join(os.path.dirname(os.path.abspath(__file__))) - return os.path.join(test_path, os.pardir, "tests", "testfiles") + test_path = Path(__file__).parent + return str(test_path.parent / "tests" / "testfiles") else: - return os.path.join(os.environ["AWP_UNIT_TEST_FILES"], "python") + return str(Path(os.environ["AWP_UNIT_TEST_FILES"]).joinpath("python")) if os.name == "posix": @@ -94,11 +95,11 @@ def resolve_test_file(basename, additional_path="", is_in_examples=None): if is_in_examples: return examples.find_files(getattr(examples, is_in_examples)) else: - test_files_path = _get_test_files_directory() - filename = os.path.join(test_files_path, additional_path, basename) - if not os.path.isfile(filename): + test_files_path = Path(_get_test_files_directory()) + filename = test_files_path.joinpath(additional_path, basename) + if not filename.is_file(): raise FileNotFoundError(f"Unable to locate {basename} at {test_files_path}") - return examples.find_files(filename) + return examples.find_files(str(filename)) @pytest.fixture() diff --git a/tests/entry/conftest.py
b/tests/entry/conftest.py index 027c1ca64e..4ae3a6041f 100644 --- a/tests/entry/conftest.py +++ b/tests/entry/conftest.py @@ -29,6 +29,7 @@ """ import os +from pathlib import Path import functools import pytest @@ -54,10 +55,10 @@ def _get_test_files_directory(): if local_test_repo is False: - test_path = os.path.join(os.path.dirname(os.path.abspath(__file__))) - return os.path.join(test_path, os.pardir, "tests", "testfiles") + test_path = Path(__file__).parent + return str(test_path.parent / "testfiles") else: - return os.path.join(os.environ["AWP_UNIT_TEST_FILES"], "python") + return str(Path(os.environ["AWP_UNIT_TEST_FILES"]).joinpath("python")) if os.name == "posix": diff --git a/tests/test_animation.py b/tests/test_animation.py index d1f7185b3e..4161bb9845 100644 --- a/tests/test_animation.py +++ b/tests/test_animation.py @@ -21,6 +21,7 @@ # SOFTWARE. import os +from pathlib import Path import pytest @@ -43,8 +44,8 @@ def remove_gifs(request): """Remove GIF once finished.""" def remove_gif(): - if os.path.exists(os.path.join(os.getcwd(), gif_name)): - os.remove(os.path.join(os.getcwd(), gif_name)) + if Path.cwd().joinpath(gif_name).exists(): + Path.cwd().joinpath(gif_name).unlink() request.addfinalizer(remove_gif) diff --git a/tests/test_animator.py b/tests/test_animator.py index d0fada4cd8..c5ef1f5fab 100644 --- a/tests/test_animator.py +++ b/tests/test_animator.py @@ -21,6 +21,7 @@ # SOFTWARE. 
import os +from pathlib import Path import pytest @@ -42,8 +43,8 @@ def remove_gifs(request): """Remove GIF once finished.""" def remove_gif(): - if os.path.exists(os.path.join(os.getcwd(), gif_name)): - os.remove(os.path.join(os.getcwd(), gif_name)) + if Path.cwd().joinpath(gif_name).exists(): + Path.cwd().joinpath(gif_name).unlink() request.addfinalizer(remove_gif) @@ -250,5 +251,5 @@ def test_animator_animate_fields_container_cpos(remove_gifs, displacement_fields off_screen=True, show_axes=True, ) - assert os.path.isfile(gif_name) - assert os.path.getsize(gif_name) > 6000 + assert Path(gif_name).is_file() + assert Path(gif_name).stat().st_size > 6000 diff --git a/tests/test_code_docstrings.py b/tests/test_code_docstrings.py index 5b518a0d34..af3287e795 100644 --- a/tests/test_code_docstrings.py +++ b/tests/test_code_docstrings.py @@ -28,7 +28,7 @@ import doctest import os -import pathlib +from pathlib import Path import pytest @@ -36,14 +36,13 @@ @pytest.mark.skipif(True, reason="examples are created for windows") def test_doctest_allfiles(): directory = r"../ansys/dpf/core" - actual_path = pathlib.Path(__file__).parent.absolute() - # actual_path = os.getcwd() + actual_path = Path(__file__).parent.absolute() print(actual_path) - for filename in os.listdir(os.path.join(actual_path, directory)): + for filename in os.listdir(actual_path / directory): if filename.endswith(".py"): - path = os.path.join(directory, filename) + path = Path(directory) / filename print(path) - doctest.testfile(path, verbose=True, raise_on_error=True) + doctest.testfile(str(path), verbose=True, raise_on_error=True) else: continue @@ -51,21 +50,21 @@ def test_doctest_allfiles(): @pytest.mark.skipif(True, reason="examples are created for windows") def test_doctest_allexamples(): directory = r"../examples" - actual_path = pathlib.Path(__file__).parent.absolute() + actual_path = Path(__file__).parent.absolute() handled_files = [] - for root, subdirectories, files in 
os.walk(os.path.join(actual_path, directory)): + for root, subdirectories, _ in os.walk(actual_path / directory): for subdirectory in subdirectories: - subdir = os.path.join(root, subdirectory) + subdir = Path(root) / subdirectory print(subdir) for filename in os.listdir(subdir): if filename.endswith(".py"): - path = os.path.join(subdir, filename) - if ".ipynb_checkpoints" in path: + path = subdir / filename + if ".ipynb_checkpoints" in str(path): continue print(path) - handled_files.append(path) + handled_files.append(str(path)) exec( - open(path, mode="r", encoding="utf8").read(), + path.read_text(encoding="utf-8"), globals(), globals(), ) diff --git a/tests/test_codegeneration.py b/tests/test_codegeneration.py index a673928560..a0e2b12543 100644 --- a/tests/test_codegeneration.py +++ b/tests/test_codegeneration.py @@ -24,6 +24,7 @@ import os import copy import tempfile +from pathlib import Path import ansys.grpc.dpf import numpy as np @@ -153,7 +154,7 @@ def test_operator_any_input(allkindofcomplexity): serialization.inputs.any_input3.connect(u.outputs) # create a temporary file at the default temp directory - path = os.path.join(tempfile.gettempdir(), "dpf_temp_ser.txt") + path = str(Path(tempfile.gettempdir()) / "dpf_temp_ser.txt") if not core.SERVER.local_server: core.upload_file_in_tmp_folder(examples.find_static_rst(return_local_path=True)) path = core.path_utilities.join(core.make_tmp_dir_server(), "dpf_temp_ser.txt") @@ -171,8 +172,9 @@ def test_operator_any_input(allkindofcomplexity): assert hasattr(fc, "outputs") == False - if os.path.exists(path): - os.remove(path) + path = Path(path) + if path.exists(): + path.unlink() def test_create_op_with_inputs(plate_msup): diff --git a/tests/test_data_tree.py b/tests/test_data_tree.py index 08a660c31c..10416d38c4 100644 --- a/tests/test_data_tree.py +++ b/tests/test_data_tree.py @@ -24,6 +24,7 @@ import os import pytest import conftest +from pathlib import Path @conftest.raises_for_servers_version_under("4.0") 
@@ -174,16 +175,16 @@ def test_write_to_file_data_tree(tmpdir, server_type): to_fill.list_int = [1, 2] to_fill.list_double = [1.5, 2.5] to_fill.list_string = ["hello", "bye"] - data_tree.write_to_txt(os.path.join(tmpdir, "file.txt")) - data_tree = dpf.DataTree.read_from_txt(os.path.join(tmpdir, "file.txt"), server=server_type) + data_tree.write_to_txt(str(Path(tmpdir) / "file.txt")) + data_tree = dpf.DataTree.read_from_txt(str(Path(tmpdir) / "file.txt"), server=server_type) assert data_tree.has("int") assert data_tree.has("double") assert data_tree.has("string") assert data_tree.has("list_int") assert data_tree.has("list_double") assert data_tree.has("list_string") - data_tree.write_to_json(os.path.join(tmpdir, "file.json")) - data_tree = dpf.DataTree.read_from_json(os.path.join(tmpdir, "file.json"), server=server_type) + data_tree.write_to_json(str(Path(tmpdir) / "file.json")) + data_tree = dpf.DataTree.read_from_json(str(Path(tmpdir) / "file.json"), server=server_type) assert data_tree.has("int") assert data_tree.has("double") assert data_tree.has("string") @@ -207,19 +208,17 @@ def test_write_to_file_remote_data_tree(tmpdir, server_clayer_remote_process): to_fill.list_int = [1, 2] to_fill.list_double = [1.5, 2.5] to_fill.list_string = ["hello", "bye"] - data_tree.write_to_txt(os.path.join(tmpdir, "file.txt")) - data_tree = dpf.DataTree.read_from_txt( - os.path.join(tmpdir, "file.txt"), server=server_connected - ) + data_tree.write_to_txt(str(Path(tmpdir) / "file.txt")) + data_tree = dpf.DataTree.read_from_txt(str(Path(tmpdir) / "file.txt"), server=server_connected) assert data_tree.has("int") assert data_tree.has("double") assert data_tree.has("string") assert data_tree.has("list_int") assert data_tree.has("list_double") assert data_tree.has("list_string") - data_tree.write_to_json(os.path.join(tmpdir, "file.json")) + data_tree.write_to_json(str(Path(tmpdir) / "file.json")) data_tree = dpf.DataTree.read_from_json( - os.path.join(tmpdir, "file.json"), 
server=server_connected + str(Path(tmpdir) / "file.json"), server=server_connected ) assert data_tree.has("int") assert data_tree.has("double") diff --git a/tests/test_examples.py b/tests/test_examples.py index b7989a0a0f..d5764adc91 100644 --- a/tests/test_examples.py +++ b/tests/test_examples.py @@ -22,7 +22,7 @@ """Verify all examples can be accessed or downloaded""" -import os.path +from pathlib import Path import pytest @@ -152,12 +152,12 @@ def test_find_examples(example, server_type_remote_process): def test_delete_downloaded_files(): - path = examples.download_multi_stage_cyclic_result(return_local_path=True) - assert os.path.exists(path) + path = Path(examples.download_multi_stage_cyclic_result(return_local_path=True)) + assert path.exists() examples.delete_downloads(verbose=False) - assert not os.path.exists(path) - path = examples.download_multi_stage_cyclic_result(return_local_path=True) - assert os.path.exists(path) + assert not path.exists() + path = Path(examples.download_multi_stage_cyclic_result(return_local_path=True)) + assert path.exists() def test_get_example_required_minimum_dpf_version(tmp_path): @@ -197,12 +197,12 @@ def test_get_example_required_minimum_dpf_version(tmp_path): def test_download_easy_statistics(): - assert os.path.exists(examples.download_easy_statistics(return_local_path=True)) + assert Path(examples.download_easy_statistics(return_local_path=True)).exists() def test_download_average_filter_plugin(): - assert os.path.exists(examples.download_average_filter_plugin(return_local_path=True)) + assert Path(examples.download_average_filter_plugin(return_local_path=True)).exists() def test_download_gltf_plugin(): - assert os.path.exists(examples.download_gltf_plugin(return_local_path=True)) + assert Path(examples.download_gltf_plugin(return_local_path=True)).exists() diff --git a/tests/test_launcher.py b/tests/test_launcher.py index 293d4f165d..be99ea4171 100644 --- a/tests/test_launcher.py +++ b/tests/test_launcher.py @@ -21,6 
+21,7 @@ # SOFTWARE. import os +from pathlib import Path import pytest import psutil @@ -192,10 +193,10 @@ def test_start_local_wrong_ansys_path(self, server_config): def test_launch_server_full_path(self, server_config): ansys_path = core.misc.get_ansys_path() if os.name == "nt": - path = os.path.join(ansys_path, "aisol", "bin", "winx64") + path = Path(ansys_path) / "aisol" / "bin" / "winx64" else: if server_config.protocol == core.server_factory.CommunicationProtocols.InProcess: - path = os.path.join(ansys_path, "aisol", "dll", "linx64") + path = Path(ansys_path) / "aisol" / "dll" / "linx64" elif ( server_config.protocol == core.server_factory.CommunicationProtocols.gRPC and server_config.legacy is False @@ -204,11 +205,13 @@ def test_launch_server_full_path(self, server_config): # Ans.Dpf.Grpc.sh reside in two different folders return else: - path = os.path.join(ansys_path, "aisol", "bin", "linx64") + path = Path(ansys_path) / "aisol" / "bin" / "linx64" # print("trying to launch on ", path) # print(os.listdir(path)) - server = core.start_local_server(as_global=False, ansys_path=path, config=server_config) + server = core.start_local_server( + as_global=False, ansys_path=str(path), config=server_config + ) assert "server_port" in server.info @@ -219,7 +222,7 @@ def test_start_local_failed_executable(remote_config_server_type): with pytest.raises(FileNotFoundError): path = Path(get_ansys_path()).parent.absolute() - core.start_local_server(ansys_path=path, config=remote_config_server_type) + core.start_local_server(ansys_path=str(path), config=remote_config_server_type) @pytest.mark.skipif(not running_docker, reason="Checks docker start server") diff --git a/tests/test_operator.py b/tests/test_operator.py index b4c2211cf6..55e59c177d 100644 --- a/tests/test_operator.py +++ b/tests/test_operator.py @@ -25,6 +25,7 @@ import shutil import types import weakref +from pathlib import Path import numpy as np import pytest @@ -449,8 +450,8 @@ def find_mapdl(): try: path = 
get_ansys_path() if dpf.core.SERVER.os == "nt": - exe = os.path.join(path, "ansys", "bin", "winx64", "ANSYS.exe") - return os.path.isfile(exe) + exe = Path(path).joinpath("ansys", "bin", "winx64", "ANSYS.exe") + return exe.is_file() else: return False @@ -468,8 +469,8 @@ def test_inputs_outputs_datasources_operator(cyclic_ds, server_type): dsout = op.outputs.data_sources() assert dsout is not None assert dsout.result_key == "rst" - path = os.path.join(dsout.result_files[0]) - shutil.rmtree(os.path.dirname(path)) + path = Path(dsout.result_files[0]) + shutil.rmtree(path.parent) def test_subresults_operator(cyclic_lin_rst, cyclic_ds): diff --git a/tests/test_plotter.py b/tests/test_plotter.py index 577bda5fb9..ad8d41d7da 100644 --- a/tests/test_plotter.py +++ b/tests/test_plotter.py @@ -20,7 +20,7 @@ # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. -import os +from pathlib import Path import pytest @@ -42,8 +42,8 @@ def remove_picture(picture): - if os.path.exists(os.path.join(os.getcwd(), picture)): - os.remove(os.path.join(os.getcwd(), picture)) + if Path.cwd().joinpath(picture).exists(): + Path.cwd().joinpath(picture).unlink() @pytest.mark.skipif(not HAS_PYVISTA, reason="Please install pyvista") @@ -53,7 +53,7 @@ def test_plotter_on_model(plate_msup): picture = "model_plot.png" remove_picture(picture) model.plot(off_screen=True, screenshot=picture) - assert os.path.exists(os.path.join(os.getcwd(), picture)) + assert Path.cwd().joinpath(picture).exists() remove_picture(picture) @@ -155,7 +155,7 @@ def test_plot_fieldscontainer_on_mesh(allkindofcomplexity): picture = "mesh_plot.png" remove_picture(picture) mesh.plot(fc, off_screen=True, screenshot=picture) - assert os.path.exists(os.path.join(os.getcwd(), picture)) + assert Path.cwd().joinpath(picture).exists() remove_picture(picture) @@ -193,7 +193,7 @@ def test_field_nodal_plot(allkindofcomplexity): picture = "field_plot.png" remove_picture(picture) 
f.plot(off_screen=True, screenshot=picture) - assert os.path.exists(os.path.join(os.getcwd(), picture)) + assert Path.cwd().joinpath(picture).exists() remove_picture(picture) @@ -372,7 +372,7 @@ def test_plot_meshes_container_1(multishells): picture = "meshes_cont_plot.png" remove_picture(picture) meshes_cont.plot(disp_fc, off_screen=True, screenshot=picture) - assert os.path.exists(os.path.join(os.getcwd(), picture)) + assert Path.cwd().joinpath(picture).exists() remove_picture(picture) @@ -656,7 +656,7 @@ def test_plot_chart(allkindofcomplexity): picture = "plot_chart.png" remove_picture(picture) plot_chart(new_fields_container, off_screen=True, screenshot=picture) - assert os.path.exists(os.path.join(os.getcwd(), picture)) + assert Path.cwd().joinpath(picture).exists() remove_picture(picture) diff --git a/tests/test_plugins.py b/tests/test_plugins.py index 55b2f34a86..4b2b7090db 100644 --- a/tests/test_plugins.py +++ b/tests/test_plugins.py @@ -20,7 +20,7 @@ # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. 
-import os.path +from pathlib import Path import pytest @@ -124,7 +124,7 @@ def test_vtk(server_type, tmpdir): u = model.operator("U") op.inputs.fields1.connect(u) op.inputs.mesh.connect(model.metadata.mesh_provider) - op.inputs.directory.connect(os.path.dirname(rst_file)) + op.inputs.directory.connect(str(Path(rst_file).parent)) out_path = op.eval() # assert out_path.result_files is not [] # try: diff --git a/tests/test_python_plugins.py b/tests/test_python_plugins.py index 815555fda1..f95aad8108 100644 --- a/tests/test_python_plugins.py +++ b/tests/test_python_plugins.py @@ -22,6 +22,7 @@ import pytest import os +from pathlib import Path import platform import numpy as np from conftest import SERVERS_VERSION_GREATER_THAN_OR_EQUAL_TO_4_0 @@ -56,7 +57,7 @@ @pytest.fixture(scope="module") def load_all_types_plugin(testfiles_dir): return dpf.load_library( - dpf.path_utilities.to_server_os(os.path.join(testfiles_dir, "pythonPlugins", "all_types")), + dpf.path_utilities.to_server_os(Path(testfiles_dir) / "pythonPlugins" / "all_types"), "py_test_types", "load_operators", ) @@ -65,7 +66,7 @@ def load_all_types_plugin(testfiles_dir): def load_all_types_plugin_with_serv(my_server, testfiles_dir): return dpf.load_library( dpf.path_utilities.to_server_os( - os.path.join(testfiles_dir, "pythonPlugins", "all_types"), my_server + Path(testfiles_dir) / "pythonPlugins" / "all_types", my_server ), "py_test_types", "load_operators", @@ -242,7 +243,7 @@ def test_generic_data_container(server_clayer_remote_process, testfiles_dir): def test_syntax_error(server_type_remote_process, testfiles_dir): dpf.load_library( dpf.path_utilities.to_server_os( - os.path.join(testfiles_dir, "pythonPlugins", "syntax_error_plugin"), + Path(testfiles_dir) / "pythonPlugins" / "syntax_error_plugin", server_type_remote_process, ), "py_raising", @@ -381,7 +382,7 @@ def test_create_properties_specification(server_in_process): def test_custom_op_with_spec(server_type_remote_process, testfiles_dir): 
dpf.load_library( dpf.path_utilities.to_server_os( - os.path.join(testfiles_dir, "pythonPlugins"), server_type_remote_process + Path(testfiles_dir) / "pythonPlugins", server_type_remote_process ), "py_operator_with_spec", "load_operators", diff --git a/tests/test_service.py b/tests/test_service.py index f654323558..c128a480ed 100644 --- a/tests/test_service.py +++ b/tests/test_service.py @@ -21,6 +21,7 @@ # SOFTWARE. import os +from pathlib import Path import pytest import conftest @@ -77,7 +78,7 @@ def test_loadplugin(server_type): reason="Random SEGFAULT in the GitHub pipeline for 3.7-8 on Windows", ) def test_upload_download(tmpdir, server_type_remote_process): - tmpdir = str(tmpdir) + tmpdir = Path(tmpdir) file = dpf.core.upload_file_in_tmp_folder( examples.download_all_kinds_of_complexity(return_local_path=True), server=server_type_remote_process, @@ -91,17 +92,14 @@ def test_upload_download(tmpdir, server_type_remote_process): fielddef = f.field_definition assert fielddef.unit == "Pa" - dir = os.path.dirname(file) - vtk_path = os.path.join(dir, "file.vtk") + vtk_path = Path(file).parent / "file.vtk" vtk = dpf.core.operators.serialization.vtk_export( - file_path=vtk_path, fields1=fcOut, server=server_type_remote_process + file_path=str(vtk_path), fields1=fcOut, server=server_type_remote_process ) vtk.run() - dpf.core.download_file( - vtk_path, os.path.join(tmpdir, "file.vtk"), server=server_type_remote_process - ) - assert os.path.exists(os.path.join(tmpdir, "file.vtk")) + dpf.core.download_file(vtk_path, str(tmpdir / "file.vtk"), server=server_type_remote_process) + assert tmpdir.joinpath("file.vtk").exists() @pytest.mark.skipif(running_docker, reason="Path hidden within docker container") @@ -114,18 +112,18 @@ def test_download_folder( ) file = dpf.core.upload_file_in_tmp_folder(plate_msup, server=server_type_remote_process) file = dpf.core.upload_file_in_tmp_folder(multishells, server=server_type_remote_process) - parent_path = os.path.dirname(file) + 
parent_path = str(Path(file).parent) dpf.core.download_files_in_folder(parent_path, tmpdir, server=server_type_remote_process) import ntpath - assert os.path.exists(os.path.join(tmpdir, ntpath.basename(allkindofcomplexity))) - assert os.path.exists(os.path.join(tmpdir, ntpath.basename(plate_msup))) - assert os.path.exists(os.path.join(tmpdir, ntpath.basename(multishells))) + assert Path(tmpdir).joinpath(ntpath.basename(allkindofcomplexity)).exists() + assert Path(tmpdir).joinpath(ntpath.basename(plate_msup)).exists() + assert Path(tmpdir).joinpath(ntpath.basename(multishells)).exists() @pytest.mark.skipif(running_docker, reason="Path hidden within docker container") def test_download_with_subdir(multishells, tmpdir, server_type_remote_process): - tmpdir = str(tmpdir) + tmpdir = Path(tmpdir) file = dpf.core.upload_file_in_tmp_folder(multishells, server=server_type_remote_process) base = dpf.core.BaseService(server=server_type_remote_process) @@ -134,56 +132,56 @@ def test_download_with_subdir(multishells, tmpdir, server_type_remote_process): import ntpath filename = ntpath.basename(file) - parent_path = os.path.dirname(file) + parent_path = str(Path(file).parent) to_server_path = parent_path + separator + "subdir" + separator + filename subdir_filepath = dpf.core.upload_file(file, to_server_path, server=server_type_remote_process) folder = parent_path - out = dpf.core.download_files_in_folder(folder, tmpdir, server=server_type_remote_process) - p1 = os.path.join(tmpdir, filename) - p2 = os.path.join(tmpdir, "subdir", filename) + _ = dpf.core.download_files_in_folder(folder, str(tmpdir), server=server_type_remote_process) + p1 = tmpdir / filename + p2 = tmpdir / "subdir" / filename # p1 = tmpdir + "/" + filename # p2 = tmpdir + "/subdir/" + filename - assert os.path.exists(p1) - assert os.path.exists(p2) + assert p1.exists() + assert p2.exists() @pytest.mark.skipif(running_docker, reason="Path hidden within docker container") def 
test_downloadinfolder_uploadinfolder(multishells, tmpdir, server_type_remote_process): - tmpdir = str(tmpdir) + tmpdir = Path(tmpdir) base = dpf.core.BaseService(server=server_type_remote_process) # create in tmpdir some architecture with subfolder in subfolder - path1 = os.path.join(tmpdir, os.path.basename(multishells)) - path2 = os.path.join(tmpdir, "subdirA", os.path.basename(multishells)) - path4 = os.path.join(tmpdir, "subdirB", os.path.basename(multishells)) + path1 = tmpdir / Path(multishells).name + path2 = tmpdir / "subdirA" / Path(multishells).name + path4 = tmpdir / "subdirB" / Path(multishells).name from shutil import copyfile copyfile(multishells, path1) - os.mkdir(os.path.join(tmpdir, "subdirA")) + tmpdir.joinpath("subdirA").mkdir() copyfile(multishells, path2) - os.mkdir(os.path.join(tmpdir, "subdirB")) + tmpdir.joinpath("subdirB").mkdir() copyfile(multishells, path4) # upload it TARGET_PATH = base.make_tmp_dir_server() dpf.core.upload_files_in_folder( to_server_folder_path=TARGET_PATH, - client_folder_path=tmpdir, + client_folder_path=str(tmpdir), specific_extension="rst", server=server_type_remote_process, ) # download it - new_tmpdir = os.path.join(tmpdir, "my_tmp_dir") - os.mkdir(new_tmpdir) + new_tmpdir = tmpdir / "my_tmp_dir" + new_tmpdir.mkdir() out = dpf.core.download_files_in_folder( - TARGET_PATH, new_tmpdir, server=server_type_remote_process + TARGET_PATH, str(new_tmpdir), server=server_type_remote_process ) # check if the architecture of the download is ok - path1_check = os.path.join(new_tmpdir, os.path.basename(multishells)) - path2_check = os.path.join(new_tmpdir, "subdirA", os.path.basename(multishells)) - path4_check = os.path.join(new_tmpdir, "subdirB", os.path.basename(multishells)) - assert os.path.exists(path1_check) - assert os.path.exists(path2_check) - assert os.path.exists(path4_check) + path1_check = new_tmpdir / Path(multishells).name + path2_check = new_tmpdir / "subdirA" / Path(multishells).name + path4_check = 
new_tmpdir / "subdirB" / Path(multishells).name + assert path1_check.exists() + assert path2_check.exists() + assert path4_check.exists() # clean # os.remove(os.path.join(tmpdir, "tmpdir")) # os.remove(os.path.join(tmpdir, "subdirA")) @@ -243,18 +241,18 @@ def test_uploadinfolder_emptyfolder(tmpdir, server_type_remote_process): def test_load_plugin_correctly(server_type): from ansys.dpf import core as dpf - actual_path = os.path.dirname(pkgutil.get_loader("ansys.dpf.core").path) + actual_path = Path(pkgutil.get_loader("ansys.dpf.core").path).parent base = dpf.BaseService(server=server_type) if server_type.os == "nt": base.load_library("Ans.Dpf.Math.dll", "math_operators", generate_operators=True) - t = os.path.getmtime(os.path.join(actual_path, r"operators/math/fft_eval.py")) + t = actual_path.joinpath("operators/math/fft_eval.py").stat().st_mtime assert datetime.datetime.fromtimestamp(t).date() == datetime.datetime.today().date() else: base.load_library("libAns.Dpf.Math.so", "math_operators") - exists = os.path.exists(os.path.join(actual_path, r"operators/fft_eval.py")) + exists = actual_path.joinpath("operators/fft_eval.py").exists() assert not exists - num_lines = sum(1 for line in open(os.path.join(actual_path, r"operators/math/__init__.py"))) + num_lines = sum(1 for line in actual_path.joinpath("operators/math/__init__.py").open()) assert num_lines >= 11 @@ -267,18 +265,16 @@ def test_load_plugin_correctly_remote(): server.external_ip, server.external_port, as_global=False ) - actual_path = os.path.dirname(pkgutil.get_loader("ansys.dpf.core").path) + actual_path = Path(pkgutil.get_loader("ansys.dpf.core").path).parent if server.os == "posix": dpf.load_library("libAns.Dpf.Math.so", "math_operators", server=server_connected) else: dpf.load_library("Ans.Dpf.Math.dll", "math_operators", server=server_connected) - t = os.path.getmtime(os.path.join(actual_path, r"operators/math/fft_eval.py")) + t = actual_path.joinpath("operators/math/fft_eval.py").stat().st_mtime 
assert datetime.datetime.fromtimestamp(t).date() == datetime.datetime.today().date() - actual_path = os.path.dirname(pkgutil.get_loader("ansys.dpf.core").path) - - assert os.path.exists(os.path.join(actual_path, r"operators/math/fft_eval.py")) + assert actual_path.joinpath("operators/math/fft_eval.py").exists() def test_dpf_join(server_type): @@ -320,7 +316,7 @@ def test_load_api_without_awp_root(restore_awp_root): assert serv._client_api_path is not None assert serv._grpc_client_path is not None - dpf_inner_path = os.path.join("ansys", "dpf", "gatebin") + dpf_inner_path = str(Path("ansys") / "dpf" / "gatebin") assert dpf_inner_path in serv._client_api_path assert dpf_inner_path in serv._grpc_client_path @@ -339,7 +335,7 @@ def test_load_api_with_awp_root(): assert serv_2._client_api_path is not None assert serv_2._grpc_client_path is not None - dpf_inner_path = os.path.join("ansys", "dpf", "gatebin") + dpf_inner_path = str(Path("ansys") / "dpf" / "gatebin") assert dpf_inner_path in serv_2._client_api_path assert dpf_inner_path in serv_2._grpc_client_path @@ -366,7 +362,7 @@ def test_load_api_with_awp_root_2(): assert serv._client_api_path is not None assert serv._grpc_client_path is not None - dpf_inner_path = os.path.join("ansys", "dpf", "gatebin") + dpf_inner_path = str(Path("ansys") / "dpf" / "gatebin") assert dpf_inner_path in serv._client_api_path assert dpf_inner_path in serv._grpc_client_path @@ -421,9 +417,9 @@ def test_load_api_with_awp_root_no_gatebin(): assert serv_2._grpc_client_path is not None ISPOSIX = os.name == "posix" if not ISPOSIX: - dpf_inner_path = os.path.join("aisol", "bin", "winx64") + dpf_inner_path = str(Path("aisol") / "bin" / "winx64") else: - dpf_inner_path = os.path.join("aisol", "dll", "linx64") + dpf_inner_path = str(Path("aisol") / "dll" / "linx64") assert dpf_inner_path in serv_2._client_api_path assert dpf_inner_path in serv_2._grpc_client_path @@ -449,9 +445,9 @@ def test_load_api_with_awp_root_2_no_gatebin(): assert 
serv._grpc_client_path is not None ISPOSIX = os.name == "posix" if not ISPOSIX: - dpf_inner_path = os.path.join("aisol", "bin", "winx64") + dpf_inner_path = str(Path("aisol") / "bin" / "winx64") else: - dpf_inner_path = os.path.join("aisol", "dll", "linx64") + dpf_inner_path = str(Path("aisol") / "dll" / "linx64") assert dpf_inner_path in serv._client_api_path assert dpf_inner_path in serv._grpc_client_path diff --git a/tests/test_session.py b/tests/test_session.py index b9b4e8074d..a2076badb7 100644 --- a/tests/test_session.py +++ b/tests/test_session.py @@ -20,7 +20,7 @@ # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. -import os +from pathlib import Path import conftest import tempfile from ansys.dpf import core @@ -31,10 +31,10 @@ def get_log_file(log_path, server): if not isinstance(server, core.server_types.InProcessServer): core.core.download_file( log_path, - os.path.join(tempfile.gettempdir(), "log2.txt"), + str(Path(tempfile.gettempdir()) / "log2.txt"), server=server, ) - return os.path.join(tempfile.gettempdir(), "log2.txt") + return str(Path(tempfile.gettempdir()) / "log2.txt") else: return log_path @@ -47,14 +47,14 @@ def test_logging(tmpdir, server_type): examples.find_static_rst(return_local_path=True, server=server_type), server=server_type, ) - log_path = os.path.join(server_tmp, "log.txt") + log_path = Path(server_tmp) / "log.txt" else: - log_path = os.path.join(tmpdir, "log.txt") + log_path = Path(tmpdir) / "log.txt" result_file = examples.find_static_rst(server=server_type) # download it - new_tmpdir = os.path.join(tmpdir, "my_tmp_dir") - server_type.session.handle_events_with_file_logger(log_path, 2) + _ = Path(tmpdir) / "my_tmp_dir" + server_type.session.handle_events_with_file_logger(str(log_path), 2) wf = core.Workflow(server=server_type) wf.progress_bar = False @@ -65,13 +65,13 @@ def test_logging(tmpdir, server_type): wf.set_output_name("out", to_nodal.outputs.fields_container) 
wf.get_output("out", core.types.fields_container) - download_log_path = get_log_file(log_path, server_type) - assert os.path.exists(download_log_path) - file_size = os.path.getsize(download_log_path) + download_log_path = Path(get_log_file(str(log_path), server_type)) + assert download_log_path.exists() + file_size = download_log_path.stat().st_size assert file_size > 20 server_type._del_session() - download_log_path = get_log_file(log_path, server_type) - file_size = os.path.getsize(download_log_path) + download_log_path = Path(get_log_file(str(log_path), server_type)) + file_size = download_log_path.stat().st_size wf = core.Workflow(server=server_type) wf.progress_bar = False @@ -82,8 +82,8 @@ def test_logging(tmpdir, server_type): wf.set_output_name("out", to_nodal.outputs.fields_container) wf.get_output("out", core.types.fields_container) - download_log_path = get_log_file(log_path, server_type) - assert file_size == os.path.getsize(download_log_path) + download_log_path = Path(get_log_file(str(log_path), server_type)) + assert file_size == download_log_path.stat().st_size @conftest.raises_for_servers_version_under("6.1") @@ -93,8 +93,8 @@ def test_logging_remote(tmpdir, server_type_remote_process): examples.find_multishells_rst(return_local_path=True), server=server_type_remote_process, ) - log_path = os.path.join(server_tmp, "log.txt") - server_type_remote_process.session.handle_events_with_file_logger(log_path, 2) + log_path = Path(server_tmp) / "log.txt" + server_type_remote_process.session.handle_events_with_file_logger(str(log_path), 2) server_type_remote_process.session.start_emitting_rpc_log() wf = core.Workflow(server=server_type_remote_process) @@ -107,13 +107,13 @@ def test_logging_remote(tmpdir, server_type_remote_process): wf.set_output_name("out", to_nodal.outputs.fields_container) wf.get_output("out", core.types.fields_container) - download_log_path = get_log_file(log_path, server_type_remote_process) - assert os.path.exists(download_log_path) - 
file_size = os.path.getsize(download_log_path) + download_log_path = Path(get_log_file(str(log_path), server_type_remote_process)) + assert download_log_path.exists() + file_size = download_log_path.stat().st_size assert file_size > 3000 server_type_remote_process._del_session() - download_log_path = get_log_file(log_path, server_type_remote_process) - file_size = os.path.getsize(download_log_path) + download_log_path = Path(get_log_file(str(log_path), server_type_remote_process)) + file_size = download_log_path.stat().st_size wf = core.Workflow(server=server_type_remote_process) wf.progress_bar = False @@ -125,5 +125,5 @@ def test_logging_remote(tmpdir, server_type_remote_process): wf.set_output_name("out", to_nodal.outputs.fields_container) wf.get_output("out", core.types.fields_container) - download_log_path = get_log_file(log_path, server_type_remote_process) - assert file_size == os.path.getsize(download_log_path) + download_log_path = Path(get_log_file(str(log_path), server_type_remote_process)) + assert file_size == download_log_path.stat().st_size diff --git a/tests/test_streams_container.py b/tests/test_streams_container.py index 01e116345e..c2577aee54 100644 --- a/tests/test_streams_container.py +++ b/tests/test_streams_container.py @@ -20,7 +20,7 @@ # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. 
-import os +from pathlib import Path import shutil from ansys import dpf @@ -34,27 +34,27 @@ def test_create_streams_container(server_in_process, simple_bar): def test_release_handles(server_in_process, simple_bar): - split = os.path.splitext(simple_bar) - copy_path = split[0] + "copy" + split[1] + simple_bar = Path(simple_bar) + copy_path = simple_bar.parent / (simple_bar.stem + "copy" + simple_bar.suffix) shutil.copyfile(simple_bar, copy_path) - model = dpf.core.Model(copy_path, server=server_in_process) + model = dpf.core.Model(str(copy_path), server=server_in_process) # Evaluate something from the rst _ = model.metadata.meshed_region streams_provider = model.metadata.streams_provider sc = streams_provider.outputs.streams_container() sc.release_handles() - os.remove(copy_path) + copy_path.unlink() def test_release_streams_model(server_in_process, simple_bar): - split = os.path.splitext(simple_bar) - copy_path = split[0] + "copy2" + split[1] + simple_bar = Path(simple_bar) + copy_path = simple_bar.parent / (simple_bar.stem + "copy2" + simple_bar.suffix) shutil.copyfile(simple_bar, copy_path) - model = dpf.core.Model(copy_path, server=server_in_process) + model = dpf.core.Model(str(copy_path), server=server_in_process) # Evaluate something from the rst _ = model.metadata.meshed_region model.metadata.release_streams() - os.remove(copy_path) + copy_path.unlink() def test_release_streams_model_empty(server_in_process): diff --git a/tests/test_workflow.py b/tests/test_workflow.py index 40cff92ecf..9ff5e51812 100644 --- a/tests/test_workflow.py +++ b/tests/test_workflow.py @@ -20,7 +20,7 @@ # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. 
-import os +from pathlib import Path import numpy as np import pytest @@ -47,17 +47,17 @@ def test_create_workflow(server_type): def remove_dot_file(request): """Cleanup a testing directory once we are finished.""" - dot_path = os.path.join(os.getcwd(), "test.dot") - png_path = os.path.join(os.getcwd(), "test.png") - png_path1 = os.path.join(os.getcwd(), "test1.png") + dot_path = Path.cwd() / "test.dot" + png_path = Path.cwd() / "test.png" + png_path1 = Path.cwd() / "test1.png" def remove_files(): - if os.path.exists(dot_path): - os.remove(os.path.join(os.getcwd(), dot_path)) - if os.path.exists(png_path): - os.remove(os.path.join(os.getcwd(), png_path)) - if os.path.exists(png_path1): - os.remove(os.path.join(os.getcwd(), png_path1)) + if dot_path.exists(): + dot_path.unlink() + if png_path.exists(): + png_path.unlink() + if png_path1.exists(): + png_path1.unlink() request.addfinalizer(remove_files) @@ -78,11 +78,11 @@ def test_workflow_view(server_in_process, remove_dot_file): wf.connect_with(pre_wf, {"prewf_output": "wf_input"}) wf.view(off_screen=True, title="test1") - assert not os.path.exists("test1.dot") - assert os.path.exists("test1.png") + assert not Path("test1.dot").exists() + assert Path("test1.png").exists() wf.view(off_screen=True, save_as="test.png", keep_dot_file=True) - assert os.path.exists("test.dot") - assert os.path.exists("test.png") + assert Path("test.dot").exists() + assert Path("test.png").exists() def test_connect_field_workflow(server_type):