From 92b3e3e9771fe735a2f98c4e5dc03248448af490 Mon Sep 17 00:00:00 2001 From: luisaFelixSalles Date: Thu, 21 Nov 2024 15:37:04 +0100 Subject: [PATCH 01/28] add tutorials files --- .../extract_and_explore_results_data.rst | 2 ++ .../extract_and_explore_results_metadata.rst | 20 +++++++++++ .../import_data/import_result_file.rst | 2 ++ .../tutorials/import_data/index.rst | 35 +++++++++++++------ .../import_data/represent_data_on_dpf.rst | 2 ++ 5 files changed, 51 insertions(+), 10 deletions(-) create mode 100644 doc/source/user_guide/tutorials/import_data/extract_and_explore_results_data.rst create mode 100644 doc/source/user_guide/tutorials/import_data/extract_and_explore_results_metadata.rst create mode 100644 doc/source/user_guide/tutorials/import_data/import_result_file.rst create mode 100644 doc/source/user_guide/tutorials/import_data/represent_data_on_dpf.rst diff --git a/doc/source/user_guide/tutorials/import_data/extract_and_explore_results_data.rst b/doc/source/user_guide/tutorials/import_data/extract_and_explore_results_data.rst new file mode 100644 index 0000000000..09c984905a --- /dev/null +++ b/doc/source/user_guide/tutorials/import_data/extract_and_explore_results_data.rst @@ -0,0 +1,2 @@ +.. _ref_tutorials_extract_and_explore_results_data: + diff --git a/doc/source/user_guide/tutorials/import_data/extract_and_explore_results_metadata.rst b/doc/source/user_guide/tutorials/import_data/extract_and_explore_results_metadata.rst new file mode 100644 index 0000000000..630ba1bfbf --- /dev/null +++ b/doc/source/user_guide/tutorials/import_data/extract_and_explore_results_metadata.rst @@ -0,0 +1,20 @@ +.. _ref_tutorials_extract_and_explore_results_metadata: + +======================== +Explore results metadata +======================== + +.. |Field| replace:: :class:`Field` + +When you extract a result from a result file DPF stores it in a |Field|. +This |Field| will then contain the metadata for the result it is associated with. + +The metadata includes the location, the scoping, the shape of the data stored, +number of components, and units of the data. + +This tutorial shows how to extract and explore results metadata extracted +from a result file. + +Get the results +--------------- + diff --git a/doc/source/user_guide/tutorials/import_data/import_result_file.rst b/doc/source/user_guide/tutorials/import_data/import_result_file.rst new file mode 100644 index 0000000000..9e9c2fc2ed --- /dev/null +++ b/doc/source/user_guide/tutorials/import_data/import_result_file.rst @@ -0,0 +1,2 @@ +.. _ref_tutorials_import_result_file: + diff --git a/doc/source/user_guide/tutorials/import_data/index.rst b/doc/source/user_guide/tutorials/import_data/index.rst index 112339d5a5..a2d51f2beb 100644 --- a/doc/source/user_guide/tutorials/import_data/index.rst +++ b/doc/source/user_guide/tutorials/import_data/index.rst @@ -23,11 +23,11 @@ From user input Learn how to import data in DPF from csv file .. grid-item-card:: Represent your data in DPF - :link: ref_tutorials + :link: ref_tutorials_represent_data_on_dpf :link-type: ref :text-align: center - Learn how to represent your manual input data in a DPF data storage structure + Learn how to represent your manual input data in a DPF data storage structures From result files ***************** @@ -37,29 +37,44 @@ From result files :padding: 2 :margin: 2 + .. grid-item-card:: Import a result file in DPF + :link: ref_tutorials_import_result_file + :link-type: ref + :text-align: center + + This tutorial shows how to import a result file in DPF + .. 
grid-item-card:: Extract and explore results metadata
-      :link: ref_tutorials
+      :link: ref_tutorials_extract_and_explore_results_metadata
       :link-type: ref
       :text-align: center

-      This tutorial
+      This tutorial shows how to extract and explore results metadata (unit,
+      location, the scoping, the shape of the data stored ... ) extracted
+      from a result file.

-   .. grid-item-card:: Extract and explore results
+   .. grid-item-card:: Extract and explore results data
-      :link: ref_tutorials
+      :link: ref_tutorials_extract_and_explore_results_data
       :link-type: ref
       :text-align: center

-      This tutorial
+      This tutorial shows how to extract and explore results data from a result file.

-   .. grid-item-card:: Narrow down data (scoping tutorial)
-      :link: ref_tutorials
+   .. grid-item-card:: Narrow down data
+      :link: reft_tutorials_narrow_down_data
       :link-type: ref
       :text-align: center

-      This tutorial
+      This tutorial explains how to scope your results, that is, how to narrow them
+      down to a spatial and/or temporal subset of the simulation data.

 .. toctree::
    :maxdepth: 2
    :hidden:

+   represent_data_on_dpf.rst
+   import_result_file.rst
+   extract_and_explore_results_metadata.rst
+   extract_and_explore_results_data.rst
+   narrow_down_data.rst
\ No newline at end of file
diff --git a/doc/source/user_guide/tutorials/import_data/represent_data_on_dpf.rst b/doc/source/user_guide/tutorials/import_data/represent_data_on_dpf.rst
new file mode 100644
index 0000000000..38a6299292
--- /dev/null
+++ b/doc/source/user_guide/tutorials/import_data/represent_data_on_dpf.rst
@@ -0,0 +1,2 @@
+.. _ref_tutorials_represent_data_on_dpf:
+

From 1037474fa6e06d762626a704d79e4f33a371cc11 Mon Sep 17 00:00:00 2001
From: luisaFelixSalles
Date: Thu, 21 Nov 2024 16:07:43 +0100
Subject: [PATCH 02/28] add narrow_down_data.rst tutorial

---
 .../import_data/narrow_down_data.rst          | 293 ++++++++++++++++++
 1 file changed, 293 insertions(+)
 create mode 100644 doc/source/user_guide/tutorials/import_data/narrow_down_data.rst

diff --git a/doc/source/user_guide/tutorials/import_data/narrow_down_data.rst b/doc/source/user_guide/tutorials/import_data/narrow_down_data.rst
new file mode 100644
index 0000000000..a30ce905e6
--- /dev/null
+++ b/doc/source/user_guide/tutorials/import_data/narrow_down_data.rst
@@ -0,0 +1,293 @@
+.. _reft_tutorials_narrow_down_data:
+
================
Narrow down data
================

.. |Field| replace:: :class:`Field`
.. |FieldsContainer| replace:: :class:`FieldsContainer`
.. |Scoping| replace:: :class:`Scoping`
.. |MeshedRegion| replace:: :class:`MeshedRegion <ansys.dpf.core.meshed_region.MeshedRegion>`
.. |time_freq_scoping_factory| replace:: :mod:`time_freq_scoping_factory`
.. |mesh_scoping_factory| replace:: :mod:`mesh_scoping_factory`
.. |Model| replace:: :class:`Model <ansys.dpf.core.model.Model>`
.. |displacement| replace:: :class:`result.displacement <ansys.dpf.core.operators.result.displacement.displacement>`
.. |Model.results| replace:: :func:`Model.results <ansys.dpf.core.model.Model.results>`
.. |Examples| replace:: :mod:`Examples`
.. |result op| replace:: :mod:`result`
.. |Result| replace:: :class:`Result <ansys.dpf.core.results.Result>`
.. |rescope| replace:: :class:`rescope <ansys.dpf.core.operators.scoping.rescope.rescope>`
.. |from_mesh| replace:: :class:`from_mesh <ansys.dpf.core.operators.scoping.from_mesh.from_mesh>`
.. |extract_scoping| replace:: :class:`extract_scoping <ansys.dpf.core.operators.utility.extract_scoping.extract_scoping>`

To begin setting up your workflow, you need to establish the ``scoping``, that is,
a spatial and/or temporal subset of the simulation data. This tutorial explains
how to scope your results over time and mesh domains.

Understanding a scope
---------------------

The data in DPF is represented by a |Field|. Thus, narrow down your results means scoping your |Field|.
To do so in DPF you use the |Scoping| object. 
For more information on the DPF data storage structures +see :ref:`ref_tutorials_data_structures`. + +The |Field| scoping also defines how the data is ordered, for example: the first +ID in the scoping identifies to which entity the first data entity belongs. + +In conclusion, the essence of the scoping is to specify the set of time or mesh entities by defining a range of IDs: + +.. image:: ../../../images/drawings/scoping-eg.png + :align: center + +Create a |Scoping| +------------------ + +The |Scoping| object can be created by: + +- Instantiating the |Scoping| class (giving the location and the entities ids as arguments) +- Using a scoping factory (|time_freq_scoping_factory| methods for a temporal scoping + and |mesh_scoping_factory| for spatial scoping). + +.. code-block:: python + + # Import the ``ansys.dpf.core`` module + from ansys.dpf import core as dpf + +Time scoping +^^^^^^^^^^^^ + +.. code-block:: python + + # 1) Using the Scoping class + # a. Define a time list that targets the times ids 14, 15, 16, 17 + my_time_list_1 = [14, 15, 16, 17] + # b. Create the time scoping object + my_time_scoping_1 = dpf.Scoping(ids=my_time_list_1, location=dpf.locations.time_freq) + + # 2) Using the time_freq_scoping_factory class + # a. Define a time list that targets the times ids 14, 15, 16, 17 + my_time_list_2 = [14, 15, 16, 17] + # b. Create the time scoping object + my_time_scoping_2 = dpf.time_freq_scoping_factory.scoping_by_sets(cumulative_sets=my_time_list_2) + +Mesh scoping +^^^^^^^^^^^^ + +.. code-block:: python + + # 1) Using the Scoping class in a nodal location + # a. Define a nodes list that targets the nodes with the ids 103, 204, 334, 1802 + my_nodes_ids_1 = [103, 204, 334, 1802] + # b. Create the mesh scoping object + my_mesh_scoping_1 = dpf.Scoping(ids=my_nodes_ids_1, location=dpf.locations.nodal) + + # 2) Using the mesh_scoping_factory class + # a. Define a nodes list that targets the nodes with the ids 103, 204, 334, 1802 + my_nodes_ids_2 = [103, 204, 334, 1802] + # b. Create the mesh scoping object + my_mesh_scoping_2 = dpf.mesh_scoping_factory.nodal_scoping(node_ids=my_nodes_ids_2) + +Extract a |Scoping| +------------------- + +A mesh |Scoping| can be extracted from: + +- A |MeshedRegion| with the |from_mesh| operator; +- A |FieldsContainer| with the |extract_scoping| operator; +- A |Field| with the |extract_scoping| operator. + + +Get the results file +^^^^^^^^^^^^^^^^^^^^ + +Here we will download a result file available in our |Examples| package. +For more information about how to import your result file in DPF check +the :ref:`ref_tutorials_import_result_file` tutorial. + +.. code-block:: python + + # Import the ``ansys.dpf.core`` module, including examples files and the operators subpackage + from ansys.dpf import core as dpf + from ansys.dpf.core import examples + from ansys.dpf.core import operators as ops + + # Define the result file + result_file_path_1 = examples.download_transient_result() + # Create the model + my_model_1 = dpf.Model(data_sources=result_file_path_1) + # Get the meshed region + my_meshed_region_1 = my_model_1.metadata.meshed_region + # Get a FieldsContainer + my_fc = my_model_1.results.displacement.on_all_time_freqs.eval() + # Get a Field + my_field = my_fc[0] + +Extract the |Scoping| +^^^^^^^^^^^^^^^^^^^^^ + +.. 
code-block:: python + + # 3) Extract the scoping from a mesh + my_mesh_scoping_3 = ops.scoping.from_mesh(mesh=my_meshed_region_1).eval() + print("Scoping from mesh", "\n", my_mesh_scoping_3, "\n") + + # 4) Extract the scoping from a FieldsContainer + extract_scop_fc_op = ops.utility.extract_scoping(field_or_fields_container=my_fc) + my_mesh_scoping_4 = extract_scop_fc_op.outputs.mesh_scoping_as_scopings_container() + print("Scoping from FieldsContainer", "\n", my_mesh_scoping_4, "\n") + + # 5) Extract the scoping from a Field + my_mesh_scoping_5 = ops.utility.extract_scoping(field_or_fields_container=my_field).eval() + print("Scoping from Field", "\n", my_mesh_scoping_5, "\n") + +.. rst-class:: sphx-glr-script-out + + .. jupyter-execute:: + :hide-code: + + # Import the ``ansys.dpf.core`` module, including examples files and the operators subpackage + from ansys.dpf import core as dpf + from ansys.dpf.core import examples + from ansys.dpf.core import operators as ops + # Define the result file + result_file_path_1 = examples.download_transient_result() + # Create the model + my_model_1 = dpf.Model(data_sources=result_file_path_1) + # Get the meshed region + my_meshed_region_1 = my_model_1.metadata.meshed_region + # Get a FieldsContainer + my_fc = my_model_1.results.displacement.on_all_time_freqs.eval() + # Get a Field + my_field = my_fc[0] + # 3) Extract the scoping from a mesh + my_mesh_scoping_3 = ops.scoping.from_mesh(mesh=my_meshed_region_1).eval() + print("Scoping from mesh", "\n", my_mesh_scoping_3, "\n") + + # 4) Extract the scoping from a FieldsContainer + extract_scop_fc_op = ops.utility.extract_scoping(field_or_fields_container=my_fc) + my_mesh_scoping_4 = extract_scop_fc_op.outputs.mesh_scoping_as_scopings_container() + print("Scoping from FieldsContainer", "\n", my_mesh_scoping_4, "\n") + + # 5) Extract the scoping from a Field + my_mesh_scoping_5 = ops.utility.extract_scoping(field_or_fields_container=my_field).eval() + print("Scoping from Field", "\n", my_mesh_scoping_5, "\n") + +Use a |Scoping| +--------------- + +The |Scoping| object can be used : + +- As an input to a |result op| operator; +- As an |Result| argument when you extract results using the |Model.results| method; +- With the |Result| object methods. + +The mesh scoping can also be changed after the result extraction or manipulation by using the +|rescope| operator with a |Field| or |FieldsContainer|. + +Get the results file +^^^^^^^^^^^^^^^^^^^^ + +Here we will download a result file available in our |Examples| package. +For more information about how to import your result file in DPF check +the :ref:`ref_tutorials_import_result_file` tutorial. + +.. code-block:: python + + # Import the ``ansys.dpf.core`` module, including examples files and the operators subpackage + from ansys.dpf import core as dpf + from ansys.dpf.core import examples + from ansys.dpf.core import operators as ops + + # Define the result file + result_file_path_1 = examples.download_transient_result() + # Create the DataSources object + my_data_sources_1 = dpf.DataSources(result_path=result_file_path_1) + # Create the model + my_model_1 = dpf.Model(data_sources=my_data_sources_1) + +Extract and scope the results +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Here we extract and scope the displacement results. + +.. 
code-block:: python + + # 1) Using the result.displacement operator + disp_op = ops.result.displacement(data_sources=my_data_sources_1, + time_scoping=my_time_scoping_1, + mesh_scoping=my_mesh_scoping_1).eval() + + # 2) Using the Model.results + disp_model = my_model_1.results.displacement(time_scoping=my_time_scoping_1, mesh_scoping=my_mesh_scoping_1).eval() + + # 3) Using a Result object method + disp_result_method_1 = my_model_1.results.displacement.on_time_scoping(time_scoping=my_time_scoping_1).on_mesh_scoping(mesh_scoping=my_mesh_scoping_1).eval() + disp_result_method_2 = my_model_1.results.displacement.on_first_time_freq.eval() + + print("Displacement from result.displacement operator", "\n", disp_op, "\n") + print("Displacement from Model.results ", "\n", disp_model, "\n") + print("Scoping from Result object method 1", "\n", disp_result_method_1, "\n") + print("Scoping from Result object method 1", "\n", disp_result_method_2, "\n") + +.. rst-class:: sphx-glr-script-out + + .. jupyter-execute:: + :hide-code: + + # Import the ``ansys.dpf.core`` module, including examples files and the operators subpackage + from ansys.dpf import core as dpf + from ansys.dpf.core import examples + from ansys.dpf.core import operators as ops + + # Define the result file + result_file_path_1 = examples.download_transient_result() + # Create the DataSources object + my_data_sources_1 = dpf.DataSources(result_path=result_file_path_1) + # Create the model + my_model_1 = dpf.Model(data_sources=my_data_sources_1) + my_time_list_1 = [14, 15, 16, 17] + my_time_scoping_1 = dpf.Scoping(ids=my_time_list_1, location=dpf.locations.time_freq) + my_nodes_ids_1 = [103, 204, 334, 1802] + my_mesh_scoping_1 = dpf.Scoping(ids=my_nodes_ids_1, location=dpf.locations.nodal) + # 1) Using the result.displacement operator + disp_op = ops.result.displacement(data_sources=my_data_sources_1, + time_scoping=my_time_scoping_1, + mesh_scoping=my_mesh_scoping_1).eval() + + # 2) Using the Model.results + disp_model = my_model_1.results.displacement(time_scoping=my_time_scoping_1, mesh_scoping=my_mesh_scoping_1).eval() + + # 3) Using a Result object method + disp_result_method_1 = my_model_1.results.displacement.on_time_scoping(time_scoping=my_time_scoping_1).on_mesh_scoping(mesh_scoping=my_mesh_scoping_1).eval() + disp_result_method_2 = my_model_1.results.displacement.on_first_time_freq.eval() + + print("Displacement from result.displacement operator", "\n", disp_op, "\n") + print("Displacement from Model.results ", "\n", disp_model, "\n") + print("Scoping from Result object method 1", "\n", disp_result_method_1, "\n") + print("Scoping from Result object method 1", "\n", disp_result_method_2, "\n") + +Extract and rescope the results +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Here we rescope the displacement results. + +.. code-block:: python + + # 1) Extract the results for the entire mesh + disp_all_mesh = my_model_1.results.displacement.eval() + + # 2) Rescope the displacement results + disp_rescope = ops.scoping.rescope(fields=disp_rescope, mesh_scoping=my_mesh_scoping_1).eval() + + print("Displacement on all the mesh", "\n", disp_all_mesh, "\n") + print("Displacement rescoped ", "\n", disp_rescope, "\n") + +.. rst-class:: sphx-glr-script-out + + .. 
jupyter-execute:: + :hide-code: + + disp_all_mesh = my_model_1.results.displacement.eval() + disp_rescope = ops.scoping.rescope(fields=disp_rescope, mesh_scoping=my_mesh_scoping_1).eval() + print("Displacement on all the mesh", "\n", disp_all_mesh, "\n") + print("Displacement rescoped ", "\n", disp_rescope, "\n") \ No newline at end of file From f1aa4a72f2b2f1db23054d27ea2e6db9b0b914af Mon Sep 17 00:00:00 2001 From: luisaFelixSalles Date: Thu, 21 Nov 2024 16:24:07 +0100 Subject: [PATCH 03/28] updates narrow_down_data.rst tutorial --- .../user_guide/tutorials/import_data/narrow_down_data.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/source/user_guide/tutorials/import_data/narrow_down_data.rst b/doc/source/user_guide/tutorials/import_data/narrow_down_data.rst index a30ce905e6..afb0c7a603 100644 --- a/doc/source/user_guide/tutorials/import_data/narrow_down_data.rst +++ b/doc/source/user_guide/tutorials/import_data/narrow_down_data.rst @@ -277,7 +277,7 @@ Here we rescope the displacement results. disp_all_mesh = my_model_1.results.displacement.eval() # 2) Rescope the displacement results - disp_rescope = ops.scoping.rescope(fields=disp_rescope, mesh_scoping=my_mesh_scoping_1).eval() + disp_rescope = ops.scoping.rescope(fields=disp_all_mesh, mesh_scoping=my_mesh_scoping_1).eval() print("Displacement on all the mesh", "\n", disp_all_mesh, "\n") print("Displacement rescoped ", "\n", disp_rescope, "\n") @@ -288,6 +288,6 @@ Here we rescope the displacement results. :hide-code: disp_all_mesh = my_model_1.results.displacement.eval() - disp_rescope = ops.scoping.rescope(fields=disp_rescope, mesh_scoping=my_mesh_scoping_1).eval() + disp_rescope = ops.scoping.rescope(fields=disp_all_mesh, mesh_scoping=my_mesh_scoping_1).eval() print("Displacement on all the mesh", "\n", disp_all_mesh, "\n") print("Displacement rescoped ", "\n", disp_rescope, "\n") \ No newline at end of file From 86f9daae58fc34e50b519537cb535668b476d7a7 Mon Sep 17 00:00:00 2001 From: luisaFelixSalles Date: Thu, 21 Nov 2024 16:41:23 +0100 Subject: [PATCH 04/28] add extract_and_explore_results_metadata.rst tutorial --- .../extract_and_explore_results_metadata.rst | 212 +++++++++++++++++- 1 file changed, 206 insertions(+), 6 deletions(-) diff --git a/doc/source/user_guide/tutorials/import_data/extract_and_explore_results_metadata.rst b/doc/source/user_guide/tutorials/import_data/extract_and_explore_results_metadata.rst index 630ba1bfbf..6939e08b68 100644 --- a/doc/source/user_guide/tutorials/import_data/extract_and_explore_results_metadata.rst +++ b/doc/source/user_guide/tutorials/import_data/extract_and_explore_results_metadata.rst @@ -5,16 +5,216 @@ Explore results metadata ======================== .. |Field| replace:: :class:`Field` +.. |Examples| replace:: :mod:`Examples` +.. |ResultInfo| replace:: :class:`ResultInfo` + +You can explore the general results metadata before extracting them by using +the |ResultInfo| object. This metadata includes: + +- Analysis type; +- Physics type; +- Number of results; +- Unit system; +- Solver version, date and time; +- Job name; When you extract a result from a result file DPF stores it in a |Field|. This |Field| will then contain the metadata for the result it is associated with. +This metadata includes: + +- Location; +- Scoping; +- Shape of the data stored; +- Number of components; +- Units of the data. + +This tutorial shows how to extract and explore results metadata from a result file. 
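As a quick preview of both levels of metadata, here is a minimal sketch, reusing the same
transient result file from our |Examples| package as the rest of this tutorial:

.. code-block:: python

    # Import the ``ansys.dpf.core`` module and the examples files
    from ansys.dpf import core as dpf
    from ansys.dpf.core import examples

    # Create a model from a downloaded example result file
    model = dpf.Model(data_sources=examples.download_transient_result())
    # General metadata of the result file, stored in the ResultInfo object
    print(model.metadata.result_info)
    # Metadata carried by one extracted result, stored in its Field
    disp_field = model.results.displacement.eval()[0]
    print(disp_field.location, disp_field.unit)

Both entry points are detailed in the sections below.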

Get the result file
-------------------

Here we will download a result file available in our |Examples| package.
For more information about how to import your result file in DPF check
the :ref:`ref_tutorials_import_result_file` tutorial.

Here we get the displacement results.

.. code-block:: python

    # Import the ``ansys.dpf.core`` module, including examples files and the operators subpackage
    from ansys.dpf import core as dpf
    from ansys.dpf.core import examples
    from ansys.dpf.core import operators as ops

    # Define the result file
    result_file_path_1 = examples.download_transient_result()
    # Create the model
    my_model_1 = dpf.Model(data_sources=result_file_path_1)

Explore the general results metadata
------------------------------------

Get the |ResultInfo| object from the model and then explore it using its methods.

.. code-block:: python

    # Define the ResultInfo object
    my_result_info_1 = my_model_1.metadata.result_info

    # Get the analysis type
    my_analysis_type = my_result_info_1.analysis_type
    print("Analysis type: ",my_analysis_type, "\n")

    # Get the physics type
    my_physics_type = my_result_info_1.physics_type
    print("Physics type: ",my_physics_type, "\n")

    # Get the number of available results
    number_of_results = my_result_info_1.n_results
    print("Number of available results: ",number_of_results, "\n")

    # Get the unit system
    my_unit_system = my_result_info_1.unit_system
    print("Unit system: ",my_unit_system, "\n")

    # Get the solver version, date and time
    my_solver_version = my_result_info_1.solver_version
    print("Solver version: ",my_solver_version, "\n")

    my_solver_date = my_result_info_1.solver_date
    print("Solver date: ", my_solver_date, "\n")

    my_solver_time = my_result_info_1.solver_time
    print("Solver time: ",my_solver_time, "\n")

    # Get the job name
    my_job_name = my_result_info_1.job_name
    print("Job name: ",my_job_name, "\n")

.. rst-class:: sphx-glr-script-out

    .. jupyter-execute::
        :hide-code:

        from ansys.dpf import core as dpf
        from ansys.dpf.core import examples
        from ansys.dpf.core import operators as ops
        result_file_path_1 = examples.download_transient_result()
        my_model_1 = dpf.Model(data_sources=result_file_path_1)
        my_result_info_1 = my_model_1.metadata.result_info
        my_analysis_type = my_result_info_1.analysis_type
        print("Analysis type: ",my_analysis_type, "\n")
        my_physics_type = my_result_info_1.physics_type
        print("Physics type: ",my_physics_type, "\n")
        number_of_results = my_result_info_1.n_results
        print("Number of available results: ",number_of_results, "\n")
        my_unit_system = my_result_info_1.unit_system
        print("Unit system: ",my_unit_system, "\n")
        my_solver_version = my_result_info_1.solver_version
        print("Solver version: ",my_solver_version, "\n")
        my_solver_date = my_result_info_1.solver_date
        print("Solver date: ", my_solver_date, "\n")
        my_solver_time = my_result_info_1.solver_time
        print("Solver time: ",my_solver_time, "\n")
        my_job_name = my_result_info_1.job_name
        print("Job name: ",my_job_name, "\n")

Explore a given result metadata
-------------------------------

Here we will explore the metadata of the displacement results.

Start by extracting the displacement results:

.. code-block:: python

    # Extract the displacement results
    disp_results = my_model_1.results.displacement.eval()

    # Get the displacement field
    my_disp_field = disp_results[0]

Explore the displacement results metadata:

.. 
code-block:: python + + # Location of the displacement data + my_location = my_disp_field.location + print("Location: ", my_location,'\n') + + # Displacement field scoping + my_scoping = my_disp_field.scoping # type and quantity of entities + print("Scoping: ", '\n',my_scoping, '\n') + + my_scoping_ids = my_disp_field.scoping.ids # Available entities ids + print("Scoping ids: ", '\n', my_scoping_ids, '\n') + + # Elementary data count + # Number of entities (how many data vectors we have) + my_elementary_data_count = my_disp_field.elementary_data_count + print("Elementary data count: ", my_elementary_data_count, '\n') + + # Components count + # Vectors dimension, here we have a displacement so we expect to have 3 components (X, Y and Z) + my_components_count = my_disp_field.component_count + print("Components count: ", my_components_count, '\n') + + # Size + # Length of the data entire vector (equal to the number of elementary data times the number of components) + my_field_size = my_disp_field.size + print("Size: ", my_field_size, '\n') + + # Fields shape + # Gives a tuple with the elementary data count and the components count + my_shape = my_disp_field.shape + print("Shape: ", my_shape, '\n') + + # Units + my_unit = my_disp_field.unit + print("Unit: ", my_unit, '\n') + +.. rst-class:: sphx-glr-script-out + + .. jupyter-execute:: + :hide-code: + + # Extract the displacement results + disp_results = my_model_1.results.displacement.eval() + + # Get the displacement field + my_disp_field = disp_results[0] + + # Location of the displacement data + my_location = my_disp_field.location + print("Location: ", my_location,'\n') + + # Displacement field scoping + my_scoping = my_disp_field.scoping # type and quantity of entities + print("Scoping: ", '\n',my_scoping, '\n') + + my_scoping_ids = my_disp_field.scoping.ids # Available entities ids + print("Scoping ids: ", '\n', my_scoping_ids, '\n') + + # Elementary data count + # Number of entities (how many data vectors we have) + my_elementary_data_count = my_disp_field.elementary_data_count + print("Elementary data count: ", my_elementary_data_count, '\n') -The metadata includes the location, the scoping, the shape of the data stored, -number of components, and units of the data. + # Components count + # Vectors dimension, here we have a displacement so we expect to have 3 components (X, Y and Z) + my_components_count = my_disp_field.component_count + print("Components count: ", my_components_count, '\n') -This tutorial shows how to extract and explore results metadata extracted -from a result file. 
+ # Size + # Length of the data entire vector (equal to the number of elementary data times the number of components) + my_field_size = my_disp_field.size + print("Size: ", my_field_size, '\n') -Get the results ---------------- + # Fields shape + # Gives a tuple with the elementary data count and the components count + my_shape = my_disp_field.shape + print("Shape: ", my_shape, '\n') + # Units + my_unit = my_disp_field.unit + print("Unit: ", my_unit, '\n') \ No newline at end of file From 3edb415d68e693d24652faf7f95141e3de4c5b4a Mon Sep 17 00:00:00 2001 From: luisaFelixSalles Date: Thu, 21 Nov 2024 16:42:12 +0100 Subject: [PATCH 05/28] updates extract_and_explore_results_metadata.rst tutorial --- .../import_data/extract_and_explore_results_metadata.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/source/user_guide/tutorials/import_data/extract_and_explore_results_metadata.rst b/doc/source/user_guide/tutorials/import_data/extract_and_explore_results_metadata.rst index 6939e08b68..fa56ec06c3 100644 --- a/doc/source/user_guide/tutorials/import_data/extract_and_explore_results_metadata.rst +++ b/doc/source/user_guide/tutorials/import_data/extract_and_explore_results_metadata.rst @@ -147,7 +147,7 @@ Explore the displacement results metadata: print("Scoping: ", '\n',my_scoping, '\n') my_scoping_ids = my_disp_field.scoping.ids # Available entities ids - print("Scoping ids: ", '\n', my_scoping_ids, '\n') + print("Scoping ids: ", my_scoping_ids, '\n') # Elementary data count # Number of entities (how many data vectors we have) @@ -193,7 +193,7 @@ Explore the displacement results metadata: print("Scoping: ", '\n',my_scoping, '\n') my_scoping_ids = my_disp_field.scoping.ids # Available entities ids - print("Scoping ids: ", '\n', my_scoping_ids, '\n') + print("Scoping ids: ", my_scoping_ids, '\n') # Elementary data count # Number of entities (how many data vectors we have) From f51c18c2984535bb7cb3c34a33b80c3cd247f33b Mon Sep 17 00:00:00 2001 From: luisaFelixSalles Date: Thu, 21 Nov 2024 16:45:55 +0100 Subject: [PATCH 06/28] updates extract_and_explore_results_metadata.rst tutorial --- .../import_data/extract_and_explore_results_metadata.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/source/user_guide/tutorials/import_data/extract_and_explore_results_metadata.rst b/doc/source/user_guide/tutorials/import_data/extract_and_explore_results_metadata.rst index fa56ec06c3..b7615a7400 100644 --- a/doc/source/user_guide/tutorials/import_data/extract_and_explore_results_metadata.rst +++ b/doc/source/user_guide/tutorials/import_data/extract_and_explore_results_metadata.rst @@ -19,7 +19,7 @@ the |ResultInfo| object. This metadata includes: - Job name; When you extract a result from a result file DPF stores it in a |Field|. -This |Field| will then contain the metadata for the result it is associated with. +This |Field| will then contain the metadata for the result associated with it. 
This metadata includes: - Location; From 3b8a7c02c7964018403900ebfaaff7b35e8b1bcc Mon Sep 17 00:00:00 2001 From: luisaFelixSalles Date: Thu, 21 Nov 2024 16:49:21 +0100 Subject: [PATCH 07/28] updates narrow_down_data.rst tutorial --- .../tutorials/import_data/narrow_down_data.rst | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/doc/source/user_guide/tutorials/import_data/narrow_down_data.rst b/doc/source/user_guide/tutorials/import_data/narrow_down_data.rst index afb0c7a603..8b0aa10960 100644 --- a/doc/source/user_guide/tutorials/import_data/narrow_down_data.rst +++ b/doc/source/user_guide/tutorials/import_data/narrow_down_data.rst @@ -28,8 +28,18 @@ Understanding a scope --------------------- The data in DPF is represented by a |Field|. Thus, narrow down your results means scoping your |Field|. -To do so in DPF you use the |Scoping| object. For more information on the DPF data storage structures -see :ref:`ref_tutorials_data_structures`. +To do so in DPF you use the |Scoping| object. + +.. note:: + + Scoping is important because when DPF-Core returns the |Field| object, what Python actually has + is a client-side representation of the |Field|, not the entirety of the |Field| itself. This means + that all the data of the field is stored within the DPF service. This is important + because when building your workflows, the most efficient way of interacting with result data + is to minimize the exchange of data between Python and DPF, either by using operators + or by accessing exclusively the data that is needed. + +For more information on the DPF data storage structures see :ref:`ref_tutorials_data_structures`. The |Field| scoping also defines how the data is ordered, for example: the first ID in the scoping identifies to which entity the first data entity belongs. From 4f5ce92323c13a172e8365455c2c3f5d81cde0e1 Mon Sep 17 00:00:00 2001 From: luisaFelixSalles Date: Thu, 21 Nov 2024 17:10:44 +0100 Subject: [PATCH 08/28] updates extract_and_explore_results_metadata.rst tutorial --- .../import_data/extract_and_explore_results_metadata.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/source/user_guide/tutorials/import_data/extract_and_explore_results_metadata.rst b/doc/source/user_guide/tutorials/import_data/extract_and_explore_results_metadata.rst index b7615a7400..6ffccd9909 100644 --- a/doc/source/user_guide/tutorials/import_data/extract_and_explore_results_metadata.rst +++ b/doc/source/user_guide/tutorials/import_data/extract_and_explore_results_metadata.rst @@ -33,7 +33,7 @@ This tutorial shows how to extract and explore results metadata from a result fi Get the result file ------------------- -Here we will download a result file available in our |Examples| package. +Here we will download a result file available in our |Examples| package. For more information about how to import your result file in DPF check the :ref:`ref_tutorials_import_result_file` tutorial. 
From 4135b09c354a3c38cdf2df6ce2a178efa84a5ea4 Mon Sep 17 00:00:00 2001 From: luisaFelixSalles Date: Fri, 22 Nov 2024 10:53:20 +0100 Subject: [PATCH 09/28] utilise que juptyter sphinx --- .../import_data/narrow_down_data.rst | 93 ++----------------- 1 file changed, 8 insertions(+), 85 deletions(-) diff --git a/doc/source/user_guide/tutorials/import_data/narrow_down_data.rst b/doc/source/user_guide/tutorials/import_data/narrow_down_data.rst index 8b0aa10960..73f4f0ce1a 100644 --- a/doc/source/user_guide/tutorials/import_data/narrow_down_data.rst +++ b/doc/source/user_guide/tutorials/import_data/narrow_down_data.rst @@ -58,7 +58,7 @@ The |Scoping| object can be created by: - Using a scoping factory (|time_freq_scoping_factory| methods for a temporal scoping and |mesh_scoping_factory| for spatial scoping). -.. code-block:: python +.. jupyter-execute:: # Import the ``ansys.dpf.core`` module from ansys.dpf import core as dpf @@ -66,7 +66,7 @@ The |Scoping| object can be created by: Time scoping ^^^^^^^^^^^^ -.. code-block:: python +.. jupyter-execute:: # 1) Using the Scoping class # a. Define a time list that targets the times ids 14, 15, 16, 17 @@ -83,7 +83,7 @@ Time scoping Mesh scoping ^^^^^^^^^^^^ -.. code-block:: python +.. jupyter-execute:: # 1) Using the Scoping class in a nodal location # a. Define a nodes list that targets the nodes with the ids 103, 204, 334, 1802 @@ -114,7 +114,7 @@ Here we will download a result file available in our |Examples| package. For more information about how to import your result file in DPF check the :ref:`ref_tutorials_import_result_file` tutorial. -.. code-block:: python +.. jupyter-execute:: # Import the ``ansys.dpf.core`` module, including examples files and the operators subpackage from ansys.dpf import core as dpf @@ -135,7 +135,7 @@ the :ref:`ref_tutorials_import_result_file` tutorial. Extract the |Scoping| ^^^^^^^^^^^^^^^^^^^^^ -.. code-block:: python +.. jupyter-execute:: # 3) Extract the scoping from a mesh my_mesh_scoping_3 = ops.scoping.from_mesh(mesh=my_meshed_region_1).eval() @@ -150,38 +150,6 @@ Extract the |Scoping| my_mesh_scoping_5 = ops.utility.extract_scoping(field_or_fields_container=my_field).eval() print("Scoping from Field", "\n", my_mesh_scoping_5, "\n") -.. rst-class:: sphx-glr-script-out - - .. 
jupyter-execute:: - :hide-code: - - # Import the ``ansys.dpf.core`` module, including examples files and the operators subpackage - from ansys.dpf import core as dpf - from ansys.dpf.core import examples - from ansys.dpf.core import operators as ops - # Define the result file - result_file_path_1 = examples.download_transient_result() - # Create the model - my_model_1 = dpf.Model(data_sources=result_file_path_1) - # Get the meshed region - my_meshed_region_1 = my_model_1.metadata.meshed_region - # Get a FieldsContainer - my_fc = my_model_1.results.displacement.on_all_time_freqs.eval() - # Get a Field - my_field = my_fc[0] - # 3) Extract the scoping from a mesh - my_mesh_scoping_3 = ops.scoping.from_mesh(mesh=my_meshed_region_1).eval() - print("Scoping from mesh", "\n", my_mesh_scoping_3, "\n") - - # 4) Extract the scoping from a FieldsContainer - extract_scop_fc_op = ops.utility.extract_scoping(field_or_fields_container=my_fc) - my_mesh_scoping_4 = extract_scop_fc_op.outputs.mesh_scoping_as_scopings_container() - print("Scoping from FieldsContainer", "\n", my_mesh_scoping_4, "\n") - - # 5) Extract the scoping from a Field - my_mesh_scoping_5 = ops.utility.extract_scoping(field_or_fields_container=my_field).eval() - print("Scoping from Field", "\n", my_mesh_scoping_5, "\n") - Use a |Scoping| --------------- @@ -201,7 +169,7 @@ Here we will download a result file available in our |Examples| package. For more information about how to import your result file in DPF check the :ref:`ref_tutorials_import_result_file` tutorial. -.. code-block:: python +.. jupyter-execute:: # Import the ``ansys.dpf.core`` module, including examples files and the operators subpackage from ansys.dpf import core as dpf @@ -220,7 +188,7 @@ Extract and scope the results Here we extract and scope the displacement results. -.. code-block:: python +.. jupyter-execute:: # 1) Using the result.displacement operator disp_op = ops.result.displacement(data_sources=my_data_sources_1, @@ -239,49 +207,12 @@ Here we extract and scope the displacement results. print("Scoping from Result object method 1", "\n", disp_result_method_1, "\n") print("Scoping from Result object method 1", "\n", disp_result_method_2, "\n") -.. rst-class:: sphx-glr-script-out - - .. 
jupyter-execute:: - :hide-code: - - # Import the ``ansys.dpf.core`` module, including examples files and the operators subpackage - from ansys.dpf import core as dpf - from ansys.dpf.core import examples - from ansys.dpf.core import operators as ops - - # Define the result file - result_file_path_1 = examples.download_transient_result() - # Create the DataSources object - my_data_sources_1 = dpf.DataSources(result_path=result_file_path_1) - # Create the model - my_model_1 = dpf.Model(data_sources=my_data_sources_1) - my_time_list_1 = [14, 15, 16, 17] - my_time_scoping_1 = dpf.Scoping(ids=my_time_list_1, location=dpf.locations.time_freq) - my_nodes_ids_1 = [103, 204, 334, 1802] - my_mesh_scoping_1 = dpf.Scoping(ids=my_nodes_ids_1, location=dpf.locations.nodal) - # 1) Using the result.displacement operator - disp_op = ops.result.displacement(data_sources=my_data_sources_1, - time_scoping=my_time_scoping_1, - mesh_scoping=my_mesh_scoping_1).eval() - - # 2) Using the Model.results - disp_model = my_model_1.results.displacement(time_scoping=my_time_scoping_1, mesh_scoping=my_mesh_scoping_1).eval() - - # 3) Using a Result object method - disp_result_method_1 = my_model_1.results.displacement.on_time_scoping(time_scoping=my_time_scoping_1).on_mesh_scoping(mesh_scoping=my_mesh_scoping_1).eval() - disp_result_method_2 = my_model_1.results.displacement.on_first_time_freq.eval() - - print("Displacement from result.displacement operator", "\n", disp_op, "\n") - print("Displacement from Model.results ", "\n", disp_model, "\n") - print("Scoping from Result object method 1", "\n", disp_result_method_1, "\n") - print("Scoping from Result object method 1", "\n", disp_result_method_2, "\n") - Extract and rescope the results ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Here we rescope the displacement results. -.. code-block:: python +.. jupyter-execute:: # 1) Extract the results for the entire mesh disp_all_mesh = my_model_1.results.displacement.eval() @@ -292,12 +223,4 @@ Here we rescope the displacement results. print("Displacement on all the mesh", "\n", disp_all_mesh, "\n") print("Displacement rescoped ", "\n", disp_rescope, "\n") -.. rst-class:: sphx-glr-script-out - .. jupyter-execute:: - :hide-code: - - disp_all_mesh = my_model_1.results.displacement.eval() - disp_rescope = ops.scoping.rescope(fields=disp_all_mesh, mesh_scoping=my_mesh_scoping_1).eval() - print("Displacement on all the mesh", "\n", disp_all_mesh, "\n") - print("Displacement rescoped ", "\n", disp_rescope, "\n") \ No newline at end of file From c69f4767355ec1b840e632bb5d736a7d5d073123 Mon Sep 17 00:00:00 2001 From: luisaFelixSalles Date: Fri, 22 Nov 2024 10:55:16 +0100 Subject: [PATCH 10/28] use only jupyter sphinx: extract_and_explore_results_metadata.rst --- .../extract_and_explore_results_metadata.rst | 82 +------------------ 1 file changed, 4 insertions(+), 78 deletions(-) diff --git a/doc/source/user_guide/tutorials/import_data/extract_and_explore_results_metadata.rst b/doc/source/user_guide/tutorials/import_data/extract_and_explore_results_metadata.rst index 6ffccd9909..0ef98867e9 100644 --- a/doc/source/user_guide/tutorials/import_data/extract_and_explore_results_metadata.rst +++ b/doc/source/user_guide/tutorials/import_data/extract_and_explore_results_metadata.rst @@ -39,7 +39,7 @@ the :ref:`ref_tutorials_import_result_file` tutorial. Here we get the displacement results. -.. code-block:: python +.. 
jupyter-execute:: # Import the ``ansys.dpf.core`` module, including examples files and the operators subpackage from ansys.dpf import core as dpf @@ -56,7 +56,7 @@ Explore the general results metadata Get the |ResultInfo| object from the model and then explore it using this class methods. -.. code-block:: python +.. jupyter-execute:: # Define the ResultInfo object my_result_info_1 = my_model_1.metadata.result_info @@ -91,34 +91,6 @@ Get the |ResultInfo| object from the model and then explore it using this class my_job_name = my_result_info_1.job_name print("Job name: ",my_job_name, "\n") -.. rst-class:: sphx-glr-script-out - - .. jupyter-execute:: - :hide-code: - - from ansys.dpf import core as dpf - from ansys.dpf.core import examples - from ansys.dpf.core import operators as ops - result_file_path_1 = examples.download_transient_result() - my_model_1 = dpf.Model(data_sources=result_file_path_1) - my_result_info_1 = my_model_1.metadata.result_info - my_analysis_type = my_result_info_1.analysis_type - print("Analysis type: ",my_analysis_type, "\n") - my_physics_type = my_result_info_1.physics_type - print("Physics type: ",my_physics_type, "\n") - number_of_results = my_result_info_1.n_results - print("Number of available results: ",number_of_results, "\n") - my_unit_system = my_result_info_1.unit_system - print("Unit system: ",my_unit_system, "\n") - my_solver_version = my_result_info_1.solver_version - print("Solver version: ",my_solver_version, "\n") - my_solver_date = my_result_info_1.solver_date - print("Solver date: ", my_solver_date, "\n") - my_solver_time = my_result_info_1.solver_time - print("Solver time: ",my_solver_time, "\n") - my_job_name = my_result_info_1.job_name - print("Job name: ",my_job_name, "\n") - Explore a given result metadata ------------------------------- @@ -126,7 +98,7 @@ Here we will explore the metadata of the displacement results. Start by extracting the displacement results: -.. code-block:: python +.. jupyter-execute:: # Extract the displacement results disp_results = my_model_1.results.displacement.eval() @@ -136,7 +108,7 @@ Start by extracting the displacement results: Explore the displacement results metadata: -.. code-block:: python +.. jupyter-execute:: # Location of the displacement data my_location = my_disp_field.location @@ -172,49 +144,3 @@ Explore the displacement results metadata: # Units my_unit = my_disp_field.unit print("Unit: ", my_unit, '\n') - -.. rst-class:: sphx-glr-script-out - - .. 
jupyter-execute:: - :hide-code: - - # Extract the displacement results - disp_results = my_model_1.results.displacement.eval() - - # Get the displacement field - my_disp_field = disp_results[0] - - # Location of the displacement data - my_location = my_disp_field.location - print("Location: ", my_location,'\n') - - # Displacement field scoping - my_scoping = my_disp_field.scoping # type and quantity of entities - print("Scoping: ", '\n',my_scoping, '\n') - - my_scoping_ids = my_disp_field.scoping.ids # Available entities ids - print("Scoping ids: ", my_scoping_ids, '\n') - - # Elementary data count - # Number of entities (how many data vectors we have) - my_elementary_data_count = my_disp_field.elementary_data_count - print("Elementary data count: ", my_elementary_data_count, '\n') - - # Components count - # Vectors dimension, here we have a displacement so we expect to have 3 components (X, Y and Z) - my_components_count = my_disp_field.component_count - print("Components count: ", my_components_count, '\n') - - # Size - # Length of the data entire vector (equal to the number of elementary data times the number of components) - my_field_size = my_disp_field.size - print("Size: ", my_field_size, '\n') - - # Fields shape - # Gives a tuple with the elementary data count and the components count - my_shape = my_disp_field.shape - print("Shape: ", my_shape, '\n') - - # Units - my_unit = my_disp_field.unit - print("Unit: ", my_unit, '\n') \ No newline at end of file From 690647598157b4e334cdff00c0cbca9cf487c4e1 Mon Sep 17 00:00:00 2001 From: luisaFelixSalles Date: Fri, 22 Nov 2024 11:27:49 +0100 Subject: [PATCH 11/28] add import_result_file.rst tutorial --- .../import_data/import_result_file.rst | 320 ++++++++++++++++++ 1 file changed, 320 insertions(+) diff --git a/doc/source/user_guide/tutorials/import_data/import_result_file.rst b/doc/source/user_guide/tutorials/import_data/import_result_file.rst index 9e9c2fc2ed..e9c68e607d 100644 --- a/doc/source/user_guide/tutorials/import_data/import_result_file.rst +++ b/doc/source/user_guide/tutorials/import_data/import_result_file.rst @@ -1,2 +1,322 @@ .. _ref_tutorials_import_result_file: +========================= +Import result file in DPF +========================= + +.. |Model| replace:: :class:`Model ` +.. |DataSources| replace:: :class:`DataSources ` +.. |Examples| replace:: :mod:`Examples` +.. |set_result_file_path| replace:: :func:`set_result_file_path() ` +.. |add_file_path| replace:: :func:`add_file_path() ` + +This tutorial shows how to import a result file in DPF. + +You have two approaches to import a result file in DPF: + +- Using the |DataSources| object +- Using the |Model| object + +.. note:: + + The |Model| extracts a large amount of information by default (results, mesh and analysis data). + If using this helper takes a long time for processing the code, mind using a |DataSources| object + and instantiating operators directly with it. Check the ":ref:`get_mesh_mesh_provider`" for more + information on how to get a mesh from a result file. + +Define the result file path +--------------------------- + +Both approaches need a file path to be defined. Here we will download result files available in +our |Examples| package. + +.. tab-set:: + + .. tab-item:: MAPDL + + .. 
jupyter-execute:: + + # Import the ``ansys.dpf.core`` module, including examples files and the operators subpackage + from ansys.dpf import core as dpf + from ansys.dpf.core import examples + from ansys.dpf.core import operators as ops + + # Define the .rst result file + result_file_path_11 = examples.find_static_rst() + + # Define the modal superposition harmonic analysis (.mode, .rfrq and .rst) result files + result_file_path_12 = examples.download_msup_files_to_dict() + + print("1:", "\n",result_file_path_11, "\n") + print("2:", "\n",result_file_path_12, "\n") + + .. tab-item:: LSDYNA + + .. jupyter-execute:: + + # Import the ``ansys.dpf.core`` module, including examples files and the operators subpackage + from ansys.dpf import core as dpf + from ansys.dpf.core import examples + from ansys.dpf.core import operators as ops + + # Define the .d3plot result file + result_file_path_21 = examples.download_d3plot_beam() + + # Define the .binout result file + result_file_path_22 = examples.download_binout_matsum() + + print("1:", "\n",result_file_path_21, "\n") + print("2:", "\n",result_file_path_22, "\n") + + .. tab-item:: Fluent + + .. jupyter-execute:: + + # Import the ``ansys.dpf.core`` module, including examples files and the operators subpackage + from ansys.dpf import core as dpf + from ansys.dpf.core import examples + from ansys.dpf.core import operators as ops + + # Define the project .flprj result file + result_file_path_31 = examples.download_fluent_axial_comp()["flprj"] + + # Define the CFF .cas.h5/.dat.h5 result files + result_file_path_32 = examples.download_fluent_axial_comp() + + print("1:", "\n",result_file_path_31, "\n") + print("2:", "\n",result_file_path_32, "\n") + + .. tab-item:: CFX + + .. jupyter-execute:: + + # Import the ``ansys.dpf.core`` module, including examples files and the operators subpackage + from ansys.dpf import core as dpf + from ansys.dpf.core import examples + from ansys.dpf.core import operators as ops + + # Define the project .res result file + result_file_path_41 = examples.download_cfx_mixing_elbow() + + # Define the CFF .cas.cff/.dat.cff result files + result_file_path_42 = examples.download_cfx_heating_coil() + + print("1:", "\n",result_file_path_41, "\n") + print("2:", "\n",result_file_path_42, "\n") + +Use a |DataSources| +------------------- + +The |DataSources| object manages paths to their files. Use this object to declare data +inputs for DPF operators and define their locations. + +.. tab-set:: + + .. tab-item:: MAPDL + + **a) `.rst` result file** + + You create the |DataSources| object by defining the the path to the main result file. + + .. jupyter-execute:: + + # Create the DataSources object + my_data_sources_11 = dpf.DataSources(result_path=result_file_path_11) + + **b) `.mode`, `.rfrq` and `.rst` result files** + + In the modal superposition, modal coefficients are multiplied by mode shapes (of a previous modal analysis) + to analyse a structure under given boundary conditions in a range of frequencies. Doing this expansion “on demand” + in DPF instead of in the solver reduces the size of the result files. + + The expansion is recursive in DPF: first the modal response is read. Then, “upstream” mode shapes are found in + the data sources, where they are read and expanded. + + To create a recursive workflow you have to add the upstream data to the main |DataSources| object. Upstream refers + to a source that provides data to a particular process. + + .. 
jupyter-execute::

            # Create the DataSources object
            my_data_sources_12 = dpf.DataSources()
            # Define the main result data
            my_data_sources_12.set_result_file_path(filepath=result_file_path_12["rfrq"], key='rfrq')

            # Create the upstream DataSources object with the main upstream data
            up_stream_ds_12 = dpf.DataSources(result_path=result_file_path_12["mode"])
            # Add the additional upstream data to the upstream DataSources object
            up_stream_ds_12.add_file_path(filepath=result_file_path_12["rst"])

            # Add the upstream DataSources to the main DataSources object
            my_data_sources_12.add_upstream(upstream_data_sources=up_stream_ds_12)

    .. tab-item:: LSDYNA

        **a) `.d3plot` result file**

        This LS-DYNA d3plot file contains several individual results, each at different times.
        The d3plot file does not contain information related to units. In this case, as the
        simulation was run through Mechanical, a ``file.actunits`` file is produced. If this
        file is supplemented in the |DataSources|, the units will be correctly fetched for all
        results in the file as well as for the mesh.

        .. jupyter-execute::

            # Create the DataSources object
            my_data_sources_21 = dpf.DataSources()
            my_data_sources_21.set_result_file_path(filepath=result_file_path_21[0], key="d3plot")
            my_data_sources_21.add_file_path(filepath=result_file_path_21[3], key="actunits")

        **b) `.binout` result file**

        The extension key ``.binout`` is not specified in the result file. Thus, we use the
        |set_result_file_path| method to correctly add the result file to the |DataSources|,
        explicitly giving the extension key as an argument.

        .. jupyter-execute::

            # Create the DataSources object
            my_data_sources_22 = dpf.DataSources()
            # Define the path to the main result
            my_data_sources_22.set_result_file_path(filepath=result_file_path_22, key="binout")

    .. tab-item:: Fluent

        **a) `.flprj` result file**

        You create the |DataSources| object by defining the path to the main result file.

        .. jupyter-execute::

            # Create the DataSources object
            my_data_sources_31 = dpf.DataSources(result_path=result_file_path_31)

        **b) `.cas.h5`, `.dat.h5` result files**

        Here we have a main and an additional result file. Thus, we use the
        |set_result_file_path| method to correctly add the main result file to the |DataSources|,
        explicitly giving its extension key as an argument, and the |add_file_path| method to add
        the additional result file.

        .. jupyter-execute::

            # Create the DataSources object
            my_data_sources_32 = dpf.DataSources()
            # Define the path to the main result file
            my_data_sources_32.set_result_file_path(filepath=result_file_path_32['cas'][0], key="cas")
            # Add the additional result file to the DataSources
            my_data_sources_32.add_file_path(filepath=result_file_path_32['dat'][0], key="dat")

    .. tab-item:: CFX

        **a) `.res` result file**

        You create the |DataSources| object by defining the path to the main result file.

        .. jupyter-execute::

            # Create the DataSources object
            my_data_sources_41 = dpf.DataSources(result_path=result_file_path_41)

        **b) `.cas.cff`, `.dat.cff` result files**

        Here we have a main and an additional result file. Thus, we use the
        |set_result_file_path| method to correctly add the main result file to the |DataSources|,
        explicitly giving its extension key as an argument, and the |add_file_path| method to add
        the additional result file.

        .. 
jupyter-execute::

            # Create the DataSources object
            my_data_sources_42 = dpf.DataSources()
            # Define the path to the main result file
            my_data_sources_42.set_result_file_path(filepath=result_file_path_42["cas"], key="cas")
            # Add the additional result file to the DataSources
            my_data_sources_42.add_file_path(filepath=result_file_path_42["dat"], key="dat")

Use a |Model|
-------------

The |Model| is a helper designed to give the user shortcuts to access the analysis results
metadata, by opening a |DataSources| or a streams container, and to instantiate result providers for it.

To create a |Model| you can give as argument either the result file path, in the case you are working
with a single result file with an explicit extension key, or a |DataSources| object.

.. tab-set::

    .. tab-item:: MAPDL

        **a) `.rst` result file**

        .. jupyter-execute::

            # Create the model with the result file path
            my_model_11 = dpf.Model(data_sources=result_file_path_11)

            # Create the model with the DataSources
            my_model_12 = dpf.Model(data_sources=my_data_sources_11)

        **b) `.mode`, `.rfrq` and `.rst` result files**

        .. jupyter-execute::

            # Create the model with the DataSources
            my_model_13 = dpf.Model(data_sources=my_data_sources_12)

    .. tab-item:: LSDYNA

        **a) `.d3plot` result file**

        .. jupyter-execute::

            # Create the model with the DataSources
            my_model_21 = dpf.Model(data_sources=my_data_sources_21)

        **b) `.binout` result file**

        .. jupyter-execute::

            # Create the model with the DataSources
            my_model_22 = dpf.Model(data_sources=my_data_sources_22)

    .. tab-item:: Fluent

        **a) `.flprj` result file**

        .. jupyter-execute::

            # Create the model with the result file path
            my_model_31 = dpf.Model(data_sources=result_file_path_31)

            # Create the model with the DataSources
            my_model_32 = dpf.Model(data_sources=my_data_sources_31)

        **b) `.cas.h5`, `.dat.h5` result files**

        .. jupyter-execute::

            # Create the model with the DataSources
            my_model_33 = dpf.Model(data_sources=my_data_sources_32)

    .. tab-item:: CFX

        **a) `.res` result file**

        .. jupyter-execute::

            # Create the model with the result file path
            my_model_41 = dpf.Model(data_sources=result_file_path_41)

            # Create the model with the DataSources
            my_model_42 = dpf.Model(data_sources=my_data_sources_41)

        **b) `.cas.cff`, `.dat.cff` result files**

        .. jupyter-execute::

            # Create the model with the DataSources
            my_model_43 = dpf.Model(data_sources=my_data_sources_42)


From 4ff68c339468cef2d4e76e82ca203af4ba713a05 Mon Sep 17 00:00:00 2001
From: luisaFelixSalles
Date: Fri, 22 Nov 2024 11:28:44 +0100
Subject: [PATCH 12/28] updates

---
 .../extract_and_explore_results_data.rst      | 21 +++++++++++++++
 .../tutorials/import_data/index.rst           |  7 +++----
 2 files changed, 24 insertions(+), 4 deletions(-)

diff --git a/doc/source/user_guide/tutorials/import_data/extract_and_explore_results_data.rst b/doc/source/user_guide/tutorials/import_data/extract_and_explore_results_data.rst
index 09c984905a..4cc427331a 100644
--- a/doc/source/user_guide/tutorials/import_data/extract_and_explore_results_data.rst
+++ b/doc/source/user_guide/tutorials/import_data/extract_and_explore_results_data.rst
@@ -1,2 +1,23 @@
 .. _ref_tutorials_extract_and_explore_results_data:
 
+====================
+Explore results data
+====================
+
+.. |Field| replace:: :class:`Field`
+.. 
|Examples| replace:: :mod:`Examples` + +This tutorial shows how to extract and explore results data from a result file. + +When you extract a result from a result file DPF stores it in a |Field|. +This |Field| will then contain the data of the result associated with it. + +When DPF-Core returns the |Field| object, what Python actually has is a client-side +representation of the |Field|, not the entirety of the |Field| itself. This means +that all the data of the field is stored within the DPF service. This is important +because when building your workflows, the most efficient way of interacting with result data +is to minimize the exchange of data between Python and DPF, either by using operators +or by accessing exclusively the data that is needed. + + + diff --git a/doc/source/user_guide/tutorials/import_data/index.rst b/doc/source/user_guide/tutorials/import_data/index.rst index a2d51f2beb..b366137b27 100644 --- a/doc/source/user_guide/tutorials/import_data/index.rst +++ b/doc/source/user_guide/tutorials/import_data/index.rst @@ -42,16 +42,15 @@ From result files :link-type: ref :text-align: center - This tutorial shows how to import a result file in DPF + This tutorial shows how to import a result file in DPF. .. grid-item-card:: Extract and explore results metadata :link: ref_tutorials_extract_and_explore_results_metadata :link-type: ref :text-align: center - This tutorial shows how to extract and explore results metadata (unit, - location, the scoping, the shape of the data stored ... ) extracted - from a result file. + This tutorial shows how to extract and explore results metadata (analysis type, + physics type, unit system ... ) from a result file. .. grid-item-card:: Extract and explore results data :link: ref_tutorials_extract_and_explore_results_data From cd775c8c0cd8144eddec5fcf8b530e6e65a4cbe6 Mon Sep 17 00:00:00 2001 From: luisaFelixSalles Date: Fri, 22 Nov 2024 15:27:34 +0100 Subject: [PATCH 13/28] add extract_and_explore_results_data.rst tutorial --- .../extract_and_explore_results_data.rst | 128 +++++++++++++++++- 1 file changed, 124 insertions(+), 4 deletions(-) diff --git a/doc/source/user_guide/tutorials/import_data/extract_and_explore_results_data.rst b/doc/source/user_guide/tutorials/import_data/extract_and_explore_results_data.rst index 4cc427331a..f936bf03a1 100644 --- a/doc/source/user_guide/tutorials/import_data/extract_and_explore_results_data.rst +++ b/doc/source/user_guide/tutorials/import_data/extract_and_explore_results_data.rst @@ -1,16 +1,20 @@ .. _ref_tutorials_extract_and_explore_results_data: -==================== -Explore results data -==================== +================================ +Extract and explore results data +================================ .. |Field| replace:: :class:`Field` .. |Examples| replace:: :mod:`Examples` +.. |Result| replace:: :class:`Result ` +.. |FieldsContainer| replace:: :class:`FieldsContainer` +.. |get_entity_data| replace:: :func:`get_entity_data()` +.. |get_entity_data_by_id| replace:: :func:`get_entity_data_by_id()` This tutorial shows how to extract and explore results data from a result file. When you extract a result from a result file DPF stores it in a |Field|. -This |Field| will then contain the data of the result associated with it. +This |Field| will contain the data of the result associated with it. When DPF-Core returns the |Field| object, what Python actually has is a client-side representation of the |Field|, not the entirety of the |Field| itself. 
This means @@ -19,5 +23,121 @@ because when building your workflows, the most efficient way of interacting with is to minimize the exchange of data between Python and DPF, either by using operators or by accessing exclusively the data that is needed. +The |Field| data is ordered with respect to its scoping ids (check the :ref:`reft_tutorials_narrow_down_data` +tutorial for more information on scoping manipulations). +Get the results +--------------- +Here we will download a result file available in our |Examples| package. +For more information about how to import your result file in DPF check +the :ref:`ref_tutorials_import_result_file` tutorial. + +Here we extract the displacement results. The displacement |Result| object gives a |FieldsContainer| when evaluated. +Thus, we will get a |Field| from this |FieldsContainer|. + +.. jupyter-execute:: + + # Import the ``ansys.dpf.core`` module, including examples files and the operators subpackage + from ansys.dpf import core as dpf + from ansys.dpf.core import examples + from ansys.dpf.core import operators as ops + + # Define the result file + result_file_path_1 = examples.download_transient_result() + + # Create the model + my_model_1 = dpf.Model(data_sources=result_file_path_1) + + # Extract the displacement results for the last time step + disp_results = my_model_1.results.displacement.on_last_time_freq.eval() + + # Get the displacement field for the last time step + my_disp_field = disp_results[0] + + print(my_disp_field) + +Extract all data from a field +----------------------------- + +You can extract the the entire data in the |Field| as an array (numpy array) or as a list. + +Data as an array +^^^^^^^^^^^^^^^^ + +.. jupyter-execute:: + + # Get the displacement data as an array + my_data_array = my_disp_field.data + print("Displacement data as an array: ", '\n', my_data_array) + +Note that this array is a genuine, local, numpy array (overloaded by the DPFArray): + +.. jupyter-execute:: + + print("Array type: ", type(my_data_array)) + +Data as a list +^^^^^^^^^^^^^^ + +.. jupyter-execute:: + + # Get the displacement data as a list + my_data_list = my_disp_field.data_as_list + print("Displacement data as a list: ", '\n', my_data_list) + +Extract specific data from a field +---------------------------------- + +If you need to access data for specific entities (node, element ...), you can extract it +based on its index (data position on the |Field| by using the |get_entity_data| method), or based +on the entities id (by using the |get_entity_data_by_id| method). + +Get the data by the entity index +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +.. jupyter-execute:: + + # Get the data from the third entity in the field + data_3_entity = my_disp_field.get_entity_data(index=3) + print("Data entity index=3: ", data_3_entity) + +Get the data by the entity ind +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +.. jupyter-execute:: + + # Get the data from the entity with id=533 + data_533_entity = my_disp_field.get_entity_data_by_id(id=533) + print("Data entity id=533: ", data_533_entity) + +Note that the element with id=533 would correspond to an index=2 within the |Field|. + +.. jupyter-execute:: + + # Get the index of the entity with id=533 + index_533_entity = my_disp_field.scoping.index(id=533) + print("Index entity id=533: ",index_533_entity) + +Be aware that scoping IDs are not sequential. You would get the id of the element in the 533 +position of the |Field| with: + +.. 
jupyter-execute:: + + # Get the id of the entity with index=533 + id_533_entity = my_disp_field.scoping.id(index=533) + print("Id entity index=533: ",id_533_entity) + + +While these methods are acceptable when requesting data for a few elements +or nodes, they should not be used when looping over the entire array. For efficiency, +a |Field|s data can be recovered locally before sending a large number of requests: + +.. jupyter-execute:: + + # Create a deep copy of the field that can be accessed and modified locally. + with my_disp_field.as_local_field() as f: + for i in my_disp_field.scoping.ids[2:50]: + f.get_entity_data_by_id(i) + + print(f) \ No newline at end of file From 5f3db1a7711a322532baf41742d470151dae2cde Mon Sep 17 00:00:00 2001 From: luisaFelixSalles Date: Fri, 22 Nov 2024 15:28:17 +0100 Subject: [PATCH 14/28] updates on extract_and_explore_results_metadata.rst --- .../import_data/extract_and_explore_results_metadata.rst | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/doc/source/user_guide/tutorials/import_data/extract_and_explore_results_metadata.rst b/doc/source/user_guide/tutorials/import_data/extract_and_explore_results_metadata.rst index 0ef98867e9..bfcbea2ca4 100644 --- a/doc/source/user_guide/tutorials/import_data/extract_and_explore_results_metadata.rst +++ b/doc/source/user_guide/tutorials/import_data/extract_and_explore_results_metadata.rst @@ -1,8 +1,8 @@ .. _ref_tutorials_extract_and_explore_results_metadata: -======================== -Explore results metadata -======================== +==================================== +Extract and explore results metadata +==================================== .. |Field| replace:: :class:`Field` .. |Examples| replace:: :mod:`Examples` @@ -37,8 +37,6 @@ Here we will download a result file available in our |Examples| package. For more information about how to import your result file in DPF check the :ref:`ref_tutorials_import_result_file` tutorial. -Here we get the displacement results. - .. jupyter-execute:: # Import the ``ansys.dpf.core`` module, including examples files and the operators subpackage From 7e46eba4739604da3fec9e9da521cd1198c45a1d Mon Sep 17 00:00:00 2001 From: luisaFelixSalles Date: Mon, 25 Nov 2024 16:39:42 +0100 Subject: [PATCH 15/28] add represent_data_on_dpf.rst tutorial --- .../import_data/represent_data_on_dpf.rst | 101 ++++++++++++++++++ 1 file changed, 101 insertions(+) diff --git a/doc/source/user_guide/tutorials/import_data/represent_data_on_dpf.rst b/doc/source/user_guide/tutorials/import_data/represent_data_on_dpf.rst index 38a6299292..75637032c9 100644 --- a/doc/source/user_guide/tutorials/import_data/represent_data_on_dpf.rst +++ b/doc/source/user_guide/tutorials/import_data/represent_data_on_dpf.rst @@ -1,2 +1,103 @@ .. _ref_tutorials_represent_data_on_dpf: +======================== +Manual input data on DPF +======================== + +.. |Field| replace:: :class:`Field` +.. |FieldsContainer| replace:: :class:`FieldsContainer` +.. |append| replace:: :func:`append()` +.. |data| replace:: :attr:`Field.data` +.. |scoping| replace:: :attr:`Field.scoping` + +This tutorial shows how to represent your manual input data in a DPF data storage structures. + +When handling data DPF uses |FieldsContainer| and |Field| to store and return it. The |Field| is a DPF array +and a collection of |Field| is called |FieldsContainer|. 
For more information on how the data is structured
in a |Field| and how the DPF data storage structures work, check the :ref:`ref_tutorials_data_structures`
tutorial section.

Here we create four 3D vector |Field| objects whose data comes from Python lists.

Defining the fields
-------------------

To manually import data into DPF, you first have to create the structure to store it.

Here we create each |Field| from scratch by instantiating this object. When using this approach the |Field| has
a vector nature by default. Check the :ref:`ref_tutorials_data_structures` tutorial section for more information
on other approaches.

We need four 3D vector |Field| objects:

.. jupyter-execute::

    # Import the ``ansys.dpf.core`` module
    from ansys.dpf import core as dpf

    # Create the fields
    # a. Define the number of entities
    num_entities_1 = 2

    # b. Instantiate the fields
    field_1 = dpf.Field(nentities=num_entities_1)
    field_2 = dpf.Field(nentities=num_entities_1)
    field_3 = dpf.Field(nentities=num_entities_1)
    field_4 = dpf.Field(nentities=num_entities_1)

    # c. Define the scoping ids (only for fields 3 and 4, whose data is set
    # through the ``data`` property; fields 1 and 2 get their scoping ids
    # when the data is appended)
    field_3.scoping.ids = range(num_entities_1)
    field_4.scoping.ids = range(num_entities_1)

    # d. Create a FieldsContainer
    fc_1 = dpf.fields_container_factory.over_time_freq_fields_container(fields=[field_1, field_2])

    # Check the Fields and the FieldsContainer
    print("Field 1: ", "\n" ,field_1, "\n")
    print("Field 2: ", "\n" ,field_2, "\n")
    print("Field 3: ", "\n" ,field_3, "\n")
    print("Field 4: ", "\n" ,field_4, "\n")
    print("FieldsContainer: ", "\n" ,fc_1, "\n")

Add data to the fields
----------------------

Here we define the data and then add it to the fields.

You can add data to a |Field| by using the |append| method if you have not set the |scoping| property
with the scoping ids, or by using the |data| property if you have already set the |scoping| property
with the scoping ids.

.. jupyter-execute::

    # Define and add the data to the fields
    # a. Using the append method

    # Define the Fields data
    data_11 = [1.0, 2.0, 3.0]
    data_12 = [4.0, 5.0, 6.0]
    data_21 = [7.0, 3.0, 5.0]
    data_22 = [8.0, 1.0, 2.0]

    # Add the data to the fields
    field_1.append(data=data_11, scopingid=0)
    field_1.append(data=data_12, scopingid=1)
    field_2.append(data=data_21, scopingid=0)
    field_2.append(data=data_22, scopingid=1)

    # b. Using the data property

    # Define the Fields data
    data_3b = [6.0, 5.0, 4.0, 3.0, 2.0, 1.0]
    data_4b = [4.0, 1.0, 8.0, 5.0, 7.0, 9.0]

    # Add the data to the fields
    field_3.data = data_3b
    field_4.data = data_4b

    # Check the Fields
    print("Field 1: ", "\n", field_1, "\n")
    print("Field 2: ", "\n", field_2, "\n")
    print("Field 3: ", "\n" ,field_3, "\n")
    print("Field 4: ", "\n" ,field_4, "\n")
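You can read the data back to confirm that each vector landed on the entity its scoping id
points to. This is a minimal check; it uses the ``get_entity_data_by_id()`` method of the
|Field|, which returns the data of a single entity:

.. jupyter-execute::

    # Get the data of the entity with the scoping id 1 in each field
    print("Entity id=1 of field 1: ", field_1.get_entity_data_by_id(1))
    print("Entity id=1 of field 3: ", field_3.get_entity_data_by_id(1))

From d113cfb88a139d836ca8cc306c857db591b6c520 Mon Sep 17 00:00:00 2001
From: luisaFelixSalles
Date: Mon, 25 Nov 2024 17:10:32 +0100
Subject: [PATCH 16/28] updates on the index page

---
 doc/source/user_guide/tutorials/import_data/index.rst | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/doc/source/user_guide/tutorials/import_data/index.rst b/doc/source/user_guide/tutorials/import_data/index.rst
index b366137b27..4944ce6d2a 100644
--- a/doc/source/user_guide/tutorials/import_data/index.rst
+++ b/doc/source/user_guide/tutorials/import_data/index.rst
@@ -20,9 +20,9 @@ From user input
     :link-type: ref
     :text-align: center
 
-      Learn how to import data in DPF from csv file
+      Learn how to import data in DPF from a csv file
 
-   .. 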
grid-item-card:: Represent your data in DPF + .. grid-item-card:: Manual input data on DPF :link: ref_tutorials_represent_data_on_dpf :link-type: ref :text-align: center From db6a7c57163d75295aaa11be3342d71d1a55b68c Mon Sep 17 00:00:00 2001 From: luisaFelixSalles Date: Mon, 25 Nov 2024 17:21:21 +0100 Subject: [PATCH 17/28] updates --- .../tutorials/import_data/import_result_file.rst | 2 +- doc/source/user_guide/tutorials/import_data/index.rst | 7 ------- 2 files changed, 1 insertion(+), 8 deletions(-) diff --git a/doc/source/user_guide/tutorials/import_data/import_result_file.rst b/doc/source/user_guide/tutorials/import_data/import_result_file.rst index e9c68e607d..8bcb8a1163 100644 --- a/doc/source/user_guide/tutorials/import_data/import_result_file.rst +++ b/doc/source/user_guide/tutorials/import_data/import_result_file.rst @@ -21,7 +21,7 @@ You have two approaches to import a result file in DPF: The |Model| extracts a large amount of information by default (results, mesh and analysis data). If using this helper takes a long time for processing the code, mind using a |DataSources| object - and instantiating operators directly with it. Check the ":ref:`get_mesh_mesh_provider`" for more + and instantiating operators directly with it. Check the :ref:`get_mesh_mesh_provider` for more information on how to get a mesh from a result file. Define the result file path diff --git a/doc/source/user_guide/tutorials/import_data/index.rst b/doc/source/user_guide/tutorials/import_data/index.rst index 4944ce6d2a..c7ead0bdde 100644 --- a/doc/source/user_guide/tutorials/import_data/index.rst +++ b/doc/source/user_guide/tutorials/import_data/index.rst @@ -15,13 +15,6 @@ From user input :padding: 2 :margin: 2 - .. grid-item-card:: Import data from csv file - :link: ref_tutorials - :link-type: ref - :text-align: center - - Learn how to import data in DPF from a csv file - .. grid-item-card:: Manual input data on DPF :link: ref_tutorials_represent_data_on_dpf :link-type: ref From e8ba0d41a6812ec3897e54598b86e38cb1e0c94f Mon Sep 17 00:00:00 2001 From: luisaFelixSalles Date: Fri, 6 Dec 2024 16:15:03 +0100 Subject: [PATCH 18/28] update the import_result_file.rst to the tutorials guidelines --- .../import_data/import_result_file.rst | 238 ++++++++++-------- 1 file changed, 136 insertions(+), 102 deletions(-) diff --git a/doc/source/user_guide/tutorials/import_data/import_result_file.rst b/doc/source/user_guide/tutorials/import_data/import_result_file.rst index 8bcb8a1163..f428b64da8 100644 --- a/doc/source/user_guide/tutorials/import_data/import_result_file.rst +++ b/doc/source/user_guide/tutorials/import_data/import_result_file.rst @@ -4,31 +4,31 @@ Import result file in DPF ========================= -.. |Model| replace:: :class:`Model ` -.. |DataSources| replace:: :class:`DataSources ` -.. |Examples| replace:: :mod:`Examples` +.. include:: ../../../links_and_refs.rst .. |set_result_file_path| replace:: :func:`set_result_file_path() ` .. |add_file_path| replace:: :func:`add_file_path() ` This tutorial shows how to import a result file in DPF. -You have two approaches to import a result file in DPF: +There are two approaches to import a result file in DPF: -- Using the |DataSources| object -- Using the |Model| object +- :ref:`Using the DataSources object ` +- :ref:`Using the Model object ` .. note:: The |Model| extracts a large amount of information by default (results, mesh and analysis data). 
If using this helper takes a long time for processing the code, mind using a |DataSources| object - and instantiating operators directly with it. Check the :ref:`get_mesh_mesh_provider` for more - information on how to get a mesh from a result file. + and instantiating operators directly with it. + +:jupyter-download-script:`Download tutorial as Python script` +:jupyter-download-notebook:`Download tutorial as Jupyter notebook` Define the result file path --------------------------- -Both approaches need a file path to be defined. Here we will download result files available in -our |Examples| package. +Both approaches need a file path to be defined. For this tutorial, you can use a result file available in +the |Examples| module. .. tab-set:: @@ -36,79 +36,89 @@ our |Examples| package. .. jupyter-execute:: - # Import the ``ansys.dpf.core`` module, including examples files and the operators subpackage + # Import the ``ansys.dpf.core`` module from ansys.dpf import core as dpf + # Import the examples module from ansys.dpf.core import examples + # Import the operators module from ansys.dpf.core import operators as ops - # Define the .rst result file + # Define the .rst result file path result_file_path_11 = examples.find_static_rst() - # Define the modal superposition harmonic analysis (.mode, .rfrq and .rst) result files + # Define the modal superposition harmonic analysis (.mode, .rfrq and .rst) result files paths result_file_path_12 = examples.download_msup_files_to_dict() - print("1:", "\n",result_file_path_11, "\n") - print("2:", "\n",result_file_path_12, "\n") + print("Result file path 11:", "\n",result_file_path_11, "\n") + print("Result files paths 12:", "\n",result_file_path_12, "\n") .. tab-item:: LSDYNA .. jupyter-execute:: - # Import the ``ansys.dpf.core`` module, including examples files and the operators subpackage + # Import the ``ansys.dpf.core`` module from ansys.dpf import core as dpf + # Import the examples module from ansys.dpf.core import examples + # Import the operators module from ansys.dpf.core import operators as ops - # Define the .d3plot result file + # Define the .d3plot result files paths result_file_path_21 = examples.download_d3plot_beam() - # Define the .binout result file + # Define the .binout result file path result_file_path_22 = examples.download_binout_matsum() - print("1:", "\n",result_file_path_21, "\n") - print("2:", "\n",result_file_path_22, "\n") + print("Result files paths 21:", "\n",result_file_path_21, "\n") + print("Result file path 22:", "\n",result_file_path_22, "\n") .. tab-item:: Fluent .. jupyter-execute:: - # Import the ``ansys.dpf.core`` module, including examples files and the operators subpackage + # Import the ``ansys.dpf.core`` module from ansys.dpf import core as dpf + # Import the examples module from ansys.dpf.core import examples + # Import the operators module from ansys.dpf.core import operators as ops - # Define the project .flprj result file + # Define the project .flprj result file path result_file_path_31 = examples.download_fluent_axial_comp()["flprj"] - # Define the CFF .cas.h5/.dat.h5 result files + # Define the CFF .cas.h5/.dat.h5 result files paths result_file_path_32 = examples.download_fluent_axial_comp() - print("1:", "\n",result_file_path_31, "\n") - print("2:", "\n",result_file_path_32, "\n") + print("Result file path 31:", "\n",result_file_path_31, "\n") + print("Result files paths 32:", "\n",result_file_path_32, "\n") .. tab-item:: CFX .. 
jupyter-execute:: - # Import the ``ansys.dpf.core`` module, including examples files and the operators subpackage + # Import the ``ansys.dpf.core`` module from ansys.dpf import core as dpf + # Import the examples module from ansys.dpf.core import examples + # Import the operators module from ansys.dpf.core import operators as ops - # Define the project .res result file + # Define the project .res result file path result_file_path_41 = examples.download_cfx_mixing_elbow() - # Define the CFF .cas.cff/.dat.cff result files + # Define the CFF .cas.cff/.dat.cff result files paths result_file_path_42 = examples.download_cfx_heating_coil() - print("1:", "\n",result_file_path_41, "\n") - print("2:", "\n",result_file_path_42, "\n") + print("Result file path 41:", "\n",result_file_path_41, "\n") + print("Result files paths 42:", "\n",result_file_path_42, "\n") + +.. _ref_import_result_file_data_sources: Use a |DataSources| ------------------- The |DataSources| object manages paths to their files. Use this object to declare data -inputs for DPF operators and define their locations. +inputs for PyDPF-Core APIs. .. tab-set:: @@ -116,12 +126,13 @@ inputs for DPF operators and define their locations. **a) `.rst` result file** - You create the |DataSources| object by defining the the path to the main result file. + Create the |DataSources| object and give the path to the result file to the *'result_path'* argument. .. jupyter-execute:: # Create the DataSources object - my_data_sources_11 = dpf.DataSources(result_path=result_file_path_11) + # Use the ``result_path`` argument and give the result file path + ds_11 = dpf.DataSources(result_path=result_file_path_11) **b) `.mode`, `.rfrq` and `.rst` result files** @@ -129,119 +140,144 @@ inputs for DPF operators and define their locations. to analyse a structure under given boundary conditions in a range of frequencies. Doing this expansion “on demand” in DPF instead of in the solver reduces the size of the result files. - The expansion is recursive in DPF: first the modal response is read. Then, “upstream” mode shapes are found in - the data sources, where they are read and expanded. + The expansion is recursive in DPF: first the modal response is read. Then, *upstream* mode shapes are found in + the |DataSources|, where they are read and expanded. Upstream refers to a source that provides data to a + particular process. - To create a recursive workflow you have to add the upstream data to the main |DataSources| object. Upstream refers - to a source that provides data to a particular process. + To create a recursive workflow add the upstream |DataSources| object, that contains the upstream + data files, to the main |DataSources| object. .. 
jupyter-execute:: - # Create the DataSources object - my_data_sources_12 = dpf.DataSources() - # Define the main result data - my_data_sources_12.set_result_file_path(filepath=result_file_path_12["rfrq"], key='rfrq') + # Create the main DataSources object + ds_12 = dpf.DataSources() + # Define the main result file path + ds_12.set_result_file_path(filepath=result_file_path_12["rfrq"], key='rfrq') - # Create the upstream DataSources object with the main upstream data - up_stream_ds_12 = dpf.DataSources(result_path=result_file_path_12["mode"]) - # Add the additional upstream data to the upstream DataSources object - up_stream_ds_12.add_file_path(filepath=result_file_path_12["rst"]) + # Create the upstream DataSources object with the main upstream file path + upstream_ds_12 = dpf.DataSources(result_path=result_file_path_12["mode"]) + # Add the additional upstream file path to the upstream DataSources object + upstream_ds_12.add_file_path(filepath=result_file_path_12["rst"]) # Add the upstream DataSources to the main DataSources object - my_data_sources_12.add_upstream(upstream_data_sources=up_stream_ds_12) + ds_12.add_upstream(upstream_data_sources=upstream_ds_12) .. tab-item:: LSDYNA **a) `.d3plot` result file** - This LS-DYNA d3plot file contains several individual results, each at different times. - The d3plot file does not contain information related to Units. In this case, as the + The d3plot file does not contain information related to units. In this case, as the simulation was run through Mechanical, a ``file.actunits`` file is produced. If this file is supplemented in the |DataSources|, the units will be correctly fetched for all results in the file as well as for the mesh. + Thus, we must use the |set_result_file_path| and the |add_file_path| methods to add the main + and the additional result file to the |DataSources| object. + .. jupyter-execute:: # Create the DataSources object - my_data_sources_21 = dpf.DataSources() - my_data_sources_21.set_result_file_path(filepath=result_file_path_21[0], key="d3plot") - my_data_sources_21.add_file_path(filepath=result_file_path_21[3], key="actunits") + ds_21 = dpf.DataSources() + + # Define the main result file path + ds_21.set_result_file_path(filepath=result_file_path_21[0], key="d3plot") + + # Add the additional file path related to the units + ds_21.add_file_path(filepath=result_file_path_21[3], key="actunits") **b) `.binout` result file** - The extension key ``.binout`` is not specified in the result file. Thus, we use the - |set_result_file_path| method to correctly implement the result file to the |DataSources| by giving - explicitly the extension key as an argument. + The extension key *`.binout`* is not explicitly specified in the result file. Thus, we use + the |set_result_file_path| method and give the extension key to the *'key'* argument to correctly + add the result file path to the |DataSources| object. .. jupyter-execute:: # Create the DataSources object - my_data_sources_22 = dpf.DataSources() - # Define the the path to the main result - my_data_sources_22.set_result_file_path(filepath=result_file_path_22, key="binout") + ds_22 = dpf.DataSources() + + # Define the path to the result file + # Use the ``key`` argument and give the file extension key + ds_22.set_result_file_path(filepath=result_file_path_22, key="binout") .. tab-item:: Fluent **a) `.flprj` result file** - You create the |DataSources| object by defining the the path to the main result file. 
+ Create the |DataSources| object and give the path to the result file to the *'result_path'* argument. .. jupyter-execute:: # Create the DataSources object - my_data_sources_31 = dpf.DataSources(result_path=result_file_path_31) + # Use the ``result_path`` argument and give the result file path + ds_31 = dpf.DataSources(result_path=result_file_path_31) **b) `.cas.h5`, `.dat.h5` result files** - Here we have a main and an additional result files. Thus, we use the - |set_result_file_path| method, to correctly implement the result file to the |DataSources| by giving - explicitly the first extension key as an argument, and the |add_file_path| method, to add the additional - result file. + Here, we have a main and an additional result file with two extensions keys. + + Thus, you must use the |set_result_file_path| and the |add_file_path| methods to add the main and + additional result file to the |DataSources| object and explicitly give the *first* extension key to + their *'key'* argument. .. jupyter-execute:: # Create the DataSources object - my_data_sources_32 = dpf.DataSources() + ds_32 = dpf.DataSources() + # Define the path to the main result file - my_data_sources_32.set_result_file_path(filepath=result_file_path_32['cas'][0], key="cas") - # Add the additional result file to the DataSources - my_data_sources_32.add_file_path(filepath=result_file_path_32['dat'][0], key="dat") + # Use the ``key`` argument and give the first extension key + ds_32.set_result_file_path(filepath=result_file_path_32['cas'][0], key="cas") + + # Add the additional result file path to the DataSources + # Use the ``key`` argument and give the first extension key + ds_32.add_file_path(filepath=result_file_path_32['dat'][0], key="dat") .. tab-item:: CFX **a) `.res` result file** - You create the |DataSources| object by defining the the path to the main result file. + Create the |DataSources| object and give the path to the result file to the *'result_path'* argument. .. jupyter-execute:: # Create the DataSources object - my_data_sources_41 = dpf.DataSources(result_path=result_file_path_41) + # Use the ``result_path`` argument and give the result file path + ds_41 = dpf.DataSources(result_path=result_file_path_41) **b) `.cas.cff`, `.dat.cff` result files** - Here we have a main and an additional result files. Thus, we use the - |set_result_file_path| method, to correctly implement the result file to the |DataSources| by giving - explicitly the first extension key as an argument, and the |add_file_path| method, to add the additional - result file. + Here, we have a main and an additional result file with two extensions keys. + + Thus, you must use the |set_result_file_path| and the |add_file_path| methods to add the main and + additional result file to the |DataSources| object. Also, you must explicitly give the *first* extension keys to + the *'key'* argument. .. 
jupyter-execute::

            # Create the DataSources object
            ds_42 = dpf.DataSources()

            # Define the path to the main result file
            # Use the ``key`` argument and give the first extension key
            ds_42.set_result_file_path(filepath=result_file_path_42["cas"], key="cas")

            # Add the additional result file path to the DataSources
            # Use the ``key`` argument and give the first extension key
            ds_42.add_file_path(filepath=result_file_path_42["dat"], key="dat")

.. _ref_import_result_file_model:

Use a |Model|
-------------

The |Model| is a helper designed to give shortcuts to access the analysis results
metadata and to instantiate results providers by opening a |DataSources| or a Streams object.

To create a |Model|, you can provide to the *'data_sources'* argument:

- The result file path, if you are working with a single result file that has an explicit extension key;
- A |DataSources| object.

.. tab-set::

    .. tab-item:: MAPDL

        **a) `.rst` result file**

        .. jupyter-execute::

            # Create the model with the result file path
            model_11 = dpf.Model(data_sources=result_file_path_11)

            # Create the model with the DataSources object
            model_12 = dpf.Model(data_sources=ds_11)

        **b) `.mode`, `.rfrq` and `.rst` result files**

        .. jupyter-execute::

            # Create the model with the DataSources object
            model_13 = dpf.Model(data_sources=ds_12)

    .. tab-item:: LSDYNA

        **a) `.d3plot` result file**

        .. jupyter-execute::

            # Create the model with the DataSources object
            model_21 = dpf.Model(data_sources=ds_21)

        **b) `.binout` result file**

        .. jupyter-execute::

            # Create the model with the DataSources object
            model_22 = dpf.Model(data_sources=ds_22)

    .. tab-item:: Fluent

        **a) `.flprj` result file**

        .. jupyter-execute::

            # Create the model with the result file path
            model_31 = dpf.Model(data_sources=result_file_path_31)

            # Create the model with the DataSources object
            model_32 = dpf.Model(data_sources=ds_31)

        **b) `.cas.h5`, `.dat.h5` result files**

        .. 
jupyter-execute:: - # Create the model with the DataSources - my_model_33 = dpf.Model(data_sources=my_data_sources_32) + # Create the model with the DataSources object + model_33 = dpf.Model(data_sources=ds_32) .. tab-item:: CFX - .. jupyter-execute:: - **a) `.res` result file** .. jupyter-execute:: # Create the model with the result file path - my_model_41 = dpf.Model(data_sources=result_file_path_41) + model_41 = dpf.Model(data_sources=result_file_path_41) - # Create the model with the DataSources - my_model_42 = dpf.Model(data_sources=my_data_sources_41) + # Create the model with the DataSources object + model_42 = dpf.Model(data_sources=ds_41) **b) `.cas.cff`, `.dat.cff` result files** .. jupyter-execute:: - # Create the model with the DataSources - my_model_43 = dpf.Model(data_sources=my_data_sources_42) + # Create the model with the DataSources object + model_43 = dpf.Model(data_sources=ds_42) From b81a7e45179aa8ec3aa8528aa499f654b425db23 Mon Sep 17 00:00:00 2001 From: luisaFelixSalles Date: Fri, 6 Dec 2024 16:15:21 +0100 Subject: [PATCH 19/28] update the extract_and_explore_results_metadata.rst to the tutorials guidelines --- .../extract_and_explore_results_metadata.rst | 185 ++++++++++-------- 1 file changed, 99 insertions(+), 86 deletions(-) diff --git a/doc/source/user_guide/tutorials/import_data/extract_and_explore_results_metadata.rst b/doc/source/user_guide/tutorials/import_data/extract_and_explore_results_metadata.rst index bfcbea2ca4..9b655adb5f 100644 --- a/doc/source/user_guide/tutorials/import_data/extract_and_explore_results_metadata.rst +++ b/doc/source/user_guide/tutorials/import_data/extract_and_explore_results_metadata.rst @@ -4,141 +4,154 @@ Extract and explore results metadata ==================================== -.. |Field| replace:: :class:`Field` -.. |Examples| replace:: :mod:`Examples` +.. include:: ../../../links_and_refs.rst .. |ResultInfo| replace:: :class:`ResultInfo` -You can explore the general results metadata before extracting them by using -the |ResultInfo| object. This metadata includes: - -- Analysis type; -- Physics type; -- Number of results; -- Unit system; -- Solver version, date and time; -- Job name; - -When you extract a result from a result file DPF stores it in a |Field|. -This |Field| will then contain the metadata for the result associated with it. -This metadata includes: - -- Location; -- Scoping; -- Shape of the data stored; -- Number of components; -- Units of the data. - This tutorial shows how to extract and explore results metadata from a result file. +:jupyter-download-script:`Download tutorial as Python script` +:jupyter-download-notebook:`Download tutorial as Jupyter notebook` + Get the result file ------------------- -Here we will download a result file available in our |Examples| package. -For more information about how to import your result file in DPF check -the :ref:`ref_tutorials_import_result_file` tutorial. +First, import a result file. For this tutorial, you can use one available in the |Examples| module. +For more information about how to import your own result file in DPF, see the :ref:`ref_tutorials_import_result_file` +tutorial. .. 
jupyter-execute::

    # Import the ``ansys.dpf.core`` module
    from ansys.dpf import core as dpf
    # Import the examples module
    from ansys.dpf.core import examples
    # Import the operators module
    from ansys.dpf.core import operators as ops

    # Define the result file path
    result_file_path_1 = examples.download_transient_result()

    # Create the model
    model_1 = dpf.Model(data_sources=result_file_path_1)

Explore the general results metadata
------------------------------------

You can explore the general results metadata, before extracting the results, by using
the |ResultInfo| object and its methods. This metadata includes:

- Analysis type;
- Physics type;
- Number of results;
- Unit system;
- Solver version, date and time;
- Job name.

.. jupyter-execute::

    # Define the ResultInfo object
    result_info_1 = model_1.metadata.result_info

    # Get the analysis type
    analysis_type = result_info_1.analysis_type
    # Print the analysis type
    print("Analysis type: ",analysis_type, "\n")

    # Get the physics type
    physics_type = result_info_1.physics_type
    # Print the physics type
    print("Physics type: ",physics_type, "\n")

    # Get the number of available results
    number_of_results = result_info_1.n_results
    # Print the number of available results
    print("Number of available results: ",number_of_results, "\n")

    # Get the unit system
    unit_system = result_info_1.unit_system
    # Print the unit system
    print("Unit system: ",unit_system, "\n")

    # Get the solver version, date and time
    solver_version = result_info_1.solver_version
    solver_date = result_info_1.solver_date
    solver_time = result_info_1.solver_time

    # Print the solver version, date and time
    print("Solver version: ",solver_version, "\n")
    print("Solver date: ", solver_date, "\n")
    print("Solver time: ",solver_time, "\n")

    # Get the job name
    job_name = result_info_1.job_name
    # Print the job name
    print("Job name: ",job_name, "\n")
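Before extracting any result, you can also list everything the file contains. The sketch below
relies on the ``available_results`` property of the |ResultInfo| object, which returns one entry
per available result:

.. jupyter-execute::

    # Loop over the available results and print their names
    for available_result in result_info_1.available_results:
        print(available_result.name)

Explore a result metadata
-------------------------

When you extract a result from a result file DPF stores it in a |Field|.
Thus, this |Field| contains the metadata for the result associated with it.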
+This metadata includes: + +- Location; +- Scoping (type and quantity of entities); +- Elementary data count (number of entities, how many data vectors we have); +- Components count (vectors dimension, here we have a displacement so we expect to have 3 components (X, Y and Z)); +- Shape of the data stored (tuple with the elementary data count and the components count); +- Fields size (length of the data entire vector (equal to the number of elementary data times the number of components)); +- Units of the data. Here we will explore the metadata of the displacement results. -Start by extracting the displacement results: +Start by extracting the displacement results. .. jupyter-execute:: # Extract the displacement results - disp_results = my_model_1.results.displacement.eval() + disp_results = model_1.results.displacement.eval() # Get the displacement field - my_disp_field = disp_results[0] + disp_field = disp_results[0] Explore the displacement results metadata: .. jupyter-execute:: - # Location of the displacement data - my_location = my_disp_field.location - print("Location: ", my_location,'\n') - - # Displacement field scoping - my_scoping = my_disp_field.scoping # type and quantity of entities - print("Scoping: ", '\n',my_scoping, '\n') - - my_scoping_ids = my_disp_field.scoping.ids # Available entities ids - print("Scoping ids: ", my_scoping_ids, '\n') - - # Elementary data count - # Number of entities (how many data vectors we have) - my_elementary_data_count = my_disp_field.elementary_data_count - print("Elementary data count: ", my_elementary_data_count, '\n') - - # Components count - # Vectors dimension, here we have a displacement so we expect to have 3 components (X, Y and Z) - my_components_count = my_disp_field.component_count - print("Components count: ", my_components_count, '\n') - - # Size - # Length of the data entire vector (equal to the number of elementary data times the number of components) - my_field_size = my_disp_field.size - print("Size: ", my_field_size, '\n') - - # Fields shape - # Gives a tuple with the elementary data count and the components count - my_shape = my_disp_field.shape - print("Shape: ", my_shape, '\n') - - # Units - my_unit = my_disp_field.unit - print("Unit: ", my_unit, '\n') + # Get the location of the displacement data + location = disp_field.location + # Print the location + print("Location: ", location,'\n') + + # Get the displacement Field scoping + scoping = disp_field.scoping + # Print the Field scoping + print("Scoping: ", '\n',scoping, '\n') + + # Get the displacement Field scoping ids + scoping_ids = disp_field.scoping.ids # Available entities ids + # Print the Field scoping ids + print("Scoping ids: ", scoping_ids, '\n') + + # Get the displacement Field elementary data count + elementary_data_count = disp_field.elementary_data_count + # Print the elementary data count + print("Elementary data count: ", elementary_data_count, '\n') + + # Get the displacement Field components count + components_count = disp_field.component_count + # Print the components count + print("Components count: ", components_count, '\n') + + # Get the displacement Field size + field_size = disp_field.size + # Print the Field size + print("Size: ", field_size, '\n') + + # Get the displacement Field shape + shape = disp_field.shape + # Print the Field shape + print("Shape: ", shape, '\n') + + # Get the displacement Field unit + unit = disp_field.unit + # Print the displacement Field unit + print("Unit: ", unit, '\n') \ No newline at end of file From 
eb67b5745600f7d1d1622497aa42771175ab6de8 Mon Sep 17 00:00:00 2001 From: luisaFelixSalles Date: Fri, 6 Dec 2024 16:15:35 +0100 Subject: [PATCH 20/28] update the extract_and_explore_results_data.rst to the tutorials guidelines --- .../extract_and_explore_results_data.rst | 141 ++++++++++-------- 1 file changed, 81 insertions(+), 60 deletions(-) diff --git a/doc/source/user_guide/tutorials/import_data/extract_and_explore_results_data.rst b/doc/source/user_guide/tutorials/import_data/extract_and_explore_results_data.rst index f936bf03a1..d88bb01a9d 100644 --- a/doc/source/user_guide/tutorials/import_data/extract_and_explore_results_data.rst +++ b/doc/source/user_guide/tutorials/import_data/extract_and_explore_results_data.rst @@ -4,63 +4,68 @@ Extract and explore results data ================================ -.. |Field| replace:: :class:`Field` -.. |Examples| replace:: :mod:`Examples` -.. |Result| replace:: :class:`Result ` -.. |FieldsContainer| replace:: :class:`FieldsContainer` +.. include:: ../../../links_and_refs.rst .. |get_entity_data| replace:: :func:`get_entity_data()` .. |get_entity_data_by_id| replace:: :func:`get_entity_data_by_id()` This tutorial shows how to extract and explore results data from a result file. When you extract a result from a result file DPF stores it in a |Field|. -This |Field| will contain the data of the result associated with it. +Thus, this |Field| contains the data of the result associated with it. -When DPF-Core returns the |Field| object, what Python actually has is a client-side -representation of the |Field|, not the entirety of the |Field| itself. This means -that all the data of the field is stored within the DPF service. This is important -because when building your workflows, the most efficient way of interacting with result data -is to minimize the exchange of data between Python and DPF, either by using operators -or by accessing exclusively the data that is needed. +.. note:: -The |Field| data is ordered with respect to its scoping ids (check the :ref:`reft_tutorials_narrow_down_data` -tutorial for more information on scoping manipulations). + When DPF-Core returns the |Field| object, what Python actually has is a client-side + representation of the |Field|, not the entirety of the |Field| itself. This means + that all the data of the field is stored within the DPF service. This is important + because when building your workflows, the most efficient way of interacting with result data + is to minimize the exchange of data between Python and DPF, either by using operators + or by accessing exclusively the data that is needed. -Get the results ---------------- +:jupyter-download-script:`Download tutorial as Python script` +:jupyter-download-notebook:`Download tutorial as Jupyter notebook` -Here we will download a result file available in our |Examples| package. -For more information about how to import your result file in DPF check -the :ref:`ref_tutorials_import_result_file` tutorial. +Get the result file +------------------- -Here we extract the displacement results. The displacement |Result| object gives a |FieldsContainer| when evaluated. -Thus, we will get a |Field| from this |FieldsContainer|. +First, import a result file. For this tutorial, you can use one available in the |Examples| module. +For more information about how to import your own result file in DPF, see the :ref:`ref_tutorials_import_result_file` +tutorial. + +Here, we extract the displacement results. The displacement |Result| object gives a |FieldsContainer| when evaluated. 
+Thus, we get a |Field| from this |FieldsContainer|. .. jupyter-execute:: - # Import the ``ansys.dpf.core`` module, including examples files and the operators subpackage + # Import the ``ansys.dpf.core`` module from ansys.dpf import core as dpf + # Import the examples module from ansys.dpf.core import examples + # Import the operators module from ansys.dpf.core import operators as ops - # Define the result file + # Define the result file path result_file_path_1 = examples.download_transient_result() # Create the model - my_model_1 = dpf.Model(data_sources=result_file_path_1) + model_1 = dpf.Model(data_sources=result_file_path_1) # Extract the displacement results for the last time step - disp_results = my_model_1.results.displacement.on_last_time_freq.eval() + disp_results = model_1.results.displacement.on_last_time_freq.eval() # Get the displacement field for the last time step - my_disp_field = disp_results[0] + disp_field = disp_results[0] + + # Print the displacement Field + print(disp_field) - print(my_disp_field) +Extract all the data from a |Field| +----------------------------------- -Extract all data from a field ------------------------------ +You can extract the entire data in a |Field| as: -You can extract the the entire data in the |Field| as an array (numpy array) or as a list. +- An array (numpy array); +- A list. Data as an array ^^^^^^^^^^^^^^^^ @@ -68,14 +73,17 @@ Data as an array .. jupyter-execute:: # Get the displacement data as an array - my_data_array = my_disp_field.data - print("Displacement data as an array: ", '\n', my_data_array) + data_array = disp_field.data + + # Print the data as an array + print("Displacement data as an array: ", '\n', data_array) Note that this array is a genuine, local, numpy array (overloaded by the DPFArray): .. jupyter-execute:: - print("Array type: ", type(my_data_array)) + # Print the array type + print("Array type: ", type(data_array)) Data as a list ^^^^^^^^^^^^^^ @@ -83,61 +91,74 @@ Data as a list .. jupyter-execute:: # Get the displacement data as a list - my_data_list = my_disp_field.data_as_list - print("Displacement data as a list: ", '\n', my_data_list) + data_list = disp_field.data_as_list + # Print the data as a list + print("Displacement data as a list: ", '\n', data_list) Extract specific data from a field ---------------------------------- -If you need to access data for specific entities (node, element ...), you can extract it -based on its index (data position on the |Field| by using the |get_entity_data| method), or based -on the entities id (by using the |get_entity_data_by_id| method). +If you need to access data for specific entities (node, element ...), you can extract it with two approaches: -Get the data by the entity index -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +- :ref:`Based on its index ` (data position on the |Field|) by using the |get_entity_data| method; +- :ref:`Based on the entities id ` by using the |get_entity_data_by_id| method. + +The |Field| data is organized with respect to its scoping ids. Note that the element with id=533 +would correspond to an index=2 within the |Field|. .. 
jupyter-execute:: - # Get the data from the third entity in the field - data_3_entity = my_disp_field.get_entity_data(index=3) - print("Data entity index=3: ", data_3_entity) + # Get the index of the entity with id=533 + index_533_entity = disp_field.scoping.index(id=533) + # Print the index + print("Index entity id=533: ",index_533_entity) -Get the data by the entity ind -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +Be aware that scoping IDs are not sequential. You would get the id of the element in the 533 +position of the |Field| with: .. jupyter-execute:: - # Get the data from the entity with id=533 - data_533_entity = my_disp_field.get_entity_data_by_id(id=533) - print("Data entity id=533: ", data_533_entity) + # Get the id of the entity with index=533 + id_533_entity = disp_field.scoping.id(index=533) + print("Id entity index=533: ",id_533_entity) + +.. _ref_extract_specific_data_by_index: -Note that the element with id=533 would correspond to an index=2 within the |Field|. +Get the data by the entity index +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ .. jupyter-execute:: - # Get the index of the entity with id=533 - index_533_entity = my_disp_field.scoping.index(id=533) - print("Index entity id=533: ",index_533_entity) + # Get the data from the third entity in the field + data_3_entity = disp_field.get_entity_data(index=3) + # Print the data + print("Data entity index=3: ", data_3_entity) -Be aware that scoping IDs are not sequential. You would get the id of the element in the 533 -position of the |Field| with: +.. _ref_extract_specific_data_by_id: + +Get the data by the entity id +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ .. jupyter-execute:: - # Get the id of the entity with index=533 - id_533_entity = my_disp_field.scoping.id(index=533) - print("Id entity index=533: ",id_533_entity) + # Get the data from the entity with id=533 + data_533_entity = disp_field.get_entity_data_by_id(id=533) + # Print the data + print("Data entity id=533: ", data_533_entity) +Extract specific data from a field using a loop over the array +-------------------------------------------------------------- -While these methods are acceptable when requesting data for a few elements +While the methods above are acceptable when requesting data for a few elements or nodes, they should not be used when looping over the entire array. For efficiency, -a |Field|s data can be recovered locally before sending a large number of requests: +a |Field| data can be recovered locally before sending a large number of requests: .. jupyter-execute:: # Create a deep copy of the field that can be accessed and modified locally. - with my_disp_field.as_local_field() as f: - for i in my_disp_field.scoping.ids[2:50]: + with disp_field.as_local_field() as f: + for i in disp_field.scoping.ids[2:50]: f.get_entity_data_by_id(i) + # Print the field print(f) \ No newline at end of file From b51ee8c41411540b80252e34f671989e20c86022 Mon Sep 17 00:00:00 2001 From: luisaFelixSalles Date: Fri, 6 Dec 2024 16:16:12 +0100 Subject: [PATCH 21/28] update the index.rst to the tutorials guidelines --- doc/source/user_guide/tutorials/import_data/index.rst | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/doc/source/user_guide/tutorials/import_data/index.rst b/doc/source/user_guide/tutorials/import_data/index.rst index c7ead0bdde..ef9e979252 100644 --- a/doc/source/user_guide/tutorials/import_data/index.rst +++ b/doc/source/user_guide/tutorials/import_data/index.rst @@ -15,12 +15,12 @@ From user input :padding: 2 :margin: 2 - .. 
grid-item-card:: Manual input data on DPF - :link: ref_tutorials_represent_data_on_dpf + .. grid-item-card:: Use custom data + :link: ref_tutorials_field_with_custom_data :link-type: ref :text-align: center - Learn how to represent your manual input data in a DPF data storage structures + Learn how to build DPF data storage structures from custom data. From result files ***************** @@ -65,7 +65,7 @@ From result files :maxdepth: 2 :hidden: - represent_data_on_dpf.rst + field_with_custom_data.rst import_result_file.rst extract_and_explore_results_metadata.rst extract_and_explore_results_data.rst From 2e152ee4bbf1a068985a8416881cbcc613af2ac5 Mon Sep 17 00:00:00 2001 From: luisaFelixSalles Date: Fri, 6 Dec 2024 16:46:48 +0100 Subject: [PATCH 22/28] update the narrow_down_data.rst to the tutorials guidelines --- .../import_data/narrow_down_data.rst | 308 +++++++++++------- 1 file changed, 184 insertions(+), 124 deletions(-) diff --git a/doc/source/user_guide/tutorials/import_data/narrow_down_data.rst b/doc/source/user_guide/tutorials/import_data/narrow_down_data.rst index 73f4f0ce1a..d29611d79b 100644 --- a/doc/source/user_guide/tutorials/import_data/narrow_down_data.rst +++ b/doc/source/user_guide/tutorials/import_data/narrow_down_data.rst @@ -4,31 +4,34 @@ Narrow down data ================ -.. |Field| replace:: :class:`Field` -.. |FieldsContainer| replace:: :class:`FieldsContainer` -.. |Scoping| replace:: :class:`Scoping` -.. |MeshedRegion| replace:: :class:`MeshedRegion ` +.. include:: ../../../links_and_refs.rst +.. |location| replace:: :class:`location` .. |time_freq_scoping_factory| replace:: :mod:`time_freq_scoping_factory` .. |mesh_scoping_factory| replace:: :mod:`mesh_scoping_factory` -.. |Model| replace:: :class:`Model ` .. |displacement| replace:: :class:`result.displacement ` .. |Model.results| replace:: :func:`Model.results ` -.. |Examples| replace:: :mod:`Examples` .. |result op| replace:: :mod:`result` -.. |Result| replace:: :class:`Result ` .. |rescope| replace:: :class:`rescope ` .. |from_mesh| replace:: :class:`from_mesh ` .. |extract_scoping| replace:: :class:`extract_scoping ` +.. |scoping_by_sets| replace:: :func:`scoping_by_sets() ` +.. |nodal_scoping| replace:: :func:`nodal_scoping() ` +.. |ScopingsContainer| replace:: :class:`ScopingsContainer ` -To begin the workflow set up, you need to establish the ``scoping``, that is -a spatial and/or temporal subset of the simulation data. This tutorial explains -how to scope your results over time and mesh domains. +This tutorial explains how to scope your results over time and mesh domains. + +:jupyter-download-script:`Download tutorial as Python script` +:jupyter-download-notebook:`Download tutorial as Jupyter notebook` -Understanding a scope ---------------------- +Understanding the scope +----------------------- + +To begin the workflow set up, you need to establish the ``scoping``, that is +a spatial and/or temporal subset of the simulation data. The data in DPF is represented by a |Field|. Thus, narrow down your results means scoping your |Field|. -To do so in DPF you use the |Scoping| object. +To do so in DPF, you use the |Scoping| object. You can retrieve all the time steps available for +a result, but you can also filter them. .. note:: @@ -37,190 +40,247 @@ To do so in DPF you use the |Scoping| object. that all the data of the field is stored within the DPF service. 
This is important
    because when building your workflows, the most efficient way of interacting with result data
    is to minimize the exchange of data between Python and DPF, either by using operators
    or by accessing exclusively the data that is needed. For more information on the DPF data storage
    structures, see :ref:`ref_tutorials_data_structures`.

In conclusion, the essence of a scoping is to specify a set of time or mesh entities by defining a range of IDs:

.. image:: ../../../images/drawings/scoping-eg.png
   :align: center

Create a |Scoping| object from scratch
--------------------------------------

The |Scoping| object can be created by:

- :ref:`Instantiating the Scoping class <ref_create_scoping_instance_object>`;
- :ref:`Using the scoping factory <ref_create_scoping_scoping_factory>`.

.. jupyter-execute::

    # Import the ``ansys.dpf.core`` module
    from ansys.dpf import core as dpf

.. _ref_create_scoping_instance_object:

Instantiate a |Scoping|
^^^^^^^^^^^^^^^^^^^^^^^

Create a time and a mesh scoping by instantiating the |Scoping| object. Use the *'ids'* and *'location'* arguments
and give the entity ids and the |location| of interest.

- Time scoping

A time location in DPF is a |TimeFreqSupport| object. Thus, we choose a *'time_freq'* |location| and target
a set of times by their ids.

.. jupyter-execute::

    # Define a time list that targets the times ids 14, 15, 16, 17
    time_list_1 = [14, 15, 16, 17]

    # Create the time Scoping object
    time_scoping_1 = dpf.Scoping(ids=time_list_1, location=dpf.locations.time_freq)

- Mesh scoping

Here, we choose a nodal |location| and target a set of nodes by their ids.

.. jupyter-execute::

    # Define a nodes list that targets the nodes with the ids 103, 204, 334, 1802
    nodes_ids_1 = [103, 204, 334, 1802]

    # Create the mesh Scoping object
    mesh_scoping_1 = dpf.Scoping(ids=nodes_ids_1, location=dpf.locations.nodal)
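You can verify what a |Scoping| holds right after creating it. This quick check is a
sketch that relies only on the ``ids`` and ``location`` attributes already used above:

.. jupyter-execute::

    # Check the location and the ids of the mesh Scoping
    print("Location: ", mesh_scoping_1.location)
    print("Ids: ", mesh_scoping_1.ids)

.. _ref_create_scoping_scoping_factory:

Use the scoping factory module
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

Create a |Scoping| object by using the |time_freq_scoping_factory| module for a temporal scoping
and the |mesh_scoping_factory| module for a spatial scoping.

- Time scoping

Here, we use the |scoping_by_sets| function so we can have different time steps in the scoping. 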
+.. _ref_create_scoping_scoping_factory:
+
+Use the scoping factory module
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Create a |Scoping| object by using the |time_freq_scoping_factory| module for a temporal scoping
+and the |mesh_scoping_factory| module for a spatial scoping.
+
+- Time scoping
+
+Here, we use the |scoping_by_sets| function so that we can have different time steps in the scoping. This function
+gives a Scoping on a *'time_freq'* |location|.

 .. jupyter-execute::

-    # 1) Using the Scoping class in a nodal location
-    # a. Define a nodes list that targets the nodes with the ids 103, 204, 334, 1802
-    my_nodes_ids_1 = [103, 204, 334, 1802]
-    # b. Create the mesh scoping object
-    my_mesh_scoping_1 = dpf.Scoping(ids=my_nodes_ids_1, location=dpf.locations.nodal)
+    # Define a time list that targets the times ids 14, 15, 16, 17
+    time_list_2 = [14, 15, 16, 17]
+
+    # Create the time Scoping object
+    time_scoping_2 = dpf.time_freq_scoping_factory.scoping_by_sets(cumulative_sets=time_list_2)

-    # 2) Using the mesh_scoping_factory class
-    # a. Define a nodes list that targets the nodes with the ids 103, 204, 334, 1802
-    my_nodes_ids_2 = [103, 204, 334, 1802]
-    # b. Create the mesh scoping object
-    my_mesh_scoping_2 = dpf.mesh_scoping_factory.nodal_scoping(node_ids=my_nodes_ids_2)
+- Mesh scoping
+
+Here, we use the |nodal_scoping| function so that we get a mesh scoping on a nodal |location|.
+
+.. jupyter-execute::
+
+    # Define a nodes list that targets the nodes with the ids 103, 204, 334, 1802
+    nodes_ids_2 = [103, 204, 334, 1802]
+
+    # Create the mesh Scoping object
+    mesh_scoping_2 = dpf.mesh_scoping_factory.nodal_scoping(node_ids=nodes_ids_2)

 Extract a |Scoping|
 -------------------

-A mesh |Scoping| can be extracted from:
+You can extract a mesh |Scoping| from several DPF objects:
+
+- A |MeshedRegion|;
+- A |FieldsContainer|;
+- A |Field|.

-- A |MeshedRegion| with the |from_mesh| operator;
-- A |FieldsContainer| with the |extract_scoping| operator;
-- A |Field| with the |extract_scoping| operator.
+Define the objects
+^^^^^^^^^^^^^^^^^^

-Get the results file
-^^^^^^^^^^^^^^^^^^^^
+First, import a result file. For this tutorial, you can use one available in the |Examples| module.
+For more information about how to import your own result file in DPF, see the :ref:`ref_tutorials_import_result_file`
+tutorial.

-Here we will download a result file available in our |Examples| package.
-For more information about how to import your result file in DPF check
-the :ref:`ref_tutorials_import_result_file` tutorial.
+From this result file, we extract:
+
+- The mesh, which in DPF is a |MeshedRegion| object;
+- The displacement results. The displacement |Result| object gives a |FieldsContainer| when evaluated. Additionally,
+  we can get a |Field| from this |FieldsContainer|.

.. 
jupyter-execute:: - # Import the ``ansys.dpf.core`` module, including examples files and the operators subpackage + # Import the ``ansys.dpf.core`` module from ansys.dpf import core as dpf + # Import the examples module from ansys.dpf.core import examples + # Import the operators module from ansys.dpf.core import operators as ops - # Define the result file + # Define the result file path result_file_path_1 = examples.download_transient_result() + # Create the DataSources object + ds_1 = dpf.DataSources(result_path=result_file_path_1) # Create the model - my_model_1 = dpf.Model(data_sources=result_file_path_1) - # Get the meshed region - my_meshed_region_1 = my_model_1.metadata.meshed_region - # Get a FieldsContainer - my_fc = my_model_1.results.displacement.on_all_time_freqs.eval() - # Get a Field - my_field = my_fc[0] + model_1 = dpf.Model(data_sources=ds_1) + + # Get the MeshedRegion + meshed_region_1 = model_1.metadata.meshed_region -Extract the |Scoping| -^^^^^^^^^^^^^^^^^^^^^ + # Get a FieldsContainer with the displacement results + fc = model_1.results.displacement.on_all_time_freqs.eval() + + # Get a Field from the FieldsContainer + field = fc[0] + +Extract the mesh |Scoping| +^^^^^^^^^^^^^^^^^^^^^^^^^^ + +- From the |MeshedRegion| +~~~~~~~~~~~~~~~~~~~~~~~~~ + +Extract the mesh scoping from the |MeshedRegion| using the |from_mesh| operator. .. jupyter-execute:: - # 3) Extract the scoping from a mesh - my_mesh_scoping_3 = ops.scoping.from_mesh(mesh=my_meshed_region_1).eval() - print("Scoping from mesh", "\n", my_mesh_scoping_3, "\n") + # Extract the mesh scoping + mesh_scoping_3 = ops.scoping.from_mesh(mesh=meshed_region_1).eval() - # 4) Extract the scoping from a FieldsContainer - extract_scop_fc_op = ops.utility.extract_scoping(field_or_fields_container=my_fc) - my_mesh_scoping_4 = extract_scop_fc_op.outputs.mesh_scoping_as_scopings_container() - print("Scoping from FieldsContainer", "\n", my_mesh_scoping_4, "\n") + # Print the mesh Scoping + print("Scoping from mesh", "\n", mesh_scoping_3, "\n") - # 5) Extract the scoping from a Field - my_mesh_scoping_5 = ops.utility.extract_scoping(field_or_fields_container=my_field).eval() - print("Scoping from Field", "\n", my_mesh_scoping_5, "\n") +- From the |FieldsContainer| +~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -Use a |Scoping| ---------------- +Extract the mesh Scoping from the |FieldsContainer| using the |extract_scoping| operator. This operator gets the mesh +Scoping for each |Field| in the |FieldsContainer|. Thus, you must specify the output as a |ScopingsContainer|. -The |Scoping| object can be used : +.. jupyter-execute:: + + # Define the extract_scoping operator + extract_scop_fc_op = ops.utility.extract_scoping(field_or_fields_container=fc) -- As an input to a |result op| operator; -- As an |Result| argument when you extract results using the |Model.results| method; -- With the |Result| object methods. + # Get the mesh Scopings from the operators output + mesh_scoping_4 = extract_scop_fc_op.outputs.mesh_scoping_as_scopings_container() -The mesh scoping can also be changed after the result extraction or manipulation by using the -|rescope| operator with a |Field| or |FieldsContainer|. + # Print the mesh Scopings + print("Scoping from FieldsContainer", "\n", mesh_scoping_4, "\n") -Get the results file -^^^^^^^^^^^^^^^^^^^^ +- From the |Field| +~~~~~~~~~~~~~~~~~~~ -Here we will download a result file available in our |Examples| package. 
-For more information about how to import your result file in DPF check -the :ref:`ref_tutorials_import_result_file` tutorial. +Extract the mesh scoping from the |Field| using the |extract_scoping| operator. .. jupyter-execute:: - # Import the ``ansys.dpf.core`` module, including examples files and the operators subpackage - from ansys.dpf import core as dpf - from ansys.dpf.core import examples - from ansys.dpf.core import operators as ops + # Extract the mesh scoping + mesh_scoping_5 = ops.utility.extract_scoping(field_or_fields_container=field).eval() - # Define the result file - result_file_path_1 = examples.download_transient_result() - # Create the DataSources object - my_data_sources_1 = dpf.DataSources(result_path=result_file_path_1) - # Create the model - my_model_1 = dpf.Model(data_sources=my_data_sources_1) + # Print the mesh Scoping + print("Scoping from Field", "\n", mesh_scoping_5, "\n") + + +Use a |Scoping| +--------------- + +The |Scoping| object can be used : + +- :ref:`When extracting a result`; +- :ref:`After extracting a result`. + +.. _ref_use_scoping_when_extracting: Extract and scope the results ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -Here we extract and scope the displacement results. +You can extract and scope a result using the |Model.results| method or the |result op| operator inputs. +Those two approaches handle |Result| objects. Thus, to scope the results when extracting them you use +the *'time_scoping'* and *'mesh_scoping'* arguments and give the Scopings of interest. -.. jupyter-execute:: +Here, we extract and scope the displacement results. - # 1) Using the result.displacement operator - disp_op = ops.result.displacement(data_sources=my_data_sources_1, - time_scoping=my_time_scoping_1, - mesh_scoping=my_mesh_scoping_1).eval() +.. jupyter-execute:: - # 2) Using the Model.results - disp_model = my_model_1.results.displacement(time_scoping=my_time_scoping_1, mesh_scoping=my_mesh_scoping_1).eval() + # Extract and scope the result using the Model.results method + disp_model = model_1.results.displacement(time_scoping=time_scoping_1, mesh_scoping=mesh_scoping_1).eval() - # 3) Using a Result object method - disp_result_method_1 = my_model_1.results.displacement.on_time_scoping(time_scoping=my_time_scoping_1).on_mesh_scoping(mesh_scoping=my_mesh_scoping_1).eval() - disp_result_method_2 = my_model_1.results.displacement.on_first_time_freq.eval() + # Extract and scope the results using the result.displacement operator + disp_op = ops.result.displacement(data_sources=ds_1, time_scoping=time_scoping_1, mesh_scoping=mesh_scoping_1).eval() - print("Displacement from result.displacement operator", "\n", disp_op, "\n") + # Print the displacement results print("Displacement from Model.results ", "\n", disp_model, "\n") - print("Scoping from Result object method 1", "\n", disp_result_method_1, "\n") - print("Scoping from Result object method 1", "\n", disp_result_method_2, "\n") + print("Displacement from result.displacement operator", "\n", disp_op, "\n") + +.. _ref_use_scoping_after_extracting: Extract and rescope the results -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -Here we rescope the displacement results. +The mesh scoping can be changed after the result extraction or manipulation by using the +|rescope| operator. It takes a |Field| or |FieldsContainer| that contains the results data +and rescope them. + +Here, we rescope the displacement results. .. 
jupyter-execute::

-    # 1) Extract the results for the entire mesh
-    disp_all_mesh = my_model_1.results.displacement.eval()
+    # Extract the results for the entire mesh
+    disp_all_mesh = model_1.results.displacement.eval()
+
+    # Rescope the displacement results to get the data only for a specific set of nodes
+    disp_rescope = ops.scoping.rescope(fields=disp_all_mesh, mesh_scoping=mesh_scoping_1).eval()
+
+    # Print the displacement results for the entire mesh
+    print("Displacement results for the entire mesh", "\n", disp_all_mesh, "\n")
+
+    # Print the displacement results for the specific set of nodes
+    print("Displacement results rescoped", "\n", disp_rescope, "\n")

-    # 2) Rescope the displacement results
-    disp_rescope = ops.scoping.rescope(fields=disp_all_mesh, mesh_scoping=my_mesh_scoping_1).eval()
-    print("Displacement on all the mesh", "\n", disp_all_mesh, "\n")
-    print("Displacement rescoped ", "\n", disp_rescope, "\n")

From 9c18913f1ebb75047810bf5947aa6fb063d49716 Mon Sep 17 00:00:00 2001
From: luisaFelixSalles
Date: Wed, 11 Dec 2024 11:40:59 +0100
Subject: [PATCH 23/28] update the load_custom_data.rst to the tutorials
 guidelines
---
 .../import_data/load_custom_data.rst          | 669 ++++++++++++++++++
 .../import_data/represent_data_on_dpf.rst     | 103 ---
 2 files changed, 669 insertions(+), 103 deletions(-)
 create mode 100644 doc/source/user_guide/tutorials/import_data/load_custom_data.rst
 delete mode 100644 doc/source/user_guide/tutorials/import_data/represent_data_on_dpf.rst

diff --git a/doc/source/user_guide/tutorials/import_data/load_custom_data.rst b/doc/source/user_guide/tutorials/import_data/load_custom_data.rst
new file mode 100644
index 0000000000..fb84c145ec
--- /dev/null
+++ b/doc/source/user_guide/tutorials/import_data/load_custom_data.rst
@@ -0,0 +1,669 @@
+.. _ref_tutorials_load_custom_data:
+
+=======================
+Load custom data in DPF
+=======================
+
+.. include:: ../../../links_and_refs.rst
+.. |Field.append| replace:: :func:`append()`
+.. |Field.data| replace:: :attr:`Field.data`
+.. |fields_factory| replace:: :mod:`fields_factory`
+.. |fields_container_factory| replace:: :mod:`fields_container_factory`
+.. |location| replace:: :class:`location`
+.. |nature| replace:: :class:`nature`
+.. |dimensionality| replace:: :class:`dimensionality`
+.. |Field.dimensionality| replace:: :func:`Field.dimensionality`
+.. |Field.location| replace:: :func:`Field.location`
+.. |Field.scoping| replace:: :func:`Field.scoping`
+.. |field_from_array| replace:: :func:`field_from_array()`
+.. |create_scalar_field| replace:: :func:`create_scalar_field()`
+.. |create_vector_field| replace:: :func:`create_vector_field()`
+.. |create_3d_vector_field| replace:: :func:`create_3d_vector_field()`
+.. |create_matrix_field| replace:: :func:`create_matrix_field()`
+.. |create_tensor_field| replace:: :func:`create_tensor_field()`
+.. |over_time_freq_fields_container| replace:: :func:`over_time_freq_fields_container()`
+
+This tutorial shows how to represent your custom data in DPF data storage structures.
+
+To import your custom data in DPF, you must create a DPF data structure to store it.
+DPF uses |Field| and |FieldsContainer| objects to handle data. The |Field| is a homogeneous array
+and a |FieldsContainer| is a labeled collection of |Field|. For more information on DPF data structures
+such as the |Field| and their use, check the :ref:`ref_tutorials_data_structures` tutorials section.
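+
+As a minimal preview of what this tutorial builds up step by step, the sketch below creates a small scalar
+|Field| and fills it with custom values. Every call used here (the |fields_factory| helper, the scoping ids,
+and the data assignment) is presented in detail in the following sections.
+
+.. jupyter-execute::
+
+    from ansys.dpf import core as dpf
+
+    # Create a scalar Field with three entities and fill it with custom data
+    preview_field = dpf.fields_factory.create_scalar_field(num_entities=3)
+    preview_field.scoping.ids = [1, 2, 3]
+    preview_field.data = [10.0, 20.0, 30.0]
+
+    # Print the Field
+    print(preview_field)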
+ +:jupyter-download-script:`Download tutorial as Python script` +:jupyter-download-notebook:`Download tutorial as Jupyter notebook` + +Define the data +--------------- + +In this tutorial, we create different Fields from data stored in Python lists. These data arrays are +reshaped to respect the |Field| definition. + +Create the python lists with the data to be *set* to the Fields. + +.. jupyter-execute:: + + # Data for the scalar Fields (lists with 1 and 2 dimensions) + data_1 = [6.0, 5.0, 4.0, 3.0, 2.0, 1.0] + data_2 = [[12.0, 7.0, 8.0], [ 9.0, 31.0, 1.0]] + + # Data for the vector Fields (lists with 1 and 2 dimensions) + data_3 = [4.0, 1.0, 8.0, 5.0, 7.0, 9.0] + data_4 = [6.0, 5.0, 4.0, 3.0, 2.0, 1.0, 9.0, 7.0, 8.0, 10.0] + data_5 = [[8.0, 4.0, 3.0], [31.0, 5.0, 7.0]] + + # Data for the matrix Fields + data_6 = [3.0, 2.0, 1.0, 7.0] + data_7 = [15.0, 3.0, 9.0, 31.0, 1.0, 42.0, 5.0, 68.0, 13.0] + data_8 = [[12.0, 7.0, 8.0], [ 1.0, 4.0, 27.0], [98.0, 4.0, 6.0]] + +Create the python lists with the data to be *appended* to the Fields. + +.. jupyter-execute:: + + # Data for the scalar Fields + data_9 = [24.0] + + # Data for the vector Fields + data_10 = [47.0, 33.0, 5.0] + + # Data for the matrix Fields + data_11 = [8.0, 2.0, 4.0, 64.0, 32.0, 47.0, 11.0, 23.0, 1.0] + + +Create the Fields +----------------- + +A |Field| must always be given: + +- A |location| and a |Scoping|. + + Here, we create Fields in the default *'Nodal'* |location|. Thus each entity (here, the nodes) must + have a |Scoping| id, that can be defined in a random or in a numerical order: + + - If you want to *set* a data array to the |Field|, you must previously set the |Scoping| ids using the |Field.scoping| method. + - If you want to *append* an entity with a data array to the |Field|, you don't need to previously set the |Scoping| ids. + +- A |nature| and a |dimensionality| (number of data components for each entity). They must respect the type and size of the + data to be stored in the |Field|. + +First, import the PyDPF-Core library. + +.. jupyter-execute:: + + # Import the ``ansys.dpf.core`` module + from ansys.dpf import core as dpf + +Then, create the different Fields. In this tutorial we explain how to create the following Fields: + +- :ref:`Scalar Field`; +- :ref:`Vector Field`; +- :ref:`Matrix Field`. + +.. _ref_scalar_field_creation: + +Scalar fields +^^^^^^^^^^^^^ + +Here, we create one |Field| with 6 scalar. Thus, 6 entities with one |Scoping| id each. + +.. jupyter-execute:: + + # Define the number of entities + num_entities_1 = 6 + +You can create a scalar |Field| using three approaches: + +- :ref:`Instantiating the Field object`; +- :ref:`Using the create_scalar_field() function from the fields_factory module`. +- :ref:`Using the field_from_array() function from the fields_factory module`. + +You must ensure that this |Field| has a *'scalar'* |nature| and an *'1D'* |dimensionality|. + +.. _ref_scalar_field_instance: + +Create the |Field| by an instance of this object +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +For this approach, the default |nature| of the |Field| object is *'vector'*. You can modify it directly with +*'nature'* argument or with the |Field.dimensionality| method. + +Create the scalar |Field| and use the *'nature'* argument. + +.. 
jupyter-execute::

    # Instantiate the Fields
    field_11 = dpf.Field(nentities=num_entities_1, nature=dpf.common.natures.scalar)

    # Set the scoping ids
    field_11.scoping.ids = range(num_entities_1)

    # Print the Field
    print("Scalar Field: ", '\n',field_11, '\n')

Create the scalar |Field| and use the |Field.dimensionality| method.

.. jupyter-execute::

    # Instantiate the Fields
    field_12 = dpf.Field(nentities=num_entities_1)

    # Use the Field.dimensionality method
    field_12.dimensionality = dpf.Dimensionality([1])

    # Set the scoping ids
    field_12.scoping.ids = range(num_entities_1)

    # Print the Field
    print("Scalar Field: ", '\n',field_12, '\n')

.. _ref_scalar_field_factory_create_scalar_field:

Create the |Field| using the |create_scalar_field| function
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

For this approach, the default |nature| of the |Field| object is *'scalar'* and the default |dimensionality| is *'1D'*.
Thus, you just have to use the |create_scalar_field| function to create a scalar |Field|.

.. jupyter-execute::

    # Create the scalar Field
    field_13 = dpf.fields_factory.create_scalar_field(num_entities=num_entities_1)

    # Set the scoping ids
    field_13.scoping.ids = range(num_entities_1)

    # Print the Field
    print("Scalar Field: ", '\n',field_13, '\n')

.. _ref_scalar_field_factory_field_from_array:

Create the |Field| using the |field_from_array| function
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

Unlike the other approaches, where you set or append the data after creating the |Field|, here the data is
passed as an input to the |field_from_array| function.

This function gets a NumPy array or Python list of either:

- 1 dimension (one array). In this case, you directly get a scalar |Field|;
- 2 dimensions (one array containing multiple arrays with 3 components each). In this case, you get a 3D vector |Field|.
  Thus, you have to change the |Field| |dimensionality| using the |Field.dimensionality| method.

Create the scalar Field with a 1-dimensional list.

.. jupyter-execute::

    # Use the field_from_array function
    field_14 = dpf.fields_factory.field_from_array(arr=data_1)

    # Set the scoping ids
    field_14.scoping.ids = range(num_entities_1)

    # Print the Field
    print("Scalar Field: ", '\n',field_14, '\n')

Create the scalar Field with a 2-dimensional list.

.. jupyter-execute::

    # Use the field_from_array function
    field_15 = dpf.fields_factory.field_from_array(arr=data_2)

    # Use the Field.dimensionality method
    field_15.dimensionality = dpf.Dimensionality([1])

    # Set the scoping ids
    field_15.scoping.ids = range(num_entities_1)

    # Print the Field
    print("Scalar Field: ", '\n',field_15, '\n')
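Whichever approach you choose, the resulting scalar Fields share the same structure. As a quick check, the
minimal sketch below compares the location, the scoping ids, and the number of components of two of the Fields
created above; it assumes the ``component_count`` property, which reports the number of components per entity.

.. jupyter-execute::

    # Compare the metadata of two scalar Fields created with different approaches
    print(field_11.location, field_13.location)
    print(field_11.scoping.ids, field_13.scoping.ids)
    print(field_11.component_count, field_13.component_count)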
.. _ref_vector_field_creation:

Vector fields
^^^^^^^^^^^^^

Here, we create:

- One |Field| with 2 vectors (thus, 2 entities) of 3 components each (3D vector |Field|);
- One |Field| with 2 vectors (thus, 2 entities) of 5 components each (5D vector |Field|).

.. jupyter-execute::

    # Define the number of entities
    num_entities_2 = 2

You can create a vector |Field| using three approaches:

- :ref:`Instantiating the Field object <ref_vector_field_instance>`;
- :ref:`Using the create_vector_field() function from the fields_factory module <ref_vector_field_factory_create_vector_field>`;
- :ref:`Using the field_from_array() function from the fields_factory module <ref_vector_field_factory_field_from_array>`.

In addition, the :ref:`create_3d_vector_field() function from the fields_factory module <ref_vector_field_factory_create_3d_vector_field>`
can be used specifically to create a 3D vector |Field| (a vector |Field| with 3 components for each entity).

You must ensure that these Fields have a *'vector'* |nature| and the corresponding |dimensionality| (*'3D'* and *'5D'*).

.. _ref_vector_field_instance:

Create the |Field| by an instance of this object
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

For this approach, the default |nature| is *'vector'* and the default |dimensionality| is *'3D'*. So for the second vector
|Field| you must set a *'5D'* |dimensionality| using the |Field.dimensionality| method.

Create the *'3D'* vector Field.

.. jupyter-execute::

    # Instantiate the Field
    field_21 = dpf.Field(nentities=num_entities_2)

    # Set the scoping ids
    field_21.scoping.ids = range(num_entities_2)

    # Print the Field
    print("3D vector Field: ", '\n',field_21, '\n')

Create the *'5D'* vector Field.

.. jupyter-execute::

    # Instantiate the Field
    field_31 = dpf.Field(nentities=num_entities_2)

    # Use the Field.dimensionality method
    field_31.dimensionality = dpf.Dimensionality([5])

    # Set the scoping ids
    field_31.scoping.ids = range(num_entities_2)

    # Print the Field
    print("5D vector Field: ", '\n',field_31, '\n')

.. _ref_vector_field_factory_create_vector_field:

Create the |Field| using the |create_vector_field| function
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

For this approach, the default |nature| is *'vector'*. To define the |dimensionality| you must use the *'num_comp'* argument.

Create the *'3D'* vector Field.

.. jupyter-execute::

    # Use the create_vector_field function
    field_22 = dpf.fields_factory.create_vector_field(num_entities=num_entities_2, num_comp=3)

    # Set the scoping ids
    field_22.scoping.ids = range(num_entities_2)

    # Print the Field
    print("3D vector Field: ", '\n',field_22, '\n')

Create the *'5D'* vector Field.

.. jupyter-execute::

    # Use the create_vector_field function
    field_32 = dpf.fields_factory.create_vector_field(num_entities=num_entities_2, num_comp=5)

    # Set the scoping ids
    field_32.scoping.ids = range(num_entities_2)

    # Print the Field
    print("5D vector Field: ", '\n',field_32, '\n')

.. _ref_vector_field_factory_field_from_array:

Create the |Field| using the |field_from_array| function
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

Unlike the other approaches, where you set or append the data after creating the |Field|, here the data is
passed as an input to the |field_from_array| function.

This function gets a NumPy array or Python list of either:

- 1 dimension (one array). In this case, you have to change the |Field| |dimensionality| using the
  |Field.dimensionality| method.
- 2 dimensions (one array containing multiple arrays with 3 components). In this case, you get a 3D vector |Field|.

.. note::

    The |Field| must always ensure a homogeneous shape. The shape is a tuple with the number of elementary data and the
    number of components.

    So, for the *'5D'* vector |Field| we would want a shape of (10, 5). Nevertheless, the 2-dimensional data list we
    defined ("data_5") has an elementary data count of 6 (2*3). Thus, we cannot define the *'5D'* vector |Field| because it
    would have a (6, 5) shape.

Create the *'3D'* vector Field with a 1-dimensional list.
.. jupyter-execute::

    # Use the field_from_array function
    field_23 = dpf.fields_factory.field_from_array(arr=data_3)

    # Use the Field.dimensionality method
    field_23.dimensionality = dpf.Dimensionality([3])

    # Set the scoping ids
    field_23.scoping.ids = range(num_entities_2)

    # Print the Field
    print("3D vector Field: ", '\n',field_23, '\n')

Create the *'3D'* vector Field from a 2-dimensional list.

.. jupyter-execute::

    # Use the field_from_array function
    field_24 = dpf.fields_factory.field_from_array(arr=data_5)

    # Set the scoping ids
    field_24.scoping.ids = range(num_entities_2)

    # Print the Field
    print("3D vector Field: ", '\n',field_24, '\n')
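The |field_from_array| function also accepts NumPy arrays directly. As a short illustration, and assuming NumPy
is available in your environment (PyDPF-Core already depends on it), the same *'3D'* vector |Field| can be built
from an array instead of a list. The ``field_np`` name is used here only for this example.

.. jupyter-execute::

    import numpy as np

    # Build the Field directly from a NumPy array
    field_np = dpf.fields_factory.field_from_array(arr=np.array(data_5))

    # Set the scoping ids
    field_np.scoping.ids = range(num_entities_2)

    # Print the Field
    print("3D vector Field from a NumPy array: ", '\n', field_np, '\n')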
.. _ref_vector_field_factory_create_3d_vector_field:

Create a 3D vector |Field| using the |create_3d_vector_field| function
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

For this approach, the default |nature| is *'vector'* and the |dimensionality| is *'3D'*. Thus, you just
have to use the |create_3d_vector_field| function to create a 3D vector |Field|.

.. jupyter-execute::

    # Create the 3D vector Field
    field_25 = dpf.fields_factory.create_3d_vector_field(num_entities=num_entities_2)

    # Set the scoping ids
    field_25.scoping.ids = range(num_entities_2)

    # Print the Field
    print("3D vector Field: ", '\n',field_25, '\n')

.. _ref_matrix_field_creation:

Matrix fields
^^^^^^^^^^^^^

Here, we create:

- One Field with 1 matrix (thus, 1 entity) of 2 lines and 2 columns;
- Two Fields, each with 1 matrix (thus, 1 entity) of 3 lines and 3 columns (tensor).

.. jupyter-execute::

    # Define the number of entities
    num_entities_3 = 1

You can create a matrix |Field| using the |create_matrix_field| function from the |fields_factory| module.

The default |nature| here is *'matrix'*. Thus, you only have to define the matrix |dimensionality| using the
*'num_lines'* and *'num_col'* arguments.

Create the (2,2) matrix Field.

.. jupyter-execute::

    # Use the create_matrix_field function
    field_41 = dpf.fields_factory.create_matrix_field(num_entities=num_entities_3, num_lines=2, num_col=2)

    # Set the scoping ids
    field_41.scoping.ids = range(num_entities_3)

    # Print the Field
    print("Matrix Field (2,2): ", '\n',field_41, '\n')

Create the (3,3) matrix Fields.

.. jupyter-execute::

    # Use the create_matrix_field function
    field_51 = dpf.fields_factory.create_matrix_field(num_entities=num_entities_3, num_lines=3, num_col=3)
    field_52 = dpf.fields_factory.create_matrix_field(num_entities=num_entities_3, num_lines=3, num_col=3)

    # Set the scoping ids
    field_51.scoping.ids = range(num_entities_3)
    field_52.scoping.ids = range(num_entities_3)

    # Print the Fields
    print("Matrix Field 1 (3,3): ", '\n',field_51, '\n')
    print("Matrix Field 2 (3,3): ", '\n',field_52, '\n')

Set data to the Fields
----------------------

To set a data array to a |Field|, use the |Field.data| attribute. The |Field| |Scoping| defines how the data is ordered.
For example: the first id in the scoping identifies the entity to which the first data entity belongs.

The data can be a NumPy array or a Python list with 1 dimension (one flat array) or 2 dimensions (one array
containing multiple arrays).

Scalar fields
^^^^^^^^^^^^^

Set the data from a 1-dimensional array to the scalar Field.

.. jupyter-execute::

    # Set the data
    field_11.data = data_1

    # Print the Field
    print("Scalar Field: ", '\n',field_11, '\n')

    # Print the Fields data
    print("Data scalar Field: ", '\n',field_11.data, '\n')

Set the data from a 2-dimensional array to the scalar Field.

.. jupyter-execute::

    # Set the data
    field_12.data = data_2

    # Print the Field
    print("Scalar Field: ", '\n',field_12, '\n')

    # Print the Fields data
    print("Data scalar Field: ", '\n',field_12.data, '\n')

Vector fields
^^^^^^^^^^^^^

Set the data from a 1-dimensional array to the *'3D'* vector Field.

.. jupyter-execute::

    # Set the data
    field_21.data = data_3

    # Print the Field
    print("Vector Field: ", '\n',field_21, '\n')

    # Print the Fields data
    print("Data vector Field: ", '\n',field_21.data, '\n')

Set the data from a 1-dimensional array to the *'5D'* vector Field.

.. jupyter-execute::

    # Set the data
    field_31.data = data_4

    # Print the Field
    print("Vector Field: ", '\n',field_31, '\n')

    # Print the Fields data
    print("Data vector Field: ", '\n',field_31.data, '\n')

Set the data from a 2-dimensional array to the *'3D'* vector Field.

.. jupyter-execute::

    # Set the data
    field_22.data = data_5

    # Print the Field
    print("Vector Field: ", '\n',field_22, '\n')

    # Print the Fields data
    print("Data vector Field: ", '\n',field_22.data, '\n')


Matrix fields
^^^^^^^^^^^^^

Set the data from a 1-dimensional array to the (2,2) matrix Field.

.. jupyter-execute::

    # Set the data
    field_41.data = data_6

    # Print the Field
    print("Matrix Field: ", '\n',field_41, '\n')

    # Print the Fields data
    print("Data matrix Field: ", '\n',field_41.data, '\n')

Set the data from a 1-dimensional array to the (3,3) matrix Field.

.. jupyter-execute::

    # Set the data
    field_51.data = data_7

    # Print the Field
    print("Matrix Field: ", '\n',field_51, '\n')

    # Print the Fields data
    print("Data matrix Field: ", '\n',field_51.data, '\n')

Set the data from a 2-dimensional array to the (3,3) matrix Field.

.. jupyter-execute::

    # Set the data
    field_52.data = data_8

    # Print the Field
    print("Matrix Field: ", '\n',field_52, '\n')

    # Print the Fields data
    print("Data matrix Field: ", '\n',field_52.data, '\n')
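Once the data is set, the scoping ids determine which entity each data array belongs to. As a quick check, the
minimal sketch below uses the ``get_entity_data()`` and ``get_entity_data_by_id()`` methods of the |Field| to
read back the data of a single entity of the *'3D'* vector Field filled above.

.. jupyter-execute::

    # Get the data of the first entity of the 3D vector Field
    print(field_21.get_entity_data(0))

    # Get the data of the entity with the scoping id 1
    print(field_21.get_entity_data_by_id(1))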
Append data to the Fields
-------------------------

You can append a data array to a |Field|. This means adding a new entity, holding the new data, to the |Field|.
You must give the |Scoping| id that this new entity will have.

Scalar fields
^^^^^^^^^^^^^

Append data to a scalar |Field|.

.. jupyter-execute::

    # Append the data
    field_11.append(scopingid=6, data=data_9)

    # Print the Field
    print("Scalar Field: ", '\n',field_11, '\n')

    # Print the Fields data
    print("Data scalar Field: ", '\n',field_11.data, '\n')

Vector fields
^^^^^^^^^^^^^

Append data to a vector |Field|.

.. jupyter-execute::

    # Append the data
    field_21.append(scopingid=2, data=data_10)

    # Print the Field
    print("Vector Field: ", '\n',field_21, '\n')

    # Print the Fields data
    print("Data vector Field: ", '\n',field_21.data, '\n')

Matrix fields
^^^^^^^^^^^^^

Append data to a matrix |Field|.

.. jupyter-execute::

    # Append the data
    field_51.append(scopingid=1, data=data_11)

    # Print the Field
    print("Matrix Field: ", '\n',field_51, '\n')

    # Print the Fields data
    print("Data matrix Field: ", '\n',field_51.data, '\n')

Create a |FieldsContainer|
--------------------------

A |FieldsContainer| is a collection of |Field| ordered by labels. Each |Field| of the |FieldsContainer| has
an ID for each label. These ids allow splitting the fields based on any criteria.

The most common |FieldsContainer| has the label *'time'* with ids corresponding to time sets. The label *'complex'*,
which is used in a harmonic analysis for example, allows real parts (id=0) to be separated from imaginary parts (id=1).

For more information on DPF data structures, see the :ref:`ref_tutorials_data_structures` tutorials section.

You can create a |FieldsContainer| by:

- :ref:`Instantiating the FieldsContainer object <ref_fields_container_instance>`;
- :ref:`Using the fields_container_factory module <ref_fields_container_factory_module>`.

.. _ref_fields_container_instance:

Create a |FieldsContainer| by an instance of this object
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

After defining a |FieldsContainer| by an instance of this object, you need to set the labels. Here, we define
Fields over time step labels. So, when you add a |Field| to the |FieldsContainer|, you must specify the time step id
it belongs to.

.. jupyter-execute::

    # Create the FieldsContainer object
    fc_1 = dpf.FieldsContainer()

    # Define the labels
    fc_1.add_label(label="time")

    # Add the Fields
    fc_1.add_field(label_space={"time": 0}, field=field_21)
    fc_1.add_field(label_space={"time": 1}, field=field_31)

    # Print the FieldsContainer
    print(fc_1)

.. _ref_fields_container_factory_module:

Create a |FieldsContainer| with the |fields_container_factory| module
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

The |fields_container_factory| module contains functions that create a |FieldsContainer| with predefined
labels. Here, we use the |over_time_freq_fields_container| function that creates a |FieldsContainer| with a *'time'*
label.

.. jupyter-execute::

    # Create the FieldsContainer
    fc_2 = dpf.fields_container_factory.over_time_freq_fields_container(fields=[field_21, field_31])

    # Print the FieldsContainer
    print(fc_2)
\ No newline at end of file
diff --git a/doc/source/user_guide/tutorials/import_data/represent_data_on_dpf.rst b/doc/source/user_guide/tutorials/import_data/represent_data_on_dpf.rst
deleted file mode 100644
index 75637032c9..0000000000
--- a/doc/source/user_guide/tutorials/import_data/represent_data_on_dpf.rst
+++ /dev/null
@@ -1,103 +0,0 @@
-.. _ref_tutorials_represent_data_on_dpf:
-
-========================
-Manual input data on DPF
-========================
-
-.. |Field| replace:: :class:`Field`
-.. |FieldsContainer| replace:: :class:`FieldsContainer`
-.. |append| replace:: :func:`append()`
-.. |data| replace:: :attr:`Field.data`
-.. |scoping| replace:: :attr:`Field.scoping`
-
-This tutorial shows how to represent your manual input data in a DPF data storage structures.
-
-When handling data DPF uses |FieldsContainer| and |Field| to store and return it. The |Field| is a DPF array
-and a collection of |Field| is called |FieldsContainer|. For more information on how the data is structure
-in a |Field| and how the DPF data storage structures works check the :ref:`ref_tutorials_data_structures`
-tutorial section.
- -Here we will create some 3d vector |Field|, where the data comes from lists. - -Defining the fields -------------------- - -To manually import data on DPF you have to create the structure to store it. - -Here we create a |Field| from scratch by instantiating this object. When using this approach the |Field| has -vector nature by default. Check the :ref:`ref_tutorials_data_structures` tutorial section for more information -on others approaches. - -We will need two 3d vector |Field|: - -.. jupyter-execute:: - - # Import the ``ansys.dpf.core`` module - from ansys.dpf import core as dpf - - # Create the fields - # a. Define the number of entities - num_entities_1 = 2 - - # b. Instanciate the field - field_1 = dpf.Field(nentities=num_entities_1) - field_2 = dpf.Field(nentities=num_entities_1) - field_3 = dpf.Field(nentities=num_entities_1) - field_4 = dpf.Field(nentities=num_entities_1) - - # c. Define the scoping ids - - field_3.scoping.ids = range(num_entities_1) - field_4.scoping.ids = range(num_entities_1) - - # d. Create a FieldsContainer - fc_1 = dpf.fields_container_factory.over_time_freq_fields_container(fields=[field_1, field_2]) - - # Check the Fields and the FieldsContainer - print("Field 1: ", "\n" ,field_1, "\n") - print("Field 2: ", "\n" ,field_2, "\n") - print("Field 3: ", "\n" ,field_3, "\n") - print("Field 4: ", "\n" ,field_4, "\n") - print("FieldsContainer: ", "\n" ,fc_1, "\n") - -Add data to the fields ----------------------- - -Here we define the data and then add it to the fields. - -You can add data to a |Field| by using the |append| method, if you have not set the |scoping| property -with the scoping ids, or the |data| property, if you have set the |scoping| property -with the scoping ids. - -.. jupyter-execute:: - - # Define and add the data to the fields - # a. Using the append method - - # Define the Fields data - data_11 = [1.0, 2.0, 3.0] - data_12 = [4.0, 5.0, 6.0] - data_21 = [7.0, 3.0, 5.0] - data_22 = [8.0, 1.0, 2.0] - - # Add the data to the field - field_1.append(data=data_11, scopingid=0) - field_1.append(data=data_12, scopingid=1) - field_2.append(data=data_21, scopingid=0) - field_2.append(data=data_22, scopingid=1) - - # b. Using the data property - - # Define the Fields data - data_3b = [6.0, 5.0, 4.0, 3.0, 2.0, 1.0] - data_4b = [4.0, 1.0, 8.0, 5.0, 7.0, 9.0] - - # Add the data to the field - field_3.data = data_3b - field_4.data = data_4b - - # Check the Fields - print("Field 1: ", "\n", field_1, "\n") - print("Field 2: ", "\n", field_2, "\n") - print("Field 3: ", "\n" ,field_3, "\n") - print("Field 4: ", "\n" ,field_4, "\n") From 3d68a604d3e2fada5ee6fdab5115388d88422db2 Mon Sep 17 00:00:00 2001 From: luisaFelixSalles Date: Wed, 11 Dec 2024 11:41:29 +0100 Subject: [PATCH 24/28] update the index.rst to the tutorials guidelines --- doc/source/user_guide/tutorials/import_data/index.rst | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/doc/source/user_guide/tutorials/import_data/index.rst b/doc/source/user_guide/tutorials/import_data/index.rst index ef9e979252..e24f2f2f35 100644 --- a/doc/source/user_guide/tutorials/import_data/index.rst +++ b/doc/source/user_guide/tutorials/import_data/index.rst @@ -15,8 +15,8 @@ From user input :padding: 2 :margin: 2 - .. grid-item-card:: Use custom data - :link: ref_tutorials_field_with_custom_data + .. 
grid-item-card:: Load custom data + :link: ref_tutorials_load_custom_data :link-type: ref :text-align: center @@ -65,7 +65,7 @@ From result files :maxdepth: 2 :hidden: - field_with_custom_data.rst + load_custom_data.rst import_result_file.rst extract_and_explore_results_metadata.rst extract_and_explore_results_data.rst From 69434d7f47eb6f9c26974d953de0a0be7df66688 Mon Sep 17 00:00:00 2001 From: luisaFelixSalles Date: Wed, 11 Dec 2024 11:43:43 +0100 Subject: [PATCH 25/28] add solvers badges to the index.rst cards --- doc/source/user_guide/tutorials/import_data/index.rst | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/doc/source/user_guide/tutorials/import_data/index.rst b/doc/source/user_guide/tutorials/import_data/index.rst index e24f2f2f35..7370632a8d 100644 --- a/doc/source/user_guide/tutorials/import_data/index.rst +++ b/doc/source/user_guide/tutorials/import_data/index.rst @@ -37,6 +37,9 @@ From result files This tutorial shows how to import a result file in DPF. + +++ + :bdg-mapdl:`MAPDL` :bdg-lsdyna:`LS-DYNA` :bdg-fluent:`FLUENT` :bdg-cfx:`CFX` + .. grid-item-card:: Extract and explore results metadata :link: ref_tutorials_extract_and_explore_results_metadata :link-type: ref @@ -45,6 +48,9 @@ From result files This tutorial shows how to extract and explore results metadata (analysis type, physics type, unit system ... ) from a result file. + +++ + :bdg-mapdl:`MAPDL` :bdg-lsdyna:`LS-DYNA` :bdg-fluent:`FLUENT` :bdg-cfx:`CFX` + .. grid-item-card:: Extract and explore results data :link: ref_tutorials_extract_and_explore_results_data :link-type: ref @@ -52,6 +58,9 @@ From result files This tutorial shows how to extract and explore results data from a result file. + +++ + :bdg-mapdl:`MAPDL` :bdg-lsdyna:`LS-DYNA` :bdg-fluent:`FLUENT` :bdg-cfx:`CFX` + .. grid-item-card:: Narrow down data :link: reft_tutorials_narrow_down_data :link-type: ref @@ -60,6 +69,8 @@ From result files This tutorial explains how to scope (get a spatial and/or temporal subset of the simulation data) your results. + +++ + :bdg-mapdl:`MAPDL` :bdg-lsdyna:`LS-DYNA` :bdg-fluent:`FLUENT` :bdg-cfx:`CFX` .. toctree:: :maxdepth: 2 From b1fefc43718d5de532c7d1e2bbd9bf767373ab06 Mon Sep 17 00:00:00 2001 From: luisaFelixSalles Date: Wed, 11 Dec 2024 11:44:47 +0100 Subject: [PATCH 26/28] add solvers badges to the beginning of each tutorial --- .../tutorials/import_data/extract_and_explore_results_data.rst | 2 ++ .../import_data/extract_and_explore_results_metadata.rst | 2 ++ .../user_guide/tutorials/import_data/import_result_file.rst | 2 ++ .../user_guide/tutorials/import_data/narrow_down_data.rst | 2 ++ 4 files changed, 8 insertions(+) diff --git a/doc/source/user_guide/tutorials/import_data/extract_and_explore_results_data.rst b/doc/source/user_guide/tutorials/import_data/extract_and_explore_results_data.rst index d88bb01a9d..38aea40a36 100644 --- a/doc/source/user_guide/tutorials/import_data/extract_and_explore_results_data.rst +++ b/doc/source/user_guide/tutorials/import_data/extract_and_explore_results_data.rst @@ -4,6 +4,8 @@ Extract and explore results data ================================ +:bdg-mapdl:`MAPDL` :bdg-lsdyna:`LS-DYNA` :bdg-fluent:`FLUENT` :bdg-cfx:`CFX` + .. include:: ../../../links_and_refs.rst .. |get_entity_data| replace:: :func:`get_entity_data()` .. 
|get_entity_data_by_id| replace:: :func:`get_entity_data_by_id()` diff --git a/doc/source/user_guide/tutorials/import_data/extract_and_explore_results_metadata.rst b/doc/source/user_guide/tutorials/import_data/extract_and_explore_results_metadata.rst index 9b655adb5f..32f0fa0228 100644 --- a/doc/source/user_guide/tutorials/import_data/extract_and_explore_results_metadata.rst +++ b/doc/source/user_guide/tutorials/import_data/extract_and_explore_results_metadata.rst @@ -4,6 +4,8 @@ Extract and explore results metadata ==================================== +:bdg-mapdl:`MAPDL` :bdg-lsdyna:`LS-DYNA` :bdg-fluent:`FLUENT` :bdg-cfx:`CFX` + .. include:: ../../../links_and_refs.rst .. |ResultInfo| replace:: :class:`ResultInfo` diff --git a/doc/source/user_guide/tutorials/import_data/import_result_file.rst b/doc/source/user_guide/tutorials/import_data/import_result_file.rst index f428b64da8..f3f8d2ee27 100644 --- a/doc/source/user_guide/tutorials/import_data/import_result_file.rst +++ b/doc/source/user_guide/tutorials/import_data/import_result_file.rst @@ -4,6 +4,8 @@ Import result file in DPF ========================= +:bdg-mapdl:`MAPDL` :bdg-lsdyna:`LS-DYNA` :bdg-fluent:`FLUENT` :bdg-cfx:`CFX` + .. include:: ../../../links_and_refs.rst .. |set_result_file_path| replace:: :func:`set_result_file_path() ` .. |add_file_path| replace:: :func:`add_file_path() ` diff --git a/doc/source/user_guide/tutorials/import_data/narrow_down_data.rst b/doc/source/user_guide/tutorials/import_data/narrow_down_data.rst index d29611d79b..d4e3e66cb0 100644 --- a/doc/source/user_guide/tutorials/import_data/narrow_down_data.rst +++ b/doc/source/user_guide/tutorials/import_data/narrow_down_data.rst @@ -4,6 +4,8 @@ Narrow down data ================ +:bdg-mapdl:`MAPDL` :bdg-lsdyna:`LS-DYNA` :bdg-fluent:`FLUENT` :bdg-cfx:`CFX` + .. include:: ../../../links_and_refs.rst .. |location| replace:: :class:`location` .. |time_freq_scoping_factory| replace:: :mod:`time_freq_scoping_factory` From 308798bd9f052aa02468adb5a9e9268a41b89bda Mon Sep 17 00:00:00 2001 From: luisaFelixSalles Date: Mon, 16 Dec 2024 15:02:06 +0100 Subject: [PATCH 27/28] updates on the text of the load_custom_data.rst turorial --- .../tutorials/import_data/load_custom_data.rst | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/doc/source/user_guide/tutorials/import_data/load_custom_data.rst b/doc/source/user_guide/tutorials/import_data/load_custom_data.rst index fb84c145ec..4b66af154a 100644 --- a/doc/source/user_guide/tutorials/import_data/load_custom_data.rst +++ b/doc/source/user_guide/tutorials/import_data/load_custom_data.rst @@ -36,8 +36,8 @@ such as the |Field| and their use check the :ref:`ref_tutorials_data_structures` Define the data --------------- -In this tutorial, we create different Fields from data stored in Python lists. These data arrays are -reshaped to respect the |Field| definition. +In this tutorial, we create different Fields from data stored in Python lists. When attributed to a |Field|, these +data arrays are reshaped to respect the |Field| definition. Create the python lists with the data to be *set* to the Fields. @@ -78,7 +78,7 @@ A |Field| must always be given: - A |location| and a |Scoping|. - Here, we create Fields in the default *'Nodal'* |location|. Thus each entity (here, the nodes) must + Here, we create Fields in the default *'Nodal'* |location|. 
Thus, each entity (here, the nodes) must
have a |Scoping| id, that can be defined in a random or in a numerical order:

   - If you want to *set* a data array to the |Field|, you must previously set the |Scoping| ids using the |Field.scoping| method.
   - If you want to *append* an entity with a data array to the |Field|, you don't need to previously set the |Scoping| ids.

- A |nature| and a |dimensionality| (number of data components for each entity). They must respect the type and size of the
  data to be stored in the |Field|.

First, import the PyDPF-Core library.

.. jupyter-execute::

    # Import the ``ansys.dpf.core`` module
    from ansys.dpf import core as dpf

-Then, create the different Fields. In this tutorial we explain how to create the following Fields:
+Then, create the different Fields. In this tutorial, we explain how to create the following Fields:

 - :ref:`Scalar Field`;
 - :ref:`Vector Field`;
@@ -125,14 +125,14 @@ Create the |Field| by an instance of this object
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

-For this approach, the default |nature| of the |Field| object is *'vector'*. You can modify it directly with
+For this approach, the default |nature| of the |Field| object is *'vector'*. You can modify it directly with the
 *'nature'* argument or with the |Field.dimensionality| method.

 Create the scalar |Field| and use the *'nature'* argument.

 .. jupyter-execute::

-    # Instantiate the Fields
+    # Instantiate the Field
     field_11 = dpf.Field(nentities=num_entities_1, nature=dpf.common.natures.scalar)

     # Set the scoping ids
     field_11.scoping.ids = range(num_entities_1)

 Create the scalar |Field| and use the |Field.dimensionality| method.

 .. jupyter-execute::

-    # Instantiate the Fields
+    # Instantiate the Field
     field_12 = dpf.Field(nentities=num_entities_1)

From edb4f7f4dac9bacb6727e172b495c34c82fc5114 Mon Sep 17 00:00:00 2001
From: luisaFelixSalles
Date: Mon, 16 Dec 2024 15:04:10 +0100
Subject: [PATCH 28/28] updates on the text of the import_result_file.rst
 tutorial
---
 .../tutorials/import_data/import_result_file.rst | 11 ++++++++---
 1 file changed, 8 insertions(+), 3 deletions(-)

diff --git a/doc/source/user_guide/tutorials/import_data/import_result_file.rst b/doc/source/user_guide/tutorials/import_data/import_result_file.rst
index f3f8d2ee27..f8d22047e2 100644
--- a/doc/source/user_guide/tutorials/import_data/import_result_file.rst
+++ b/doc/source/user_guide/tutorials/import_data/import_result_file.rst
@@ -1,8 +1,8 @@
 .. _ref_tutorials_import_result_file:

-=========================
-Import result file in DPF
-=========================
+===========================
+Import a result file in DPF
+===========================

@@ -51,6 +51,8 @@ the |Examples| module.

     # Define the modal superposition harmonic analysis (.mode, .rfrq and .rst) result files paths
     result_file_path_12 = examples.download_msup_files_to_dict()
+
+    # Print the result files paths
     print("Result file path 11:", "\n",result_file_path_11, "\n")
     print("Result files paths 12:", "\n",result_file_path_12, "\n")

@@ -71,6 +73,7 @@ the |Examples| module.

     # Define the .binout result file path
     result_file_path_22 = examples.download_binout_matsum()

+    # Print the result files paths
     print("Result files paths 21:", "\n",result_file_path_21, "\n")
     print("Result file path 22:", "\n",result_file_path_22, "\n")

@@ -91,6 +94,7 @@ the |Examples| module.
# Define the CFF .cas.h5/.dat.h5 result files paths result_file_path_32 = examples.download_fluent_axial_comp() + # Print the result files paths print("Result file path 31:", "\n",result_file_path_31, "\n") print("Result files paths 32:", "\n",result_file_path_32, "\n") @@ -111,6 +115,7 @@ the |Examples| module. # Define the CFF .cas.cff/.dat.cff result files paths result_file_path_42 = examples.download_cfx_heating_coil() + # Print the result files paths print("Result file path 41:", "\n",result_file_path_41, "\n") print("Result files paths 42:", "\n",result_file_path_42, "\n")