fix: pyflake style
jorgepiloto committed Dec 4, 2024
1 parent 8a34a56 commit b6d27b1
Showing 24 changed files with 81 additions and 259 deletions.
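
Most of the changes below remove local variables that were assigned but never read, which ruff reports as pyflakes rule F841. The fix keeps each call for its side effects and simply drops the unused binding. A minimal sketch of the pattern, using hypothetical names (load_model, compute) that are not part of this repository:

# Before: ruff reports F841 because `result` is bound but never read.
def check_model(path):
    model = load_model(path)   # hypothetical helper, for illustration only
    result = model.compute()   # F841: local variable assigned but never used
    assert model.is_valid()

# After: keep the call for its side effect, drop the unused binding.
def check_model(path):
    model = load_model(path)
    model.compute()
    assert model.is_valid()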
2 changes: 1 addition & 1 deletion .pre-commit-config.yaml
@@ -12,7 +12,7 @@ exclude: >
repos:

- repo: https://github.com/charliermarsh/ruff-pre-commit
rev: v0.7.3
rev: v0.8.1
hooks:
- id: ruff
args: [--fix]
4 changes: 2 additions & 2 deletions examples/09-averaging/00-compute_and_average.py
@@ -98,7 +98,7 @@
def compute_von_mises_then_average(analysis):
# Create a model from the results of the simulation and retrieve its mesh
model = dpf.Model(analysis)
mesh = model.metadata.meshed_region
model.metadata.meshed_region

# Apply the stress operator to obtain the stresses in the body
stress_op = dpf.operators.result.stress()
@@ -135,7 +135,7 @@ def compute_von_mises_then_average(analysis):
def average_then_compute_von_mises(analysis):
# Creating the model from the results of the simulation
model = dpf.Model(analysis)
mesh = model.metadata.meshed_region
model.metadata.meshed_region

# Retrieving the stresses
stress_op = dpf.operators.result.stress()
6 changes: 3 additions & 3 deletions src/ansys/dpf/core/core.py
@@ -537,9 +537,9 @@ def release_dpf(self):
Available with server's version starting at 6.0 (Ansys 2023R2).
"""
if self._server().has_client():
error = self._api.data_processing_release_on_client(self._server().client, 1)
self._api.data_processing_release_on_client(self._server().client, 1)
else:
error = self._api.data_processing_release(1)
self._api.data_processing_release(1)

@version_requires("4.0")
def get_runtime_core_config(self):
@@ -667,7 +667,7 @@ def download_file(self, server_file_path, to_client_file_path):
download service only available for server with gRPC communication protocol
"""
raise errors.ServerTypeError(txt)
client_path = self._api.data_processing_download_file(
self._api.data_processing_download_file(
client=self._server().client,
server_file_path=str(server_file_path),
to_client_file_path=str(to_client_file_path),
18 changes: 16 additions & 2 deletions src/ansys/dpf/core/examples/__init__.py
@@ -20,8 +20,19 @@
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.

from .examples import *
from .downloads import *
from .examples import get_example_required_minimum_dpf_version, find_files, fluid_axial_model
from .downloads import (
find_simple_bar,
find_static_rst,
find_complex_rst,
find_multishells_rst,
find_electric_therm,
find_steady_therm,
find_transient_therm,
find_msup_transient,
find_simple_cyclic,
find_distributed_msup_folder,
)


# called if module.<name> fails
@@ -67,3 +78,6 @@ def __getattr__(name):
distributed_msup_folder = find_distributed_msup_folder()
return distributed_msup_folder
raise AttributeError(f"module {__name__!r} has no attribute {name!r}")


__all__ = ["get_example_required_minimum_dpf_version", "find_files", "fluid_axial_model"]
1 change: 0 additions & 1 deletion src/ansys/dpf/core/inputs.py
@@ -241,7 +241,6 @@ def connect(self, inpt):
elif isinstance(inpt, Path):
inpt = str(inpt)

input_type_name = type(inpt).__name__
for input_pin in self._inputs:
self._operator()._find_outputs_corresponding_pins(
input_pin._python_expected_types,
17 changes: 8 additions & 9 deletions tests/test_cyclic_support.py
@@ -27,7 +27,6 @@
import conftest
import pytest

from ansys import dpf
from ansys.dpf import core as dpf


@@ -105,7 +104,7 @@ def test_cyc_support_from_to_operator(cyclic_lin_rst, server_type):
cyclic_support=cyc_support, server=server_type
)
exp = op.outputs.cyclic_support()
mesh = op.outputs.meshed_region()
op.outputs.meshed_region()
assert exp.num_sectors() == 15
assert exp.num_stages == 1
assert np.allclose(
@@ -145,7 +144,7 @@ def test_cyc_support_from_to_workflow(cyclic_lin_rst, server_type):
wf.set_output_name("sup", op.outputs.cyclic_support)
wf.connect("sup", cyc_support)
exp = wf.get_output("sup", dpf.types.cyclic_support)
mesh = op.outputs.meshed_region()
op.outputs.meshed_region()
assert exp.num_sectors() == 15
assert exp.num_stages == 1
assert np.allclose(
@@ -248,14 +247,14 @@ def test_delete_auto_cyc_support(cyclic_lin_rst):
def test_cyc_support_memory_leaks(cyclic_lin_rst):
import gc

for i in range(2000):
for _ in range(2000):
gc.collect()
data_sources = dpf.DataSources(cyclic_lin_rst)
model = dpf.Model(data_sources)
result_info = model.metadata.result_info
cyc_support = result_info.cyclic_support
a = cyc_support.num_stages
b = cyc_support.num_sectors()
c = cyc_support.sectors_set_for_expansion()
d = cyc_support.base_elements_scoping()
e = cyc_support.base_nodes_scoping()
cyc_support.num_stages
cyc_support.num_sectors()
cyc_support.sectors_set_for_expansion()
cyc_support.base_elements_scoping()
cyc_support.base_nodes_scoping()
4 changes: 2 additions & 2 deletions tests/test_data_tree.py
@@ -93,7 +93,7 @@ def test_add_dict_data_tree(server_type):
@conftest.raises_for_servers_version_under("4.0")
def test_add_data_to_fill_data_tree():
data_tree = dpf.DataTree()
with data_tree.to_fill() as to_fill:
with data_tree.to_fill() as _:
data_tree.int = 1
data_tree.double = 1.0
data_tree.string = "hello"
@@ -412,7 +412,7 @@ def test_attribute_errors_data_tree(server_type):
@conftest.raises_for_servers_version_under("4.0")
def test_add_data_bool_data_tree():
data_tree = dpf.DataTree()
with data_tree.to_fill() as to_fill:
with data_tree.to_fill() as _:
data_tree.int = 1
data_tree.bool = True
assert data_tree.get_as("int", dpf.types.int) == 1
4 changes: 2 additions & 2 deletions tests/test_datasources.py
@@ -130,10 +130,10 @@ def test_print_data_sources(allkindofcomplexity, server_type):

def test_data_sources_from_data_sources(allkindofcomplexity, server_type):
with pytest.raises(ValueError) as e:
data_sources_false = dpf.core.DataSources(data_sources="Wrong Input", server=server_type)
dpf.core.DataSources(data_sources="Wrong Input", server=server_type)
assert "gRPC data sources" in e
data_sources = dpf.core.DataSources(server=server_type)
data_sources2 = dpf.core.DataSources(data_sources=data_sources, server=server_type)
dpf.core.DataSources(data_sources=data_sources, server=server_type)


@pytest.mark.skipif(
25 changes: 0 additions & 25 deletions tests/test_field.py
@@ -976,31 +976,6 @@ def get_simple_field(server_clayer):
return field


@conftest.raises_for_servers_version_under("4.0")
def test_mutable_entity_data_contiguous_field(server_clayer):
simple_field = get_simple_field(server_clayer)
vec = simple_field.get_entity_data(0)
assert np.allclose(vec, np.array(range(0, 6)))

vec[0][0] = 1
vec[0][5] = 4

assert np.allclose(vec, np.array([1, 1, 2, 3, 4, 4]))

vec.commit()

assert np.allclose(simple_field.get_entity_data(0), np.array([1, 1, 2, 3, 4, 4]))

vec = simple_field.get_entity_data_by_id(2)
assert np.allclose(vec, np.array(range(6, 12)))

vec[0][0] = 1
vec[0][5] = 4
assert np.allclose(vec, np.array([1, 7, 8, 9, 10, 4]))
vec = None
assert np.allclose(simple_field.get_entity_data_by_id(2), np.array([1, 7, 8, 9, 10, 4]))


@pytest.mark.skipif(
not conftest.SERVERS_VERSION_GREATER_THAN_OR_EQUAL_TO_5_0,
reason="change in memory ownership in server 5.0",
110 changes: 0 additions & 110 deletions tests/test_local_server.py
@@ -227,113 +227,3 @@ def test_dot_operator_server_fields_container(local_server):
out = add.outputs.fields_container()
assert np.allclose(out[0].scoping.ids, [1, 2])
assert np.allclose(out[0].data, -field.data)


def test_add_operator_server_fields_container(local_server):
field = dpf.core.fields_factory.create_3d_vector_field(2, server=local_server)
field.data = [0.0, 1.0, 2.0, 3.0, 4.0, 5.0]
field.scoping.ids = [1, 2]

fc = dpf.core.fields_container_factory.over_time_freq_fields_container(
[field, field], server=local_server
)

# operator with field out
forward = ops.utility.forward_field(field, server=local_server)
add = fc + forward
assert type(add) == ops.math.add_fc
out = add.outputs.fields_container()
assert len(out) == 2
assert np.allclose(out[0].scoping.ids, [1, 2])
assert np.allclose(out[0].data, np.array(field.data) * 2.0)

# fc + list
add = fc + [0.0, 1.0, 2.0]
assert type(add) == ops.math.add_fc
out = add.outputs.fields_container()
assert len(out) == 2
assert np.allclose(out[0].scoping.ids, [1, 2])
assert np.allclose(out[0].data, field.data + np.array([[0.0, 1.0, 2.0], [0.0, 1.0, 2.0]]))

# fc + float
add = fc + 1.0
assert type(add) == ops.math.add_fc
out = add.outputs.fields_container()
assert np.allclose(out[0].scoping.ids, [1, 2])
assert np.allclose(out[0].data, np.array([[1.0, 2.0, 3.0], [4.0, 5.0, 6.0]]))


def test_minus_operator_server_fields_container(local_server):
field = dpf.core.fields_factory.create_3d_vector_field(2, server=local_server)
field.data = [0.0, 1.0, 2.0, 3.0, 4.0, 5.0]
field.scoping.ids = [1, 2]

fc = dpf.core.fields_container_factory.over_time_freq_fields_container(
[field, field], server=local_server
)

# operator with field out
forward = ops.utility.forward_field(field, server=local_server)
add = fc - forward
assert type(add) == ops.math.minus_fc
out = add.outputs.fields_container()
assert len(out) == 2
assert np.allclose(out[0].scoping.ids, [1, 2])
assert np.allclose(out[0].data, np.zeros((2, 3)))

# fc - list
add = fc - [0.0, 1.0, 2.0]
assert type(add) == ops.math.minus_fc
out = add.outputs.fields_container()
assert len(out) == 2
assert np.allclose(out[0].scoping.ids, [1, 2])
assert np.allclose(out[0].data, np.array([[0.0, 0.0, 0.0], [3.0, 3.0, 3.0]]))

# fc - float
add = fc - 1.0
assert type(add) == ops.math.minus_fc
out = add.outputs.fields_container()
assert np.allclose(out[0].scoping.ids, [1, 2])
assert np.allclose(out[0].data, np.array([[-1.0, 0.0, 1.0], [2.0, 3.0, 4.0]]))


def test_dot_operator_server_fields_container(local_server):
field = dpf.core.fields_factory.create_3d_vector_field(2, server=local_server)
field.data = [0.0, 1.0, 2.0, 3.0, 4.0, 5.0]
field.scoping.ids = [1, 2]

fc = dpf.core.fields_container_factory.over_time_freq_fields_container(
[field, field], server=local_server
)

# fc * op
forward = ops.utility.forward_field(field, server=local_server)
add = fc * forward
assert type(add) == ops.math.generalized_inner_product_fc
out = add.outputs.fields_container()
assert len(out) == 2
assert np.allclose(out[0].scoping.ids, [1, 2])
assert np.allclose(out[0].data, np.array([5.0, 50.0]))

# fc * field
add = fc * field
assert type(add) == ops.math.generalized_inner_product_fc
out = add.outputs.fields_container()
assert len(out) == 2
assert np.allclose(out[0].scoping.ids, [1, 2])
assert np.allclose(out[0].data, np.array([5.0, 50.0]))

# fc * list
add = fc * [0.0, 1.0, 2.0]
assert type(add) == ops.math.generalized_inner_product_fc
out = add.outputs.fields_container()
assert len(out) == 2
assert np.allclose(out[0].scoping.ids, [1, 2])
assert np.allclose(out[0].data, np.array([5.0, 14.0]))

# fc * float
add = fc * -1.0
assert type(add) == ops.math.generalized_inner_product_fc
out = add.outputs.fields_container()
assert np.allclose(out[0].scoping.ids, [1, 2])
assert np.allclose(out[0].data, -field.data)
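
The blocks removed above from tests/test_field.py and tests/test_local_server.py appear to be duplicate test definitions; test_dot_operator_server_fields_container, for example, is already defined in the part of the file that survives. A second def with the same name silently replaces the first, so only one copy ever ran, and ruff reports the pattern as pyflakes rule F811 (redefinition of unused name). A small sketch of what that rule catches, with made-up test names:

# F811 sketch: the second definition silently shadows the first,
# so the first test body is never collected or run.
def test_vector_add():
    assert [1, 2] + [3] == [1, 2, 3]

def test_vector_add():  # F811: redefinition of unused 'test_vector_add'
    assert [0] * 3 == [0, 0, 0]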
2 changes: 1 addition & 1 deletion tests/test_model.py
@@ -234,7 +234,7 @@ def test_result_not_dynamic(plate_msup):
assert len(fc) == 2
assert np.allclose(fc.time_freq_support.time_frequencies.data, np.array([0.115, 0.125]))
assert fc[0].unit == "Pa"
dis = model.results.displacement().eval()
model.results.displacement().eval()
dpf.core.settings.set_dynamic_available_results_capability(True)


4 changes: 2 additions & 2 deletions tests/test_multi_server.py
@@ -192,7 +192,7 @@ def test_model_cyc_support_multi_server(cyc_models):


def test_model_displacement_multi_server(transient_models):
tf = transient_models[0].metadata.time_freq_support
transient_models[0].metadata.time_freq_support
time_scoping = range(1, 3)
disp = transient_models[0].results.displacement()
disp.inputs.time_scoping(time_scoping)
@@ -226,7 +226,7 @@ def check_fc(fc, fc2):


def test_model_stress_multi_server(transient_models):
tf = transient_models[0].metadata.time_freq_support
transient_models[0].metadata.time_freq_support
time_scoping = range(1, 3)
disp = transient_models[0].results.stress()
disp.inputs.time_scoping(time_scoping)