Fix pre-commit #1254

Merged · 1 commit · Nov 7, 2023
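The changes below are mechanical formatting fixes: black-style line wrapping, double quotes, trailing commas, and whitespace/end-of-file cleanup. A minimal sketch of reproducing the run locally, assuming a standard pre-commit setup with black (black==23.9.1 is pinned in requirements/requirements_build.txt; the actual hook list lives in .pre-commit-config.yaml, which is not part of this diff):

    # Hedged sketch: re-run the formatters the way CI presumably does.
    # Assumes pre-commit and black are installed, e.g. pip install pre-commit black==23.9.1
    import subprocess

    # Run every configured hook against all tracked files, not only staged ones.
    subprocess.run(["pre-commit", "run", "--all-files"], check=False)

    # Or apply black directly to the folders touched by this PR.
    subprocess.run(["black", ".ci", "src/ansys/dpf/core", "tests"], check=False)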
16 changes: 8 additions & 8 deletions .ci/build_wheel.py
@@ -2,7 +2,6 @@
# Input can be one of ["any", "win", "manylinux1", "manylinux_2_17"]

import argparse
-import pathlib
import subprocess
import os
import sys
@@ -14,18 +13,20 @@
"any": "any",
"win": "win_amd64",
"manylinux1": "manylinux1_x86_64",
"manylinux_2_17": "manylinux_2_17_x86_64"
"manylinux_2_17": "manylinux_2_17_x86_64",
}

argParser = argparse.ArgumentParser()
argParser.add_argument("-p", "--platform", help="platform")
argParser.add_argument("-w", "--wheelhouse", help="platform", action='store_true')
argParser.add_argument("-w", "--wheelhouse", help="platform", action="store_true")

args = argParser.parse_args()

if args.platform not in supported_platforms:
raise ValueError(f"Platform {args.platform} is not supported. "
f"Supported platforms are: {list(supported_platforms.keys())}")
raise ValueError(
f"Platform {args.platform} is not supported. "
f"Supported platforms are: {list(supported_platforms.keys())}"
)
else:
requested_platform = supported_platforms[args.platform]
print(requested_platform)
@@ -35,7 +36,7 @@
# win: move .so binaries out before building
# lin: move .dll binaries out before building
with tempfile.TemporaryDirectory() as tmpdirname:
-    print('Created temporary directory: ', tmpdirname)
+    print("Created temporary directory: ", tmpdirname)

# Create the temporary build-opts.cfg
build_opts_path = os.path.join(tmpdirname, "build-opts.cfg")
@@ -45,8 +46,7 @@

# Move the binaries
gatebin_folder_path = os.path.join(
-        os.path.curdir,
-        os.path.join("src", "ansys", "dpf", "gatebin")
+        os.path.curdir, os.path.join("src", "ansys", "dpf", "gatebin")
)
binaries_to_move = []
moved = []
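For reference, the argparse interface shown above takes a platform key and an optional wheelhouse flag; a hedged invocation sketch, assuming the script is run from the repository root:

    # Hedged usage sketch for .ci/build_wheel.py; the platform key must be one of
    # the supported_platforms keys ("any", "win", "manylinux1", "manylinux_2_17").
    import subprocess

    subprocess.run(
        ["python", ".ci/build_wheel.py", "--platform", "manylinux_2_17", "--wheelhouse"],
        check=True,
    )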
13 changes: 7 additions & 6 deletions .ci/update_dpf_dependencies.py
@@ -37,18 +37,17 @@
destination = os.path.join(core_path, "src")
print(f"into {destination}")
latest_wheel = max(glob.glob(dist_path), key=os.path.getctime)
-    with zipfile.ZipFile(latest_wheel, 'r') as wheel:
+    with zipfile.ZipFile(latest_wheel, "r") as wheel:
for file in wheel.namelist():
# print(file)
-            if file.startswith('ansys/'):
+            if file.startswith("ansys/"):
wheel.extract(
file,
path=destination,
)
print("Done updating ansys-grpc-dpf")
else:
print(f"{grpc_path_key} environment variable is not defined. "
"Cannot update ansys-grpc-dpf.")
print(f"{grpc_path_key} environment variable is not defined. " "Cannot update ansys-grpc-dpf.")

if gate_path is not None:
# Update ansys-dpf-gate
@@ -78,5 +77,7 @@
)
print(f"Done updating ansys-dpf-gatebin for {platform.system()}")
else:
print(f"{gate_path_key} environment variable is not defined. "
"Cannot update ansys-dpf-gate or ansys-dpf-gatebin.")
print(
f"{gate_path_key} environment variable is not defined. "
"Cannot update ansys-dpf-gate or ansys-dpf-gatebin."
)
2 changes: 1 addition & 1 deletion requirements/requirements_build.txt
@@ -1,4 +1,4 @@
black==23.9.1
+build==1.0.3
chevron==0.14.0
wheel==0.41.0
-build==1.0.3
2 changes: 1 addition & 1 deletion requirements/requirements_install.txt
@@ -2,4 +2,4 @@ importlib-metadata==6.8.0
numpy==1.25.2
packaging==23.1
psutil==5.9.5
-tqdm==4.66.1
+tqdm==4.66.1
4 changes: 2 additions & 2 deletions src/ansys/dpf/core/_version.py
@@ -27,8 +27,8 @@ def __getitem__(self, item):
if len(item) == 3:
return self.legacy_version_map[item]
else:
-            split = item.split('.')
-            return split[0]+'R'+split[1]
+            split = item.split(".")
+            return split[0] + "R" + split[1]


server_to_ansys_version = ServerToAnsysVersion()
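For reference, the __getitem__ reformatted above maps a DPF server version string to an ANSYS release label; a short usage sketch (both expected values are asserted in tests/test_checkversion.py further down in this diff):

    from ansys.dpf.core._version import server_to_ansys_version

    # Three-character keys go through the legacy map; newer keys are split on "."
    # and rejoined with "R".
    print(server_to_ansys_version["1.0"])     # "2021R1"
    print(server_to_ansys_version["2099.9"])  # "2099R9"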
2 changes: 1 addition & 1 deletion src/ansys/dpf/core/examples/downloads.py
@@ -1559,7 +1559,7 @@ def download_cfx_heating_coil(
should_upload,
server,
return_local_path,
-        )
+        ),
}


2 changes: 1 addition & 1 deletion src/ansys/dpf/core/field.py
@@ -241,7 +241,7 @@ def as_local_field(self):

""" # noqa: E501
# Do not copy data if using InProcess server
-        if self._server.client is not None:
+        if self._server.client is not None:
return _LocalField(self)
else:
return self
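The guard above only copies data when a gRPC client is present; with an InProcess server the field itself is returned. A hedged usage sketch, assuming ansys-dpf-core is installed and a result file is available ("model.rst" is a hypothetical path), mirroring test_field_no_inprocess_localfield at the bottom of tests/test_field.py:

    from ansys.dpf import core as dpf

    model = dpf.Model("model.rst")  # hypothetical result file
    field = model.results.stress().outputs.fields_container()[0]
    with field.as_local_field() as local_field:
        # gRPC server: local_field is a _LocalField copy of the data.
        # InProcess server: there is no client, so local_field is the field itself.
        print(local_field)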
16 changes: 7 additions & 9 deletions src/ansys/dpf/core/server_types.py
@@ -20,11 +20,7 @@
import ansys.dpf.core as core
from ansys.dpf.core.check_version import server_meet_version
from ansys.dpf.core import errors, server_factory
-from ansys.dpf.core._version import (
-    min_server_version,
-    server_to_ansys_version,
-    __version__
-)
+from ansys.dpf.core._version import min_server_version, server_to_ansys_version, __version__
from ansys.dpf.core import server_context
from ansys.dpf.gate import load_api, data_processing_grpcapi

@@ -339,10 +335,12 @@ def check_ansys_grpc_dpf_version(server, timeout):
)
LOG.debug("Established connection to DPF gRPC")
if version.parse(server.version) < version.parse(min_server_version):
raise ValueError(f"Error connecting to DPF LegacyGrpcServer with version {server.version} "
f"(ANSYS {server_to_ansys_version[server.version]}): "
f"ansys-dpf-core {__version__} does not support DPF servers below "
f"{min_server_version} ({server_to_ansys_version[min_server_version]}).")
raise ValueError(
f"Error connecting to DPF LegacyGrpcServer with version {server.version} "
f"(ANSYS {server_to_ansys_version[server.version]}): "
f"ansys-dpf-core {__version__} does not support DPF servers below "
f"{min_server_version} ({server_to_ansys_version[min_server_version]})."
)


class GhostServer:
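The error above is raised when the connected server is older than the minimum supported version; the comparison uses version.parse, presumably from the packaging library (packaging==23.1 is pinned in requirements/requirements_install.txt). A small illustrative sketch with made-up version numbers:

    from packaging import version

    server_version = "4.0"       # illustrative, not a real server response
    min_server_version = "3.0"   # illustrative; the real constant lives in ansys.dpf.core._version
    if version.parse(server_version) < version.parse(min_server_version):
        raise ValueError("DPF server too old for this ansys-dpf-core version")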
1 change: 1 addition & 0 deletions tests/test_checkversion.py
@@ -101,5 +101,6 @@ def test_find_outdated_ansys_version():

def test_version():
    from ansys.dpf.core._version import server_to_ansys_version
+
assert server_to_ansys_version["1.0"] == "2021R1"
assert server_to_ansys_version["2099.9"] == "2099R9"
31 changes: 23 additions & 8 deletions tests/test_field.py
@@ -545,12 +545,16 @@ def test_set_support_mesh(simple_bar):

def test_local_field_append(server_type_remote_process):
num_entities = 400
-    field_to_local = dpf.core.fields_factory.create_3d_vector_field(num_entities, server=server_type_remote_process)
+    field_to_local = dpf.core.fields_factory.create_3d_vector_field(
+        num_entities, server=server_type_remote_process
+    )
with field_to_local.as_local_field() as f:
for i in range(1, num_entities + 1):
f.append([0.1 * i, 0.2 * i, 0.3 * i], i)
assert f._is_set is True
-    field = dpf.core.fields_factory.create_3d_vector_field(num_entities, server=server_type_remote_process)
+    field = dpf.core.fields_factory.create_3d_vector_field(
+        num_entities, server=server_type_remote_process
+    )
for i in range(1, num_entities + 1):
field.append([0.1 * i, 0.2 * i, 0.3 * i], i)

@@ -567,7 +571,9 @@ def test_local_elemental_nodal_field_append(server_type_remote_process):
with field_to_local.as_local_field() as f:
for i in range(1, num_entities + 1):
f.append([[0.1 * i, 0.2 * i, 0.3 * i], [0.1 * i, 0.2 * i, 0.3 * i]], i)
-    field = dpf.core.fields_factory.create_3d_vector_field(num_entities, server=server_type_remote_process)
+    field = dpf.core.fields_factory.create_3d_vector_field(
+        num_entities, server=server_type_remote_process
+    )
for i in range(1, num_entities + 1):
field.append([[0.1 * i, 0.2 * i, 0.3 * i], [0.1 * i, 0.2 * i, 0.3 * i]], i)

@@ -590,12 +596,16 @@

def test_local_array_field_append(server_type_remote_process):
num_entities = 400
-    field_to_local = dpf.core.fields_factory.create_3d_vector_field(num_entities, server=server_type_remote_process)
+    field_to_local = dpf.core.fields_factory.create_3d_vector_field(
+        num_entities, server=server_type_remote_process
+    )
with field_to_local.as_local_field() as f:
for i in range(1, num_entities + 1):
f.append(np.array([0.1 * i, 0.2 * i, 0.3 * i]), i)
assert f._is_set is True
-    field = dpf.core.fields_factory.create_3d_vector_field(num_entities, server=server_type_remote_process)
+    field = dpf.core.fields_factory.create_3d_vector_field(
+        num_entities, server=server_type_remote_process
+    )
for i in range(1, num_entities + 1):
field.append(np.array([0.1 * i, 0.2 * i, 0.3 * i]), i)

@@ -612,7 +622,9 @@ def test_local_elemental_nodal_array_field_append(server_type_remote_process):
with field_to_local.as_local_field() as f:
for i in range(1, num_entities + 1):
f.append(np.array([[0.1 * i, 0.2 * i, 0.3 * i], [0.1 * i, 0.2 * i, 0.3 * i]]), i)
-    field = dpf.core.fields_factory.create_3d_vector_field(num_entities, server=server_type_remote_process)
+    field = dpf.core.fields_factory.create_3d_vector_field(
+        num_entities, server=server_type_remote_process
+    )
for i in range(1, num_entities + 1):
field.append(np.array([[0.1 * i, 0.2 * i, 0.3 * i], [0.1 * i, 0.2 * i, 0.3 * i]]), i)

@@ -691,7 +703,9 @@ def test_auto_delete_field_local(server_type_remote_process):
num_entities, location=dpf.core.locations.elemental_nodal, server=server_type_remote_process
)
field_to_local.append([3.0, 4.0, 5.0], 1)
-    fc = dpf.core.fields_container_factory.over_time_freq_fields_container([field_to_local], server=server_type_remote_process)
+    fc = dpf.core.fields_container_factory.over_time_freq_fields_container(
+        [field_to_local], server=server_type_remote_process
+    )
field_to_local = None
with fc[0].as_local_field() as f:
assert np.allclose(f.get_entity_data(0), [3.0, 4.0, 5.0])
@@ -1275,9 +1289,10 @@ def test_dot_operator_field():
assert np.allclose(out.scoping.ids, [1, 2])
    assert np.allclose(out.data, -field.data)

+
def test_field_no_inprocess_localfield(server_in_process, allkindofcomplexity):
model = dpf.core.Model(allkindofcomplexity, server=server_in_process)
field = model.results.stress().outputs.fields_container()[0]

with field.as_local_field() as local_field:
-        assert field == local_field
+        assert field == local_field
3 changes: 1 addition & 2 deletions tests/test_server.py
@@ -306,7 +306,6 @@ def version(self):

print(MockServer(remote_server).version)
with pytest.raises(
-        ValueError,
-        match="Error connecting to DPF LegacyGrpcServer with version 1.0"
+        ValueError, match="Error connecting to DPF LegacyGrpcServer with version 1.0"
):
dpf.core.server_types.check_ansys_grpc_dpf_version(MockServer(remote_server), timeout=2.0)