capytaine-2.1-cp39-cp39-win_amd64.whl → capytaine-2.2.1-cp39-cp39-win_amd64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- capytaine/__about__.py +1 -1
- capytaine/__init__.py +10 -7
- capytaine/bem/engines.py +2 -2
- capytaine/bem/problems_and_results.py +17 -9
- capytaine/bem/solver.py +71 -28
- capytaine/bodies/bodies.py +133 -24
- capytaine/green_functions/delhommeau.py +103 -51
- capytaine/green_functions/libs/Delhommeau_float32.cp39-win_amd64.dll.a +0 -0
- capytaine/green_functions/libs/Delhommeau_float32.cp39-win_amd64.pyd +0 -0
- capytaine/green_functions/libs/Delhommeau_float64.cp39-win_amd64.dll.a +0 -0
- capytaine/green_functions/libs/Delhommeau_float64.cp39-win_amd64.pyd +0 -0
- capytaine/io/mesh_loaders.py +49 -24
- capytaine/io/meshio.py +4 -1
- capytaine/io/xarray.py +17 -7
- capytaine/matrices/block.py +4 -2
- capytaine/matrices/linear_solvers.py +2 -3
- capytaine/matrices/low_rank.py +3 -1
- capytaine/meshes/clipper.py +3 -3
- capytaine/meshes/collections.py +13 -2
- capytaine/meshes/meshes.py +128 -4
- capytaine/meshes/predefined/cylinders.py +2 -2
- capytaine/meshes/properties.py +77 -0
- capytaine/post_pro/rao.py +1 -1
- capytaine/tools/cache_on_disk.py +3 -1
- capytaine/tools/symbolic_multiplication.py +23 -4
- capytaine/ui/vtk/body_viewer.py +2 -0
- capytaine-2.2.1.dist-info/DELVEWHEEL +2 -0
- capytaine-2.2.1.dist-info/METADATA +754 -0
- {capytaine-2.1.dist-info → capytaine-2.2.1.dist-info}/RECORD +33 -37
- capytaine/green_functions/libs/XieDelhommeau_float32.cp39-win_amd64.dll.a +0 -0
- capytaine/green_functions/libs/XieDelhommeau_float32.cp39-win_amd64.pyd +0 -0
- capytaine/green_functions/libs/XieDelhommeau_float64.cp39-win_amd64.dll.a +0 -0
- capytaine/green_functions/libs/XieDelhommeau_float64.cp39-win_amd64.pyd +0 -0
- capytaine-2.1.dist-info/DELVEWHEEL +0 -2
- capytaine-2.1.dist-info/METADATA +0 -756
- {capytaine-2.1.dist-info → capytaine-2.2.1.dist-info}/LICENSE +0 -0
- {capytaine-2.1.dist-info → capytaine-2.2.1.dist-info}/WHEEL +0 -0
- {capytaine-2.1.dist-info → capytaine-2.2.1.dist-info}/entry_points.txt +0 -0
- capytaine.libs/{.load-order-capytaine-2.1 → .load-order-capytaine-2.2.1} +2 -2
capytaine/io/mesh_loaders.py
CHANGED
@@ -11,7 +11,7 @@ import numpy as np
 from capytaine.meshes.meshes import Mesh
 from capytaine.meshes.symmetric import ReflectionSymmetricMesh
 from capytaine.meshes.geometry import xOz_Plane, yOz_Plane
-from capytaine.tools.optional_imports import import_optional_dependency
+from capytaine.tools.optional_imports import import_optional_dependency, silently_import_optional_dependency

 LOG = logging.getLogger(__name__)

@@ -24,14 +24,15 @@ def _check_file(filename, name=None):
     return


-def load_mesh(filename, file_format=None, name=None):
+def load_mesh(mesh, file_format=None, name=None):
     """Driver function that loads every mesh file format known by meshmagick.
     Dispatch to one of the other function depending on file_format.

     Parameters
     ----------
-
-
+    mesh: str or meshio object
+        Either the path to the mesh on disk
+        or a meshio object to be loaded with the dedicated method
     file_format: str, optional
         format of the mesh defined in the extension_dict dictionary
     name: str, optional
@@ -42,6 +43,13 @@ def load_mesh(filename, file_format=None, name=None):
     Mesh or SymmetricMesh
         the loaded mesh
     """
+    meshio = silently_import_optional_dependency("meshio")
+    if meshio is not None and isinstance(mesh, meshio._mesh.Mesh):
+        from capytaine.io.meshio import load_from_meshio
+        return load_from_meshio(mesh, name=name)
+
+    filename = mesh
+
     _check_file(filename)

     if file_format is None:
@@ -53,7 +61,8 @@ def load_mesh(filename, file_format=None, name=None):

     loader = extension_dict[file_format]

-    if name is None:
+    if name is None:
+        name = filename

     return loader(filename, name)

@@ -712,6 +721,7 @@ def load_GDF(filename, name=None):
     npan = int(gdf_file.readline().split()[0])
     faces_vertices = np.genfromtxt(gdf_file)

+    faces_vertices = faces_vertices.reshape(-1, 3)
     vertices, indices = np.unique(faces_vertices, axis=0, return_inverse=True)
     faces = indices.reshape(-1, 4)

@@ -810,31 +820,46 @@ def load_MSH(filename, name=None):

     _check_file(filename)

-
-
+    try:
+        meshio = import_optional_dependency("meshio")
+    except:
+        with open(filename, 'r') as file:
+            data = file.read()
+        version = float(re.search(r'\$MeshFormat\n(\d.\d).*\n\$EndMeshFormat', data, re.DOTALL).groups()[0])
+
+        if 4 <= version < 5:
+            message = (
+                f"Meshio is required to read MSH file format version 4. "
+                f"Use pip or conda to install Meshio."
+            )
+            raise ImportError(message) from None
+        else:
+            nb_nodes, nodes_data = re.search(r'\$Nodes\n(\d+)\n(.+)\$EndNodes', data, re.DOTALL).groups()
+            nb_elts, elts_data = re.search(r'\$Elements\n(\d+)\n(.+)\$EndElements', data, re.DOTALL).groups()

-
-
+            vertices = np.asarray(list(map(float, nodes_data.split())), dtype=float).reshape((-1, 4))[:, 1:]
+            vertices = np.ascontiguousarray(vertices)
+            faces = []

-
-
-
+            # Triangles
+            for tri_elt in re.findall(r'(^\d+\s2(?:\s\d+)+?$)', elts_data, re.MULTILINE):
+                tri_elt = list(map(int, tri_elt.split()))
+                triangle = tri_elt[-3:]
+                triangle.append(triangle[0])
+                faces.append(triangle)

-
-
-
-
-        triangle.append(triangle[0])
-        faces.append(triangle)
+            for quad_elt in re.findall(r'(^\d+\s3(?:\s\d+)+?$)', elts_data, re.MULTILINE):
+                quad_elt = list(map(int, quad_elt.split()))
+                quadrangle = quad_elt[-4:]
+                faces.append(quadrangle)

-
-        quad_elt = list(map(int, quad_elt.split()))
-        quadrangle = quad_elt[-4:]
-        faces.append(quadrangle)
+            faces = np.asarray(faces, dtype=int) - 1

-
+            return Mesh(vertices, faces, name)

-
+    msh_mesh = meshio.read(filename)
+    from capytaine.io.meshio import load_from_meshio
+    return load_from_meshio(msh_mesh, name)


 def load_MED(filename, name=None):
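Note (usage sketch, not part of the package diff): with the `load_mesh` change above, a `meshio` mesh object can now be passed directly and is dispatched to `capytaine.io.meshio.load_from_meshio`; the in-memory quad below is purely illustrative.

    import numpy as np
    import meshio
    from capytaine.io.mesh_loaders import load_mesh

    # A single quad panel built directly in memory with meshio.
    points = np.array([[0.0, 0.0, -1.0],
                       [1.0, 0.0, -1.0],
                       [1.0, 1.0, -1.0],
                       [0.0, 1.0, -1.0]])
    cells = [("quad", np.array([[0, 1, 2, 3]]))]
    meshio_mesh = meshio.Mesh(points, cells)

    # Dispatched to load_from_meshio instead of the file-based loaders.
    capytaine_mesh = load_mesh(meshio_mesh, name="single_panel")
    print(capytaine_mesh.nb_faces)

A plain file path still works as before, with the loader chosen from the file extension.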
capytaine/io/meshio.py
CHANGED
@@ -32,4 +32,7 @@ def load_from_meshio(mesh, name=None):
     if name is None:
         name = f'mesh_from_meshio_{next(Mesh._ids)}'

-
+    mesh = Mesh(vertices=mesh.points, faces=all_faces_as_quads(mesh.cells_dict), name=name)
+    mesh.heal_mesh()
+
+    return mesh
capytaine/io/xarray.py
CHANGED
@@ -57,6 +57,10 @@ def problems_from_dataset(dataset: xr.Dataset,
     if isinstance(bodies, FloatingBody):
         bodies = [bodies]

+    # Should be done before looking for `frequency_keys`, otherwise
+    # frequencies provided as a scalar dimension will be skipped.
+    dataset = _unsqueeze_dimensions(dataset)
+
     # SANITY CHECKS
     assert len(list(set(body.name for body in bodies))) == len(bodies), \
         "All bodies should have different names."
@@ -80,8 +84,6 @@ def problems_from_dataset(dataset: xr.Dataset,
                          "Received {}".format(frequency_keys))
     # END SANITY CHECKS

-    dataset = _unsqueeze_dimensions(dataset)
-
     if len(frequency_keys) == 0:
         freq_type = "omega"
         freq_range = [_default_parameters['omega']]
@@ -110,11 +112,19 @@ def problems_from_dataset(dataset: xr.Dataset,
     if wave_direction_range is not None:
         for freq, wave_direction, water_depth, body_name, forward_speed, rho, g \
                 in product(freq_range, wave_direction_range, water_depth_range, body_range, forward_speed_range, rho_range, g_range):
-
-
-
-
-
+            if freq not in {0.0, np.inf}:
+                problems.append(
+                    DiffractionProblem(body=body_range[body_name], **{freq_type: freq},
+                                       wave_direction=wave_direction, water_depth=water_depth,
+                                       forward_speed=forward_speed, rho=rho, g=g)
+                )
+            elif freq in {0.0, np.inf} and radiating_dofs is not None:
+                # Diffraction problems are not defined for 0 and infinite frequency.
+                # But we don't want the whole batch to fail, as these frequencies are there for the radiation problems.
+                # The excitation force will be NaN for these frequencies in the resulting dataset.
+                pass
+            else:
+                raise ValueError("Zero and infinite frequencies are not defined when solving only diffraction problems.")

     if radiating_dofs is not None:
         for freq, radiating_dof, water_depth, body_name, forward_speed, rho, g \
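Note (usage sketch, not part of the package diff): the new branching above means that zero and infinite frequencies in a test matrix are kept for radiation problems and silently skipped for diffraction problems. The sphere body below is a schematic stand-in.

    import numpy as np
    import xarray as xr
    import capytaine as cpt
    from capytaine.io.xarray import problems_from_dataset

    body = cpt.FloatingBody(mesh=cpt.mesh_sphere().immersed_part(),
                            dofs=cpt.rigid_body_dofs())

    test_matrix = xr.Dataset(coords={
        "omega": [0.0, 1.0, np.inf],   # zero and infinite frequencies allowed
        "wave_direction": [0.0],
        "radiating_dof": list(body.dofs),
    })
    problems = problems_from_dataset(test_matrix, body)
    # Radiation problems are created for all three frequencies;
    # diffraction problems only for omega = 1.0.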
capytaine/matrices/block.py
CHANGED
@@ -185,7 +185,9 @@ class BlockMatrix:
         self._put_in_full_matrix(full_matrix)
         return full_matrix

-    def __array__(self, dtype=None):
+    def __array__(self, dtype=None, copy=True):
+        if not copy:
+            raise ValueError("Making an ndarray out of a BlockMatrix requires copy")
         return self.full_matrix(dtype=dtype)

     def no_toeplitz(self):
@@ -587,4 +589,4 @@ class BlockMatrix:
         this_block = self
         for index in path:
             this_block = this_block.all_blocks[index, index]
-        return this_block
+        return this_block
capytaine/matrices/linear_solvers.py
CHANGED
@@ -3,14 +3,13 @@
 They are based on numpy solvers with a thin layer for the handling of Hierarchical Toeplitz matrices.
 """
 # Copyright (C) 2017-2019 Matthieu Ancellin
-# See LICENSE file at <https://github.com/
+# See LICENSE file at <https://github.com/capytaine/capytaine>

 import logging

 import numpy as np
 from scipy import linalg as sl
 from scipy.sparse import linalg as ssl
-from itertools import accumulate, chain

 from capytaine.matrices.block import BlockMatrix
 from capytaine.matrices.block_toeplitz import BlockSymmetricToeplitzMatrix, BlockCirculantMatrix
@@ -27,7 +26,7 @@ def solve_directly(A, b):
         blocks_of_diagonalization = A.block_diagonalize()
         fft_of_rhs = np.fft.fft(np.reshape(b, (A.nb_blocks[0], A.block_shape[0])), axis=0)
         try: # Try to run it as vectorized numpy arrays.
-            fft_of_result = np.linalg.solve(blocks_of_diagonalization, fft_of_rhs)
+            fft_of_result = np.linalg.solve(blocks_of_diagonalization, fft_of_rhs[..., np.newaxis])[..., 0]
         except np.linalg.LinAlgError: # Or do the same thing with list comprehension.
             fft_of_result = np.array([solve_directly(block, vec) for block, vec in zip(blocks_of_diagonalization, fft_of_rhs)])
         result = np.fft.ifft(fft_of_result, axis=0).reshape((A.shape[1],))
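Note (illustration, not capytaine code): the `[..., np.newaxis]` change follows NumPy 2's stricter rules for `numpy.linalg.solve`, where a stack of right-hand-side vectors must be passed as column vectors alongside a stack of matrices.

    import numpy as np

    rng = np.random.default_rng(0)
    A = rng.standard_normal((3, 4, 4))   # a stack of 3 square matrices
    b = rng.standard_normal((3, 4))      # one right-hand-side vector per matrix

    # Add a trailing axis so each vector is a (4, 1) column, then drop it again.
    x = np.linalg.solve(A, b[..., np.newaxis])[..., 0]

    assert np.allclose(np.einsum("nij,nj->ni", A, x), b)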
capytaine/matrices/low_rank.py
CHANGED
@@ -318,7 +318,9 @@ class LowRankMatrix:
         else:
             return self.left_matrix @ self.right_matrix

-    def __array__(self, dtype=None):
+    def __array__(self, dtype=None, copy=True):
+        if not copy:
+            raise ValueError("Making an ndarray out of a BlockMatrix requires copy")
         return self.full_matrix(dtype=dtype)

     @property
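Note (illustration, not capytaine code): NumPy 2 extends the `__array__` protocol with a `copy` keyword; the two changes above declare that densifying these lazy matrix types always copies. A minimal stand-in class with the same contract:

    import numpy as np

    class LazyDense:
        """Stand-in for a matrix-like object whose dense form is always a fresh copy."""
        def __init__(self, data):
            self._data = np.asarray(data)

        def __array__(self, dtype=None, copy=True):
            if not copy:
                # No zero-copy dense view exists, mirroring BlockMatrix/LowRankMatrix above.
                raise ValueError("Converting to an ndarray requires a copy")
            return np.array(self._data, dtype=dtype)

    m = LazyDense([[1.0, 2.0], [3.0, 4.0]])
    print(np.array(m))               # densifies by copying
    print(m.__array__(dtype=float))  # explicit densification, also a copy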
capytaine/meshes/clipper.py
CHANGED
@@ -30,14 +30,14 @@ def clip(source_mesh: Mesh, plane: Plane, vicinity_tol=1e-12, name=None):
     """
     vertices_data = _vertices_positions_wrt_plane(source_mesh, plane, vicinity_tol)

-
-        vertices_data['vertices_above_mask']
+    nb_vertices_strictly_above_plane = np.count_nonzero(
+        vertices_data['vertices_above_mask']
     )
     nb_vertices_below_or_on_plane = np.count_nonzero(
         vertices_data['vertices_below_mask'] | vertices_data['vertices_on_mask']
     )

-    if
+    if nb_vertices_strictly_above_plane == source_mesh.nb_vertices:
         LOG.warning(f"Clipping {source_mesh.name} by {plane}: all vertices are removed.")
         clipped_mesh = Mesh(None, None)
         clipped_mesh._clipping_data = dict(faces_ids=[])
capytaine/meshes/collections.py
CHANGED
@@ -103,14 +103,14 @@ class CollectionOfMeshes(ClippableMixin, SurfaceIntegralsMixin, Abstract3DObject

     def path_to_leaf(self):
         """
-        Builds a list of lists of paths from the collection corresponding to the
+        Builds a list of lists of paths from the collection corresponding to the
        root of the tree to the submeshes corresponding to the leaves
        """
        ptl = []
        for i, mesh in enumerate(self):
            for path in mesh.path_to_leaf():
                ptl.append([i] + path)
-        return ptl
+        return ptl

     def copy(self, name=None):
         from copy import deepcopy
@@ -124,6 +124,11 @@ class CollectionOfMeshes(ClippableMixin, SurfaceIntegralsMixin, Abstract3DObject
         for mesh in self:
             mesh.heal_mesh(closed_mesh=closed_mesh)

+    @inplace_transformation
+    def with_normal_vector_going_down(self):
+        for mesh in self:
+            mesh.with_normal_vector_going_down()
+
     ##############
     # Properties #
     ##############
@@ -311,3 +316,9 @@ class CollectionOfMeshes(ClippableMixin, SurfaceIntegralsMixin, Abstract3DObject

     def show_matplotlib(self, *args, **kwargs):
         self.merged().show_matplotlib(*args, **kwargs)
+
+    def lowest_lid_position(self, *args, **kwargs):
+        return self.merged().lowest_lid_position(*args, **kwargs)
+
+    def generate_lid(self, *args, **kwargs):
+        return self.merged().generate_lid(*args, **kwargs)
capytaine/meshes/meshes.py
CHANGED
@@ -9,8 +9,8 @@ from itertools import count

 import numpy as np

-from capytaine.meshes.geometry import Abstract3DObject, ClippableMixin, Plane, inplace_transformation
-from capytaine.meshes.properties import compute_faces_properties
+from capytaine.meshes.geometry import Abstract3DObject, ClippableMixin, Plane, inplace_transformation, xOy_Plane
+from capytaine.meshes.properties import compute_faces_properties, connected_components, connected_components_of_waterline
 from capytaine.meshes.surface_integrals import SurfaceIntegralsMixin
 from capytaine.meshes.quality import (merge_duplicates, heal_normals, remove_unused_vertices,
                                       heal_triangles, remove_degenerated_faces)
@@ -470,8 +470,9 @@ class Mesh(ClippableMixin, SurfaceIntegralsMixin, Abstract3DObject):
         Other parameters are passed to Poly3DCollection.
         """
         matplotlib = import_optional_dependency("matplotlib")
-
-
+        import importlib
+        plt = importlib.import_module("matplotlib.pyplot")
+        cm = importlib.import_module("matplotlib.cm")

         mpl_toolkits = import_optional_dependency("mpl_toolkits", package_name="matplotlib")
         Poly3DCollection = mpl_toolkits.mplot3d.art3d.Poly3DCollection
@@ -744,3 +745,126 @@ class Mesh(ClippableMixin, SurfaceIntegralsMixin, Abstract3DObject):
         if closed_mesh:
             self.heal_normals()
         return self
+
+    ##########
+    # Lids   #
+    ##########
+
+    def lowest_lid_position(self, omega_max, *, g=9.81):
+        z_lid = 0.0
+        for comp in connected_components(self):
+            for ccomp in connected_components_of_waterline(comp):
+                x_span = ccomp.vertices[:, 0].max() - ccomp.vertices[:, 0].min()
+                y_span = ccomp.vertices[:, 1].max() - ccomp.vertices[:, 1].min()
+                p = np.hypot(1/x_span, 1/y_span)
+                z_lid_comp = -np.arctanh(np.pi*g*p/omega_max**2) / (np.pi * p)
+                z_lid = min(z_lid, z_lid_comp)
+        return 0.9*z_lid  # Add a small safety margin
+
+    def generate_lid(self, z=0.0, faces_max_radius=None):
+        """
+        Return a mesh of the internal free surface of the body.
+
+        Parameters
+        ----------
+        z: float
+            Vertical position of the lid. Default: 0.0
+        faces_max_radius: float
+            resolution of the mesh of the lid.
+            Default: mean of hull mesh resolution.
+
+        Returns
+        -------
+        Mesh
+            lid of internal surface
+        """
+        from capytaine.meshes.predefined.rectangles import mesh_rectangle
+
+        clipped_hull_mesh = self.clipped(Plane(normal=(0, 0, 1), point=(0, 0, z)))
+        # Alternatively: could keep only faces below z without proper clipping,
+        # and it would work similarly.
+
+        if clipped_hull_mesh.nb_faces == 0:
+            return Mesh(None, None, name="lid for {}".format(self.name))
+
+        x_span = clipped_hull_mesh.vertices[:, 0].max() - clipped_hull_mesh.vertices[:, 0].min()
+        y_span = clipped_hull_mesh.vertices[:, 1].max() - clipped_hull_mesh.vertices[:, 1].min()
+        x_mean = (clipped_hull_mesh.vertices[:, 0].max() + clipped_hull_mesh.vertices[:, 0].min())/2
+        y_mean = (clipped_hull_mesh.vertices[:, 1].max() + clipped_hull_mesh.vertices[:, 1].min())/2
+
+        if faces_max_radius is None:
+            faces_max_radius = np.mean(clipped_hull_mesh.faces_radiuses)
+
+        candidate_lid_mesh = mesh_rectangle(
+            size=(1.1*y_span, 1.1*x_span),  # TODO Fix mesh_rectangle
+            faces_max_radius=faces_max_radius,
+            center=(x_mean, y_mean, z),
+            normal=(0.0, 0.0, -1.0),
+        )
+
+        candidate_lid_points = candidate_lid_mesh.vertices[:, 0:2]
+
+        hull_faces = clipped_hull_mesh.vertices[clipped_hull_mesh.faces, 0:2]
+        edges_of_hull_faces = hull_faces[:, [1, 2, 3, 0], :] - hull_faces[:, :, :]  # Vectors between two consecutive points in a face
+        # edges_of_hull_faces.shape = (nb_full_faces, 4, 2)
+        lid_points_in_local_coords = candidate_lid_points[:, np.newaxis, np.newaxis, :] - hull_faces[:, :, :]
+        # lid_points_in_local_coords.shape = (nb_candidate_lid_points, nb_full_faces, 4, 2)
+        side_of_hull_edges = (lid_points_in_local_coords[..., 0] * edges_of_hull_faces[..., 1]
+                              - lid_points_in_local_coords[..., 1] * edges_of_hull_faces[..., 0])
+        # side_of_hull_edges.shape = (nb_candidate_lid_points, nb_full_faces, 4)
+        point_is_above_panel = np.all(side_of_hull_edges <= 0, axis=-1) | np.all(side_of_hull_edges >= 0, axis=-1)
+        # point_is_above_panel.shape = (nb_candidate_lid_points, nb_full_faces)
+
+        # For all point in candidate_lid_points, and for all edges of all faces of
+        # the hull mesh, check on which side of the edge is the point by using a
+        # cross product.
+        # If a point on the same side of all edges of a face, then it is inside.
+
+        nb_panels_below_point = np.sum(point_is_above_panel, axis=-1)
+        needs_lid = (nb_panels_below_point % 2 == 1).nonzero()[0]
+
+        lid_faces = candidate_lid_mesh.faces[np.all(np.isin(candidate_lid_mesh.faces, needs_lid), axis=-1), :]
+
+        if len(lid_faces) == 0:
+            return Mesh(None, None, name="lid for {}".format(self.name))
+
+        lid_mesh = Mesh(candidate_lid_mesh.vertices, lid_faces, name="lid for {}".format(self.name))
+        lid_mesh.heal_mesh()
+
+        return lid_mesh
+
+    @inplace_transformation
+    def with_normal_vector_going_down(self):
+        # For lid meshes for irregular frequencies removal
+        if np.allclose(self.faces_normals[:, 2], np.ones((self.nb_faces,))):
+            # The mesh is horizontal with normal vectors going up
+            LOG.warning(f"Inverting the direction of the normal vectors of {self} to be downward.")
+            self.faces = self.faces[:, ::-1]
+        else:
+            return self
+
+    def _face_on_plane(self, i_face, plane):
+        return (
+            self.faces_centers[i_face, :] in plane
+            and plane.is_orthogonal_to(self.faces_normals[i_face, :])
+        )
+
+    def extract_lid(self, plane=xOy_Plane):
+        """
+        Split the mesh into a mesh of the hull and a mesh of the lid.
+        By default, the lid is composed of the horizontal faces on the z=0 plane.
+
+        Parameters
+        ----------
+        plane: Plane
+            The plane on which to look for lid faces.
+
+        Returns
+        -------
+        2-ple of Mesh
+            hull mesh and lid mesh
+        """
+        faces_on_plane = [i_face for i_face in range(self.nb_faces) if self._face_on_plane(i_face, plane)]
+        lid_mesh = self.extract_faces(faces_on_plane)
+        hull_mesh = self.extract_faces(list(set(range(self.nb_faces)) - set(faces_on_plane)))
+        return hull_mesh, lid_mesh
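Note (usage sketch, not part of the package diff): the new lid helpers are meant for irregular-frequency removal. The sphere hull and the frequency bound below are illustrative; `FloatingBody(lid_mesh=...)` and `rigid_body_dofs` follow the capytaine 2.2 API.

    import capytaine as cpt

    hull = cpt.mesh_sphere(radius=1.0, center=(0, 0, 0)).immersed_part()

    z_lid = hull.lowest_lid_position(omega_max=10.0)   # deepest lid still useful up to omega_max
    lid = hull.generate_lid(z=z_lid)                   # mesh of the internal free surface

    body = cpt.FloatingBody(mesh=hull, lid_mesh=lid, dofs=cpt.rigid_body_dofs())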
capytaine/meshes/predefined/cylinders.py
CHANGED
@@ -88,7 +88,7 @@ def mesh_disk(*, radius=1.0, center=(0, 0, 0), normal=(0, 0, 1),
     else:
         theta_range = np.linspace(0, _theta_max, ntheta+1)
         r_range = np.linspace(0.0, radius, nr+1)
-        nodes = np.array([(0, r*sin(t), r*cos(t)) for (r, t) in product(r_range, theta_range)])
+        nodes = np.array([(0, r*sin(t), -r*cos(t)) for (r, t) in product(r_range, theta_range)])
         panels = np.array([(j+i*(ntheta+1), j+1+i*(ntheta+1), j+1+(i+1)*(ntheta+1), j+(i+1)*(ntheta+1))
                            for (i, j) in product(range(0, nr), range(0, ntheta))])

@@ -170,7 +170,7 @@ def mesh_vertical_cylinder(*, length=10.0, radius=1.0, center=(0, 0, 0),
                 resolution=(nr, ntheta//2, nz), reflection_symmetry=False, axial_symmetry=False,
                 name=f"half_{name}", _theta_max=_theta_max/2)

-        mesh = ReflectionSymmetricMesh(half_cylinder, plane=
+        mesh = ReflectionSymmetricMesh(half_cylinder, plane=yOz_Plane, name=name)

     elif axial_symmetry:
capytaine/meshes/properties.py
CHANGED
@@ -4,7 +4,11 @@ Based on meshmagick <https://github.com/LHEEA/meshmagick> by François Rongère.
 # Copyright (C) 2017-2019 Matthieu Ancellin, based on the work of François Rongère
 # See LICENSE file at <https://github.com/mancellin/capytaine>

+from functools import reduce
+from itertools import chain
 import numpy as np
+from typing import List
+from numpy.typing import NDArray


 def compute_faces_properties(mesh):
@@ -197,3 +201,76 @@ def compute_connectivity(mesh):
             'v_f': v_f,
             'f_f': f_f,
             'boundaries': boundaries}
+
+def faces_in_group(faces: NDArray[np.integer], group: NDArray[np.integer]) -> NDArray[np.bool_]:
+    """Identification of faces with vertices within group.
+
+    Parameters
+    ----------
+    faces : NDArray[np.integer]
+        Mesh faces. Expecting a numpy array of shape N_faces x N_vertices_per_face.
+    group : NDArray[np.integer]
+        Group of connected vertices
+
+    Returns
+    -------
+    NDArray[np.bool]
+        Mask of faces containing vertices from the group
+    """
+    return np.any(np.isin(faces, group), axis=1)
+
+def clustering(faces: NDArray[np.integer]) -> List[NDArray[np.integer]]:
+    """Clustering of vertices per connected faces.
+
+    Parameters
+    ----------
+    faces : NDArray[np.integer]
+        Mesh faces. Expecting a numpy array of shape N_faces x N_vertices_per_face.
+
+    Returns
+    -------
+    list[NDArray[np.integer]]
+        Groups of connected vertices.
+    """
+    vert_groups: list[NDArray[np.integer]] = []
+    mask = np.ones(faces.shape[0], dtype=bool)
+    while np.any(mask):
+        # Consider faces whose vertices are not already identified in a group.
+        # Start new group by considering first face
+        remaining_faces = faces[mask]
+        group = remaining_faces[0]
+        rem_mask = np.ones(remaining_faces.shape[0], dtype=bool)
+        # Iterative update of vertices group. Output final result to frozenset
+        while not np.allclose(new:=faces_in_group(remaining_faces, group), rem_mask):
+            group = np.unique(remaining_faces[new])
+            rem_mask = new
+        else:
+            group = np.unique(remaining_faces[new])
+        vert_groups.append(group)
+        # Identify faces that have no vertices in current groups
+        mask = ~reduce(np.logical_or, [faces_in_group(faces, group) for group in vert_groups])
+    return vert_groups
+
+
+def connected_components(mesh):
+    """Returns a list of meshes that each corresponds to the a connected component in the original mesh.
+    Assumes the mesh is mostly conformal without duplicate vertices.
+    """
+    # Get connected vertices
+    vertices_components = clustering(mesh.faces)
+    # Verification
+    if sum(len(group) for group in vertices_components) != len(set(chain.from_iterable(vertices_components))):
+        raise ValueError("Error in connected components clustering. Some elements are duplicated")
+    # The components are found. The rest is just about retrieving the faces in each components.
+    faces_components = [np.argwhere(faces_in_group(mesh.faces, group)) for group in vertices_components]
+    components = [mesh.extract_faces(f) for f in faces_components]
+    return components
+
+
+def connected_components_of_waterline(mesh, z=0.0):
+    if np.any(mesh.vertices[:, 2] > z + 1e-8):
+        mesh = mesh.immersed_part(free_surface=z)
+    fs_vertices_indices = np.where(np.isclose(mesh.vertices[:, 2], z))[0]
+    fs_faces_indices = np.where(np.any(np.isin(mesh.faces, fs_vertices_indices), axis=1))[0]
+    crown_mesh = mesh.extract_faces(fs_faces_indices)
+    return connected_components(crown_mesh)
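Note (usage sketch, not part of the package diff): `connected_components` splits a merged mesh into its disjoint parts; it assumes a conformal mesh without duplicate vertices, hence the `heal_mesh()` call. The two-sphere geometry is illustrative.

    import capytaine as cpt
    from capytaine.meshes.properties import connected_components

    two_hulls = (cpt.mesh_sphere(center=(0.0, 0.0, 0.0)) + cpt.mesh_sphere(center=(5.0, 0.0, 0.0))).merged()
    two_hulls.heal_mesh()  # merge duplicate vertices so the clustering sees a conformal mesh

    components = connected_components(two_hulls)
    print(len(components))  # expected: 2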
capytaine/post_pro/rao.py
CHANGED
@@ -55,6 +55,6 @@ def rao(dataset, wave_direction=None, dissipation=None, stiffness=None):
     # Solve and add coordinates
     rao_dims = [d for d in H.dims if d != 'influenced_dof']
     rao_coords = {c: H.coords[c] for c in H.coords if c != 'influenced_dof'}
-    rao = xr.DataArray(np.linalg.solve(H, fex), coords=rao_coords, dims=rao_dims)
+    rao = xr.DataArray(np.linalg.solve(H.values, fex.values[..., np.newaxis])[..., 0], coords=rao_coords, dims=rao_dims)

     return rao
capytaine/tools/cache_on_disk.py
CHANGED
@@ -9,7 +9,9 @@ from capytaine import __version__


 def cache_directory():
-    if
+    if "CAPYTAINE_CACHE_DIR" in os.environ:
+        path = os.path.join(os.environ["CAPYTAINE_CACHE_DIR"], __version__)
+    elif sys.platform == "win32": # Windows
         path = os.path.normpath(os.environ.get("LOCALAPPDATA"))
         path = os.path.join(path, "capytaine", "Cache", __version__)
     elif sys.platform == "darwin": # MacOS
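Note (usage sketch, not part of the package diff): the new `CAPYTAINE_CACHE_DIR` branch lets users relocate the on-disk cache. The directory below is a placeholder.

    import os
    os.environ["CAPYTAINE_CACHE_DIR"] = "/tmp/my_capytaine_cache"   # placeholder path

    from capytaine.tools.cache_on_disk import cache_directory
    print(cache_directory())   # .../my_capytaine_cache/<capytaine version>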
capytaine/tools/symbolic_multiplication.py
CHANGED
@@ -1,7 +1,17 @@
+"""This module is used for the handling of zero and infinite frequencies.
+In this cases, the magnitudes that the solver has to manipulate are in the form of ω times a non-zero term.
+Instead of evaluating this multiplication as zero of infinity, we keep it symbolic using the class defined here.
+
+The frequency can be provided to the solver as something like
+`SymbolicMultiplication("0", 1.0)` (that is zero) and the solver will return an
+output of the form `SymbolicMultiplication("0", np.array(...))`
+(that is also actually zero, except we may be intested in the non-zero array).
+"""
+
 import numpy as np
-from functools import wraps
+from functools import wraps, total_ordering

-
+@total_ordering
 class SymbolicMultiplication:
     def __init__(self, symbol, value=1.0):
         self.symbol = symbol
@@ -64,6 +74,9 @@ class SymbolicMultiplication:
     def __rmatmul__(self, x):
         return SymbolicMultiplication(self.symbol, x @ self.value)

+    def __getitem__(self, item):
+        return SymbolicMultiplication(self.symbol, self.value[item])
+
     def __eq__(self, x):
         return float(self) == x

@@ -84,9 +97,9 @@ class SymbolicMultiplication:

     def __float__(self):
         if self.symbol == "0":
-            return 0.0
+            return 0.0 * float(self.value)
         elif self.symbol == "∞":
-            return np.inf
+            return np.inf * float(self.value)
         else:
             raise NotImplementedError

@@ -95,6 +108,12 @@ class SymbolicMultiplication:


 def supporting_symbolic_multiplication(f):
+    """
+    When this decorator is applied to a function, this function can now take
+    as input a `SymbolicMultiplication` object. The function is applied on the
+    `value` part of the `SymbolicMultiplication` without modifying the
+    `symbol`.
+    """
     @wraps(f)
     def wrapped_f(a, x):
         if hasattr(x, 'symbol'):
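Note (usage sketch, not part of the package diff): behaviour of the symbolic frequency objects, using only the methods visible in the hunks above.

    import numpy as np
    from capytaine.tools.symbolic_multiplication import SymbolicMultiplication

    zero_times_array = SymbolicMultiplication("0", np.array([1.0, 2.0, 3.0]))
    print(zero_times_array.value)       # the non-zero factor is kept: [1. 2. 3.]
    print(float(zero_times_array[1]))   # 0.0, via the new __getitem__ and __float__
    print(float(SymbolicMultiplication("∞", 2.0)))  # inf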
capytaine/ui/vtk/body_viewer.py
CHANGED
@@ -15,6 +15,8 @@ class FloatingBodyViewer(MeshViewer):

     def add_body(self, body, **kwargs):
         self.add_mesh(body.mesh, **kwargs)
+        if body.lid_mesh is not None:
+            self.add_mesh(body.lid_mesh, **kwargs)

         for dof in body.dofs:
             vtk_data_array = vtk.vtkFloatArray()
capytaine-2.2.1.dist-info/DELVEWHEEL
ADDED
@@ -0,0 +1,2 @@
+Version: 1.9.0
+Arguments: ['C:\\Users\\runneradmin\\AppData\\Local\\Temp\\cibw-run-qolinjw3\\cp39-win_amd64\\build\\venv\\Scripts\\delvewheel', 'repair', '-w', 'C:\\Users\\runneradmin\\AppData\\Local\\Temp\\cibw-run-qolinjw3\\cp39-win_amd64\\repaired_wheel', 'C:\\Users\\runneradmin\\AppData\\Local\\Temp\\cibw-run-qolinjw3\\cp39-win_amd64\\built_wheel\\capytaine-2.2.1-cp39-cp39-win_amd64.whl', '--no-mangle-all']