capytaine 2.2__cp311-cp311-win_amd64.whl → 2.2.1__cp311-cp311-win_amd64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
capytaine/__about__.py CHANGED
@@ -5,7 +5,7 @@ __all__ = ["__title__", "__description__", "__version__", "__author__", "__uri__
5
5
  __title__ = "capytaine"
6
6
  __description__ = """Python BEM solver for linear potential flow, based on Nemoh"""
7
7
 
8
- __version__ = "2.2"
8
+ __version__ = "2.2.1"
9
9
 
10
10
  __author__ = "Matthieu Ancellin"
11
11
  __uri__ = "https://github.com/capytaine/capytaine"
capytaine/__init__.py CHANGED
@@ -3,15 +3,14 @@
3
3
 
4
4
 
5
5
  # start delvewheel patch
6
- def _delvewheel_patch_1_7_1():
6
+ def _delvewheel_patch_1_9_0():
7
7
  import os
8
- libs_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir, 'capytaine.libs'))
9
- if os.path.isdir(libs_dir):
8
+ if os.path.isdir(libs_dir := os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir, 'capytaine.libs'))):
10
9
  os.add_dll_directory(libs_dir)
11
10
 
12
11
 
13
- _delvewheel_patch_1_7_1()
14
- del _delvewheel_patch_1_7_1
12
+ _delvewheel_patch_1_9_0()
13
+ del _delvewheel_patch_1_9_0
15
14
  # end delvewheel patch
16
15
 
17
16
  from .__about__ import (
capytaine/bem/engines.py CHANGED
@@ -123,10 +123,10 @@ class BasicMatrixEngine(MatrixEngine):
123
123
 
124
124
  S_a, V_a = self.build_matrices(
125
125
  mesh1[0], mesh2[0], free_surface, water_depth, wavenumber,
126
- green_function)
126
+ green_function, adjoint_double_layer=adjoint_double_layer)
127
127
  S_b, V_b = self.build_matrices(
128
128
  mesh1[0], mesh2[1], free_surface, water_depth, wavenumber,
129
- green_function)
129
+ green_function, adjoint_double_layer=adjoint_double_layer)
130
130
 
131
131
  return BlockSymmetricToeplitzMatrix([[S_a, S_b]]), BlockSymmetricToeplitzMatrix([[V_a, V_b]])
132
132
 
@@ -529,7 +529,10 @@ class RadiationResult(LinearPotentialFlowResult):
529
529
 
530
530
  @property
531
531
  def radiation_damping(self):
532
- return {dof: float(np.imag(force)/self.encounter_omega) for (dof, force) in self.forces.items()}
532
+ if float(self.encounter_omega) in {0.0, np.inf} and self.forward_speed == 0.0:
533
+ return {dof: 0.0 for dof in self.forces.keys()}
534
+ else:
535
+ return {dof: float(np.imag(force)/self.encounter_omega) for (dof, force) in self.forces.items()}
533
536
 
534
537
  # Aliases for backward compatibility
535
538
  added_masses = added_mass
capytaine/bem/solver.py CHANGED
@@ -83,8 +83,9 @@ class BEMSolver:
83
83
  keep_details: bool, optional
84
84
  if True, store the sources and the potential on the floating body in the output object
85
85
  (default: True)
86
- _check_wavelength: bool, optional
87
- if True, check the mesh resolution with respect to the wavelength
86
+ _check_wavelength: bool, optional (default: True)
87
+ If True, the frequencies are compared to the mesh resolution and
88
+ the estimated first irregular frequency to warn the user.
88
89
 
89
90
  Returns
90
91
  -------
@@ -159,6 +160,9 @@ class BEMSolver:
159
160
  By defaults: do not use joblib and solve sequentially.
160
161
  progress_bar: bool, optional (default: True)
161
162
  Display a progress bar while solving
163
+ _check_wavelength: bool, optional (default: True)
164
+ If True, the frequencies are compared to the mesh resolution and
165
+ the estimated first irregular frequency to warn the user.
162
166
 
163
167
  Returns
164
168
  -------
@@ -192,6 +196,7 @@ class BEMSolver:
192
196
  def _check_wavelength_and_mesh_resolution(problems):
193
197
  """Display a warning if some of the problems have a mesh resolution
194
198
  that might not be sufficient for the given wavelength."""
199
+ LOG.debug("Check wavelength with mesh resolution.")
195
200
  risky_problems = [pb for pb in problems
196
201
  if 0.0 < pb.wavelength < pb.body.minimal_computable_wavelength]
197
202
  nb_risky_problems = len(risky_problems)
@@ -218,8 +223,9 @@ class BEMSolver:
218
223
  @staticmethod
219
224
  def _check_wavelength_and_irregular_frequencies(problems):
220
225
  """Display a warning if some of the problems might encounter irregular frequencies."""
226
+ LOG.debug("Check wavelength with estimated irregular frequency.")
221
227
  risky_problems = [pb for pb in problems
222
- if pb.body.first_irregular_frequency_estimate() < pb.omega < np.inf]
228
+ if pb.body.first_irregular_frequency_estimate(g=pb.g) < pb.omega < np.inf]
223
229
  nb_risky_problems = len(risky_problems)
224
230
  if nb_risky_problems >= 1:
225
231
  if any(pb.body.lid_mesh is None for pb in problems):
@@ -244,7 +250,7 @@ class BEMSolver:
244
250
  + recommendation
245
251
  )
246
252
 
247
- def fill_dataset(self, dataset, bodies, *, method='indirect', n_jobs=1, **kwargs):
253
+ def fill_dataset(self, dataset, bodies, *, method='indirect', n_jobs=1, _check_wavelength=True, **kwargs):
248
254
  """Solve a set of problems defined by the coordinates of an xarray dataset.
249
255
 
250
256
  Parameters
@@ -261,6 +267,9 @@ class BEMSolver:
261
267
  By defaults: do not use joblib and solve sequentially.
262
268
  progress_bar: bool, optional (default: True)
263
269
  Display a progress bar while solving
270
+ _check_wavelength: bool, optional (default: True)
271
+ If True, the frequencies are compared to the mesh resolution and
272
+ the estimated first irregular frequency to warn the user.
264
273
 
265
274
  Returns
266
275
  -------
@@ -270,12 +279,12 @@ class BEMSolver:
270
279
  **self.exportable_settings}
271
280
  problems = problems_from_dataset(dataset, bodies)
272
281
  if 'theta' in dataset.coords:
273
- results = self.solve_all(problems, keep_details=True, method=method, n_jobs=n_jobs)
282
+ results = self.solve_all(problems, keep_details=True, method=method, n_jobs=n_jobs, _check_wavelength=_check_wavelength)
274
283
  kochin = kochin_data_array(results, dataset.coords['theta'])
275
284
  dataset = assemble_dataset(results, attrs=attrs, **kwargs)
276
285
  dataset.update(kochin)
277
286
  else:
278
- results = self.solve_all(problems, keep_details=False, method=method, n_jobs=n_jobs)
287
+ results = self.solve_all(problems, keep_details=False, method=method, n_jobs=n_jobs, _check_wavelength=_check_wavelength)
279
288
  dataset = assemble_dataset(results, attrs=attrs, **kwargs)
280
289
  return dataset
281
290
 
@@ -5,7 +5,7 @@
5
5
  import logging
6
6
  import copy
7
7
  from itertools import chain, accumulate, zip_longest
8
- from functools import cached_property
8
+ from functools import cached_property, lru_cache
9
9
 
10
10
  import numpy as np
11
11
  import xarray as xr
@@ -76,7 +76,11 @@ class FloatingBody(ClippableMixin, Abstract3DObject):
76
76
  raise TypeError("Unrecognized `mesh` object passed to the FloatingBody constructor.")
77
77
 
78
78
  if lid_mesh is not None:
79
- self.lid_mesh = lid_mesh.with_normal_vector_going_down(inplace=False)
79
+ if lid_mesh.nb_faces == 0:
80
+ LOG.warning("Lid mesh %s provided for body initialization is empty. The lid mesh is ignored.", lid_mesh)
81
+ self.lid_mesh = None
82
+ else:
83
+ self.lid_mesh = lid_mesh.with_normal_vector_going_down(inplace=False)
80
84
  else:
81
85
  self.lid_mesh = None
82
86
 
@@ -996,6 +1000,9 @@ respective inertia coefficients are assigned as NaN.")
996
1000
  self.mesh.clip(plane)
997
1001
  if self.lid_mesh is not None:
998
1002
  self.lid_mesh.clip(plane)
1003
+ if self.lid_mesh.nb_faces == 0:
1004
+ LOG.warning("Lid mesh %s is empty after clipping. The lid mesh is removed.", self.lid_mesh)
1005
+ self.lid_mesh = None
999
1006
 
1000
1007
  # Clip dofs
1001
1008
  ids = self.mesh._clipping_data['faces_ids']
@@ -1100,6 +1107,7 @@ respective inertia coefficients are assigned as NaN.")
1100
1107
  else:
1101
1108
  return 8*self.mesh.faces_radiuses.max()
1102
1109
 
1110
+ @lru_cache
1103
1111
  def first_irregular_frequency_estimate(self, *, g=9.81):
1104
1112
  r"""Estimates the angular frequency of the lowest irregular
1105
1113
  frequency.
@@ -721,6 +721,7 @@ def load_GDF(filename, name=None):
721
721
  npan = int(gdf_file.readline().split()[0])
722
722
  faces_vertices = np.genfromtxt(gdf_file)
723
723
 
724
+ faces_vertices = faces_vertices.reshape(-1, 3)
724
725
  vertices, indices = np.unique(faces_vertices, axis=0, return_inverse=True)
725
726
  faces = indices.reshape(-1, 4)
726
727
 
capytaine/io/xarray.py CHANGED
@@ -112,11 +112,19 @@ def problems_from_dataset(dataset: xr.Dataset,
112
112
  if wave_direction_range is not None:
113
113
  for freq, wave_direction, water_depth, body_name, forward_speed, rho, g \
114
114
  in product(freq_range, wave_direction_range, water_depth_range, body_range, forward_speed_range, rho_range, g_range):
115
- problems.append(
116
- DiffractionProblem(body=body_range[body_name], **{freq_type: freq},
117
- wave_direction=wave_direction, water_depth=water_depth,
118
- forward_speed=forward_speed, rho=rho, g=g)
119
- )
115
+ if freq not in {0.0, np.inf}:
116
+ problems.append(
117
+ DiffractionProblem(body=body_range[body_name], **{freq_type: freq},
118
+ wave_direction=wave_direction, water_depth=water_depth,
119
+ forward_speed=forward_speed, rho=rho, g=g)
120
+ )
121
+ elif freq in {0.0, np.inf} and radiating_dofs is not None:
122
+ # Diffraction problems are not defined for 0 and infinite frequency.
123
+ # But we don't want the whole batch to fail, as these frequencies are there for the radiation problems.
124
+ # The excitation force will be NaN for these frequencies in the resulting dataset.
125
+ pass
126
+ else:
127
+ raise ValueError("Zero and infinite frequencies are not defined when solving only diffraction problems.")
120
128
 
121
129
  if radiating_dofs is not None:
122
130
  for freq, radiating_dof, water_depth, body_name, forward_speed, rho, g \
@@ -185,7 +185,9 @@ class BlockMatrix:
185
185
  self._put_in_full_matrix(full_matrix)
186
186
  return full_matrix
187
187
 
188
- def __array__(self, dtype=None):
188
+ def __array__(self, dtype=None, copy=True):
189
+ if not copy:
190
+ raise ValueError("Making an ndarray out of a BlockMatrix requires copy")
189
191
  return self.full_matrix(dtype=dtype)
190
192
 
191
193
  def no_toeplitz(self):
@@ -587,4 +589,4 @@ class BlockMatrix:
587
589
  this_block = self
588
590
  for index in path:
589
591
  this_block = this_block.all_blocks[index, index]
590
- return this_block
592
+ return this_block
@@ -318,7 +318,9 @@ class LowRankMatrix:
318
318
  else:
319
319
  return self.left_matrix @ self.right_matrix
320
320
 
321
- def __array__(self, dtype=None):
321
+ def __array__(self, dtype=None, copy=True):
322
+ if not copy:
323
+ raise ValueError("Making an ndarray out of a BlockMatrix requires copy")
322
324
  return self.full_matrix(dtype=dtype)
323
325
 
324
326
  @property
@@ -30,14 +30,14 @@ def clip(source_mesh: Mesh, plane: Plane, vicinity_tol=1e-12, name=None):
30
30
  """
31
31
  vertices_data = _vertices_positions_wrt_plane(source_mesh, plane, vicinity_tol)
32
32
 
33
- nb_vertices_above_or_on_plane = np.count_nonzero(
34
- vertices_data['vertices_above_mask'] | vertices_data['vertices_on_mask']
33
+ nb_vertices_strictly_above_plane = np.count_nonzero(
34
+ vertices_data['vertices_above_mask']
35
35
  )
36
36
  nb_vertices_below_or_on_plane = np.count_nonzero(
37
37
  vertices_data['vertices_below_mask'] | vertices_data['vertices_on_mask']
38
38
  )
39
39
 
40
- if nb_vertices_above_or_on_plane == source_mesh.nb_vertices:
40
+ if nb_vertices_strictly_above_plane == source_mesh.nb_vertices:
41
41
  LOG.warning(f"Clipping {source_mesh.name} by {plane}: all vertices are removed.")
42
42
  clipped_mesh = Mesh(None, None)
43
43
  clipped_mesh._clipping_data = dict(faces_ids=[])
@@ -470,8 +470,9 @@ class Mesh(ClippableMixin, SurfaceIntegralsMixin, Abstract3DObject):
470
470
  Other parameters are passed to Poly3DCollection.
471
471
  """
472
472
  matplotlib = import_optional_dependency("matplotlib")
473
- plt = matplotlib.pyplot
474
- cm = matplotlib.cm
473
+ import importlib
474
+ plt = importlib.import_module("matplotlib.pyplot")
475
+ cm = importlib.import_module("matplotlib.cm")
475
476
 
476
477
  mpl_toolkits = import_optional_dependency("mpl_toolkits", package_name="matplotlib")
477
478
  Poly3DCollection = mpl_toolkits.mplot3d.art3d.Poly3DCollection
@@ -808,7 +809,8 @@ class Mesh(ClippableMixin, SurfaceIntegralsMixin, Abstract3DObject):
808
809
  # edges_of_hull_faces.shape = (nb_full_faces, 4, 2)
809
810
  lid_points_in_local_coords = candidate_lid_points[:, np.newaxis, np.newaxis, :] - hull_faces[:, :, :]
810
811
  # lid_points_in_local_coords.shape = (nb_candidate_lid_points, nb_full_faces, 4, 2)
811
- side_of_hull_edges = np.cross(lid_points_in_local_coords, edges_of_hull_faces)
812
+ side_of_hull_edges = (lid_points_in_local_coords[..., 0] * edges_of_hull_faces[..., 1]
813
+ - lid_points_in_local_coords[..., 1] * edges_of_hull_faces[..., 0])
812
814
  # side_of_hull_edges.shape = (nb_candidate_lid_points, nb_full_faces, 4)
813
815
  point_is_above_panel = np.all(side_of_hull_edges <= 0, axis=-1) | np.all(side_of_hull_edges >= 0, axis=-1)
814
816
  # point_is_above_panel.shape = (nb_candidate_lid_points, nb_full_faces)
@@ -4,7 +4,11 @@ Based on meshmagick <https://github.com/LHEEA/meshmagick> by François Rongère.
4
4
  # Copyright (C) 2017-2019 Matthieu Ancellin, based on the work of François Rongère
5
5
  # See LICENSE file at <https://github.com/mancellin/capytaine>
6
6
 
7
+ from functools import reduce
8
+ from itertools import chain
7
9
  import numpy as np
10
+ from typing import List
11
+ from numpy.typing import NDArray
8
12
 
9
13
 
10
14
  def compute_faces_properties(mesh):
@@ -198,37 +202,67 @@ def compute_connectivity(mesh):
198
202
  'f_f': f_f,
199
203
  'boundaries': boundaries}
200
204
 
205
+ def faces_in_group(faces: NDArray[np.integer], group: NDArray[np.integer]) -> NDArray[np.bool_]:
206
+ """Identification of faces with vertices within group.
201
207
 
202
- def connected_components(mesh):
203
- """Returns a list of meshes that each corresponds to the a connected component in the original mesh.
204
- Assumes the mesh is mostly conformal without duplicate vertices.
208
+ Parameters
209
+ ----------
210
+ faces : NDArray[np.integer]
211
+ Mesh faces. Expecting a numpy array of shape N_faces x N_vertices_per_face.
212
+ group : NDArray[np.integer]
213
+ Group of connected vertices
214
+
215
+ Returns
216
+ -------
217
+ NDArray[np.bool]
218
+ Mask of faces containing vertices from the group
205
219
  """
206
- from typing import Set, FrozenSet, List
220
+ return np.any(np.isin(faces, group), axis=1)
207
221
 
208
- vertices_components: Set[FrozenSet[int]] = set()
209
- for set_of_v_in_face in map(frozenset, mesh.faces):
210
- intersecting_components = [c for c in vertices_components if len(c.intersection(set_of_v_in_face)) > 0]
211
- if len(intersecting_components) == 0:
212
- vertices_components.add(set_of_v_in_face)
222
+ def clustering(faces: NDArray[np.integer]) -> List[NDArray[np.integer]]:
223
+ """Clustering of vertices per connected faces.
224
+
225
+ Parameters
226
+ ----------
227
+ faces : NDArray[np.integer]
228
+ Mesh faces. Expecting a numpy array of shape N_faces x N_vertices_per_face.
229
+
230
+ Returns
231
+ -------
232
+ list[NDArray[np.integer]]
233
+ Groups of connected vertices.
234
+ """
235
+ vert_groups: list[NDArray[np.integer]] = []
236
+ mask = np.ones(faces.shape[0], dtype=bool)
237
+ while np.any(mask):
238
+ # Consider faces whose vertices are not already identified in a group.
239
+ # Start new group by considering first face
240
+ remaining_faces = faces[mask]
241
+ group = remaining_faces[0]
242
+ rem_mask = np.ones(remaining_faces.shape[0], dtype=bool)
243
+ # Iterative update of vertices group. Output final result to frozenset
244
+ while not np.allclose(new:=faces_in_group(remaining_faces, group), rem_mask):
245
+ group = np.unique(remaining_faces[new])
246
+ rem_mask = new
213
247
  else:
214
- for c in intersecting_components:
215
- vertices_components.remove(c)
216
- vertices_components.add(frozenset.union(set_of_v_in_face, *intersecting_components))
248
+ group = np.unique(remaining_faces[new])
249
+ vert_groups.append(group)
250
+ # Identify faces that have no vertices in current groups
251
+ mask = ~reduce(np.logical_or, [faces_in_group(faces, group) for group in vert_groups])
252
+ return vert_groups
217
253
 
218
- # Verification
219
- for component in vertices_components:
220
- assert all(len(component.intersection(c)) == 0 for c in vertices_components if c != component)
221
254
 
255
+ def connected_components(mesh):
256
+ """Returns a list of meshes that each corresponds to the a connected component in the original mesh.
257
+ Assumes the mesh is mostly conformal without duplicate vertices.
258
+ """
259
+ # Get connected vertices
260
+ vertices_components = clustering(mesh.faces)
261
+ # Verification
262
+ if sum(len(group) for group in vertices_components) != len(set(chain.from_iterable(vertices_components))):
263
+ raise ValueError("Error in connected components clustering. Some elements are duplicated")
222
264
  # The components are found. The rest is just about retrieving the faces in each components.
223
- vertices_components: List[FrozenSet[int]] = list(vertices_components)
224
- faces_components: List[List[int]] = [[] for _ in vertices_components]
225
- for i_face, v_in_face in enumerate(mesh.faces):
226
- for i_component, v_c in enumerate(vertices_components):
227
- if any(v in v_c for v in v_in_face):
228
- assert all(v in v_c for v in v_in_face)
229
- faces_components[i_component].append(i_face)
230
- break
231
-
265
+ faces_components = [np.argwhere(faces_in_group(mesh.faces, group)) for group in vertices_components]
232
266
  components = [mesh.extract_faces(f) for f in faces_components]
233
267
  return components
234
268
 
@@ -1,3 +1,13 @@
1
+ """This module is used for the handling of zero and infinite frequencies.
2
+ In these cases, the magnitudes that the solver has to manipulate are in the form of ω times a non-zero term.
3
+ Instead of evaluating this multiplication as zero or infinity, we keep it symbolic using the class defined here.
4
+
5
+ The frequency can be provided to the solver as something like
6
+ `SymbolicMultiplication("0", 1.0)` (that is zero) and the solver will return an
7
+ output of the form `SymbolicMultiplication("0", np.array(...))`
8
+ (that is also actually zero, except we may be interested in the non-zero array).
9
+ """
10
+
1
11
  import numpy as np
2
12
  from functools import wraps, total_ordering
3
13
 
@@ -87,9 +97,9 @@ class SymbolicMultiplication:
87
97
 
88
98
  def __float__(self):
89
99
  if self.symbol == "0":
90
- return 0.0
100
+ return 0.0 * float(self.value)
91
101
  elif self.symbol == "∞":
92
- return np.inf
102
+ return np.inf * float(self.value)
93
103
  else:
94
104
  raise NotImplementedError
95
105
 
@@ -98,6 +108,12 @@ class SymbolicMultiplication:
98
108
 
99
109
 
100
110
  def supporting_symbolic_multiplication(f):
111
+ """
112
+ When this decorator is applied to a function, this function can now take
113
+ as input a `SymbolicMultiplication` object. The function is applied on the
114
+ `value` part of the `SymbolicMultiplication` without modifying the
115
+ `symbol`.
116
+ """
101
117
  @wraps(f)
102
118
  def wrapped_f(a, x):
103
119
  if hasattr(x, 'symbol'):
@@ -0,0 +1,2 @@
1
+ Version: 1.9.0
2
+ Arguments: ['C:\\Users\\runneradmin\\AppData\\Local\\Temp\\cibw-run-qolinjw3\\cp311-win_amd64\\build\\venv\\Scripts\\delvewheel', 'repair', '-w', 'C:\\Users\\runneradmin\\AppData\\Local\\Temp\\cibw-run-qolinjw3\\cp311-win_amd64\\repaired_wheel', 'C:\\Users\\runneradmin\\AppData\\Local\\Temp\\cibw-run-qolinjw3\\cp311-win_amd64\\built_wheel\\capytaine-2.2.1-cp311-cp311-win_amd64.whl', '--no-mangle-all']