capytaine 3.0.0a1__cp312-cp312-macosx_15_0_x86_64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (65)
  1. capytaine/.dylibs/libgcc_s.1.1.dylib +0 -0
  2. capytaine/.dylibs/libgfortran.5.dylib +0 -0
  3. capytaine/.dylibs/libquadmath.0.dylib +0 -0
  4. capytaine/__about__.py +21 -0
  5. capytaine/__init__.py +32 -0
  6. capytaine/bem/__init__.py +0 -0
  7. capytaine/bem/airy_waves.py +111 -0
  8. capytaine/bem/engines.py +321 -0
  9. capytaine/bem/problems_and_results.py +601 -0
  10. capytaine/bem/solver.py +718 -0
  11. capytaine/bodies/__init__.py +4 -0
  12. capytaine/bodies/bodies.py +630 -0
  13. capytaine/bodies/dofs.py +146 -0
  14. capytaine/bodies/hydrostatics.py +540 -0
  15. capytaine/bodies/multibodies.py +216 -0
  16. capytaine/green_functions/Delhommeau_float32.cpython-312-darwin.so +0 -0
  17. capytaine/green_functions/Delhommeau_float64.cpython-312-darwin.so +0 -0
  18. capytaine/green_functions/__init__.py +2 -0
  19. capytaine/green_functions/abstract_green_function.py +64 -0
  20. capytaine/green_functions/delhommeau.py +522 -0
  21. capytaine/green_functions/hams.py +210 -0
  22. capytaine/io/__init__.py +0 -0
  23. capytaine/io/bemio.py +153 -0
  24. capytaine/io/legacy.py +228 -0
  25. capytaine/io/wamit.py +479 -0
  26. capytaine/io/xarray.py +673 -0
  27. capytaine/meshes/__init__.py +2 -0
  28. capytaine/meshes/abstract_meshes.py +375 -0
  29. capytaine/meshes/clean.py +302 -0
  30. capytaine/meshes/clip.py +347 -0
  31. capytaine/meshes/export.py +89 -0
  32. capytaine/meshes/geometry.py +259 -0
  33. capytaine/meshes/io.py +433 -0
  34. capytaine/meshes/meshes.py +826 -0
  35. capytaine/meshes/predefined/__init__.py +6 -0
  36. capytaine/meshes/predefined/cylinders.py +280 -0
  37. capytaine/meshes/predefined/rectangles.py +202 -0
  38. capytaine/meshes/predefined/spheres.py +55 -0
  39. capytaine/meshes/quality.py +159 -0
  40. capytaine/meshes/surface_integrals.py +82 -0
  41. capytaine/meshes/symmetric_meshes.py +641 -0
  42. capytaine/meshes/visualization.py +353 -0
  43. capytaine/post_pro/__init__.py +6 -0
  44. capytaine/post_pro/free_surfaces.py +85 -0
  45. capytaine/post_pro/impedance.py +92 -0
  46. capytaine/post_pro/kochin.py +54 -0
  47. capytaine/post_pro/rao.py +60 -0
  48. capytaine/tools/__init__.py +0 -0
  49. capytaine/tools/block_circulant_matrices.py +275 -0
  50. capytaine/tools/cache_on_disk.py +26 -0
  51. capytaine/tools/deprecation_handling.py +18 -0
  52. capytaine/tools/lists_of_points.py +52 -0
  53. capytaine/tools/memory_monitor.py +45 -0
  54. capytaine/tools/optional_imports.py +27 -0
  55. capytaine/tools/prony_decomposition.py +150 -0
  56. capytaine/tools/symbolic_multiplication.py +161 -0
  57. capytaine/tools/timer.py +90 -0
  58. capytaine/ui/__init__.py +0 -0
  59. capytaine/ui/cli.py +28 -0
  60. capytaine/ui/rich.py +5 -0
  61. capytaine-3.0.0a1.dist-info/LICENSE +674 -0
  62. capytaine-3.0.0a1.dist-info/METADATA +755 -0
  63. capytaine-3.0.0a1.dist-info/RECORD +65 -0
  64. capytaine-3.0.0a1.dist-info/WHEEL +6 -0
  65. capytaine-3.0.0a1.dist-info/entry_points.txt +3 -0
capytaine/meshes/abstract_meshes.py
@@ -0,0 +1,375 @@
1
+ # Copyright 2025 Mews Labs
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+
15
+ from __future__ import annotations
16
+
17
+ import logging
18
+ from abc import ABC, abstractmethod
19
+ from functools import cached_property, lru_cache
20
+ from typing import Literal, Tuple
21
+
22
+ import numpy as np
23
+
24
+ from capytaine.meshes.surface_integrals import SurfaceIntegralsMixin
25
+ from capytaine.tools.deprecation_handling import _get_water_depth
26
+ from capytaine.meshes.geometry import connected_components, connected_components_of_waterline
27
+
28
+ LOG = logging.getLogger(__name__)
29
+
30
+ class AbstractMesh(SurfaceIntegralsMixin, ABC):
31
+ @property
32
+ @abstractmethod
33
+ def nb_vertices(self) -> int:
34
+ ...
35
+
36
+ @property
37
+ @abstractmethod
38
+ def nb_faces(self) -> int:
39
+ ...
40
+
41
+ @property
42
+ @abstractmethod
43
+ def faces_normals(self) -> np.ndarray:
44
+ ...
45
+
46
+ @property
47
+ @abstractmethod
48
+ def faces_areas(self) -> np.ndarray:
49
+ ...
50
+
51
+ @property
52
+ @abstractmethod
53
+ def faces_centers(self) -> np.ndarray:
54
+ ...
55
+
56
+ @property
57
+ @abstractmethod
58
+ def faces_radiuses(self) -> np.ndarray:
59
+ ...
60
+
61
+ @property
62
+ @abstractmethod
63
+ def faces(self) -> np.ndarray:
64
+ ...
65
+
66
+ @property
67
+ @abstractmethod
68
+ def quadrature_points(self) -> np.ndarray:
69
+ ...
70
+
71
+ @cached_property
72
+ def z_span(self) -> Tuple[float, float]:
73
+ return (self.vertices[:, 2].min(), self.vertices[:, 2].max())
74
+
75
+ @abstractmethod
76
+ def __str__(self) -> str:
77
+ ...
78
+
79
+ @abstractmethod
80
+ def __short_str__(self) -> str:
81
+ ...
82
+
83
+ @abstractmethod
84
+ def with_quadrature(self, quadrature_method):
85
+ ...
86
+
87
+ @abstractmethod
88
+ def extract_faces(self, faces_id, *, name=None) -> AbstractMesh:
89
+ ...
90
+
91
+ @abstractmethod
92
+ def translated(self, shift, *, name=None) -> AbstractMesh:
93
+ ...
94
+
95
+ def translated_x(self, dx: float, *, name=None) -> AbstractMesh:
96
+ """Return a new Mesh translated in the x-direction along `dx`."""
97
+ return self.translated([dx, 0.0, 0.0], name=name)
98
+
99
+ def translated_y(self, dy: float, *, name=None) -> AbstractMesh:
100
+ """Return a new Mesh translated in the y-direction along `dy`."""
101
+ return self.translated([0.0, dy, 0.0], name=name)
102
+
103
+ def translated_z(self, dz: float, *, name=None) -> AbstractMesh:
104
+ """Return a new Mesh translated in the z-direction along `dz`."""
105
+ return self.translated([0.0, 0.0, dz], name=name)
106
+
107
+ @abstractmethod
108
+ def rotated_with_matrix(self, R, *, name=None) -> AbstractMesh:
109
+ ...
110
+
111
+ def rotated_x(self, angle: float, *, name=None) -> AbstractMesh:
112
+ """Return a new Mesh rotated around the x-axis using the provided rotation angle in radians"""
113
+ c, s = np.cos(angle), np.sin(angle)
114
+ R = np.array([[1, 0, 0], [0, c, -s], [0, s, c]])
115
+ return self.rotated_with_matrix(R, name=name)
116
+
117
+ def rotated_y(self, angle: float, *, name=None) -> AbstractMesh:
118
+ """Return a new Mesh rotated around the y-axis using the provided rotation angle in radians"""
119
+ c, s = np.cos(angle), np.sin(angle)
120
+ R = np.array([[c, 0, s], [0, 1, 0], [-s, 0, c]])
121
+ return self.rotated_with_matrix(R, name=name)
122
+
123
+ def rotated_z(self, angle: float, *, name=None) -> AbstractMesh:
124
+ """Return a new Mesh rotated around the z-axis using the provided rotation angle in radians"""
125
+ c, s = np.cos(angle), np.sin(angle)
126
+ R = np.array([[c, -s, 0], [s, c, 0], [0, 0, 1]])
127
+ return self.rotated_with_matrix(R, name=name)
128
+
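These single-axis helpers assemble the usual rotation matrices. As a quick standalone check with NumPy (illustrative only, not part of the module), the matrix built by rotated_z for a quarter turn maps the x-axis onto the y-axis:

    import numpy as np

    angle = np.pi / 2
    c, s = np.cos(angle), np.sin(angle)
    Rz = np.array([[c, -s, 0], [s, c, 0], [0, 0, 1]])   # same matrix as in rotated_z
    assert np.allclose(Rz @ np.array([1.0, 0.0, 0.0]), [0.0, 1.0, 0.0])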
129
+ def rotated_such_that_vectors_are_aligned(self, a, b, *, eps=1e-8, name=None) -> AbstractMesh:
130
+ a = np.asarray(a, dtype=float)
131
+ b = np.asarray(b, dtype=float)
132
+
133
+ # Normalize input vectors
134
+ a_norm = np.linalg.norm(a)
135
+ b_norm = np.linalg.norm(b)
136
+ if a_norm < eps or b_norm < eps:
137
+ raise ValueError("Input vectors must be non-zero")
138
+
139
+ a_hat = a / a_norm
140
+ b_hat = b / b_norm
141
+
142
+ # Cross and dot products
143
+ v = np.cross(a_hat, b_hat)
144
+ c = np.dot(a_hat, b_hat)
145
+ s = np.linalg.norm(v)
146
+
147
+ # Case 1: vectors are already aligned
148
+ if s < eps and c > 0:
149
+ return self.copy(name=name)
150
+
151
+ # Case 2: vectors are opposite
152
+ if s < eps and c < 0:
153
+ # Find an arbitrary orthogonal vector
154
+ # Prefer axis least aligned with a_hat
155
+ axis = np.array([1.0, 0.0, 0.0])
156
+ if abs(a_hat[0]) > abs(a_hat[1]):
157
+ axis = np.array([0.0, 1.0, 0.0])
158
+ axis = axis - a_hat * np.dot(a_hat, axis)
159
+ axis /= np.linalg.norm(axis)
160
+
161
+ # Rotation by pi around axis
162
+ K = np.array([[0, -axis[2], axis[1]],
163
+ [axis[2], 0, -axis[0]],
164
+ [-axis[1], axis[0], 0]])
165
+ return self.rotated_with_matrix(np.eye(3) + 2 * K @ K, name=name)
166
+
167
+ # General case: Rodrigues' rotation formula
168
+ K = np.array([[0, -v[2], v[1]],
169
+ [v[2], 0, -v[0]],
170
+ [-v[1], v[0], 0]])
171
+
172
+ R = np.eye(3) + K + K @ K * ((1 - c) / (s ** 2))
173
+ return self.rotated_with_matrix(R, name=name)
174
+
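The general branch above is Rodrigues' rotation formula, R = I + K + K^2 (1 - c) / s^2, with K the cross-product matrix of v = a_hat x b_hat. A standalone NumPy check with illustrative vectors (not part of the module):

    import numpy as np

    a_hat = np.array([1.0, 0.0, 0.0])
    b_hat = np.array([0.0, 0.0, 1.0])
    v = np.cross(a_hat, b_hat)                       # rotation axis scaled by sin(angle)
    c, s = np.dot(a_hat, b_hat), np.linalg.norm(v)   # cos(angle), sin(angle)
    K = np.array([[0, -v[2], v[1]], [v[2], 0, -v[0]], [-v[1], v[0], 0]])
    R = np.eye(3) + K + K @ K * ((1 - c) / s**2)
    assert np.allclose(R @ a_hat, b_hat)             # a_hat is rotated onto b_hat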
175
+ @abstractmethod
+ def mirrored(self, plane: Literal['xOz', 'yOz'], *, name=None) -> AbstractMesh:
176
+ ...
177
+
178
+ @abstractmethod
179
+ def join_meshes(*meshes, return_masks=False, name=None) -> AbstractMesh:
180
+ ...
181
+
182
+ def _common_metadata_keys(*meshes):
183
+ metadata_keys = [set(m.faces_metadata.keys()) for m in meshes]
184
+ common_metadata_keys = set.intersection(*metadata_keys)
185
+ lost_metadata_keys = set.union(*metadata_keys) - common_metadata_keys
186
+ if len(lost_metadata_keys) > 0:
187
+ LOG.warning(f'The following metadata have been dropped when joining meshes: {lost_metadata_keys}')
188
+ return common_metadata_keys
189
+
190
+ def __add__(self, other: AbstractMesh) -> AbstractMesh:
191
+ """Combine two meshes using the + operator.
192
+
193
+ Parameters
194
+ ----------
195
+ other : Mesh
196
+ Another mesh to combine with this one.
197
+
198
+ Returns
199
+ -------
200
+ Mesh
201
+ New mesh containing vertices and faces from both meshes.
202
+ """
203
+ if self.name is not None or other.name is not None:
204
+ name = f"{self.name}+{other.name}"
205
+ else:
206
+ name = None
207
+ return self.join_meshes(other, name=name)
208
+
209
+ def lowest_lid_position(self, omega_max, *, g=9.81):
210
+ z_lid = 0.0
211
+ for comp in connected_components(self):
212
+ for ccomp in connected_components_of_waterline(comp):
213
+ x_span = ccomp.vertices[:, 0].max() - ccomp.vertices[:, 0].min()
214
+ y_span = ccomp.vertices[:, 1].max() - ccomp.vertices[:, 1].min()
215
+ p = np.hypot(1/x_span, 1/y_span)
216
+ z_lid_comp = -np.arctanh(np.pi*g*p/omega_max**2) / (np.pi * p)
217
+ z_lid = min(z_lid, z_lid_comp)
218
+ return 0.9*z_lid # Add a small safety margin
219
+
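The per-component depth computed above follows z = -arctanh(pi*g*p/omega_max**2) / (pi*p) with p = hypot(1/x_span, 1/y_span). A worked example with hypothetical dimensions (not taken from the package):

    import numpy as np

    g, omega_max = 9.81, 5.0           # m/s**2, rad/s
    x_span = y_span = 10.0             # horizontal extent of one waterline component, in m
    p = np.hypot(1/x_span, 1/y_span)                          # ~0.141
    z_lid = -np.arctanh(np.pi*g*p/omega_max**2) / (np.pi*p)   # ~-0.40 m
    # lowest_lid_position would then return 0.9*z_lid, about -0.36 m.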
220
+ @abstractmethod
221
+ def generate_lid(self, z=0.0, faces_max_radius=None, name=None):
222
+ ...
223
+
224
+ @abstractmethod
225
+ def extract_lid(self, z=0.0):
226
+ ...
227
+
228
+ @abstractmethod
229
+ def with_normal_vector_going_down(self, **kwargs) -> AbstractMesh:
230
+ ...
231
+
232
+ @abstractmethod
233
+ def copy(self) -> AbstractMesh:
234
+ ...
235
+
236
+ def with_metadata(self, **new_metadata) -> AbstractMesh:
237
+ faces_metadata = self.faces_metadata.copy()
238
+ for k, v in new_metadata.items():
239
+ faces_metadata[k] = v
240
+ return self.copy(faces_metadata=faces_metadata)
241
+
242
+ def pop_metadata(self, metadata_name) -> Tuple[AbstractMesh, np.ndarray]:
243
+ faces_metadata = self.faces_metadata.copy()
244
+ data = faces_metadata.pop(metadata_name)
245
+ return self.copy(faces_metadata=faces_metadata), data
246
+
247
+ def without_metadata(self, *metadata_names) -> AbstractMesh:
248
+ faces_metadata = self.faces_metadata.copy()
249
+ for k in metadata_names:
250
+ del faces_metadata[k]
251
+ return self.copy(faces_metadata=faces_metadata)
252
+
253
+ def without_any_metadata(self) -> AbstractMesh:
254
+ return self.copy(faces_metadata={})
255
+
256
+ @abstractmethod
257
+ def merged(self) -> AbstractMesh:
258
+ ...
259
+
260
+ @abstractmethod
261
+ def clipped(self, *, origin, normal, name=None) -> AbstractMesh:
262
+ ...
263
+
264
+ def extract_wedge(self, n: int, axis: str = "z") -> AbstractMesh:
265
+ """Extract a wedge (angular sector) from the mesh for rotational symmetry.
266
+
267
+ Extracts a 1/n sector of the mesh by clipping at angular boundaries.
268
+ This creates proper faces at the wedge boundaries for clean reconstruction.
269
+
270
+ Parameters
271
+ ----------
272
+ n : int
273
+ The rotation order. The wedge will span 360/n degrees.
274
+ axis : str, optional
275
+ Only "z" is currently supported.
276
+
277
+ Returns
278
+ -------
279
+ Mesh
280
+ A new mesh containing the wedge sector with proper boundary faces.
281
+
282
+ Examples
283
+ --------
284
+ Extract 1/3 of a sphere (120-degree wedge):
285
+
286
+ >>> sphere = mesh_sphere(radius=1.0, resolution=(12, 12))
287
+ >>> wedge = sphere.extract_wedge(n=3)
288
+ >>> wedge.nb_faces # Approximately 1/3 of sphere.nb_faces
289
+ """
290
+ if axis != "z":
291
+ raise NotImplementedError(
292
+ f"Only 'z' axis is currently supported, got '{axis}'"
293
+ )
294
+ if n < 2:
295
+ raise ValueError(f"Rotation order must be >= 2, got {n}")
296
+
297
+ # Wedge angle in radians
298
+ wedge_angle = 2 * np.pi / n
299
+
300
+ # First clip: keep the half with y >= 0 (theta in [0, pi])
301
+ # This corresponds to the plane y=0, keeping positive y side
302
+ origin = np.array([0.0, 0.0, 0.0])
303
+ normal_1 = np.array([0.0, -1.0, 0.0]) # Keep y >= 0
304
+ wedge = self.clipped(origin=origin, normal=normal_1)
305
+
306
+ # Second clip: create the wedge boundary at angle = wedge_angle
307
+ # The plane passes through the z-axis and has a normal perpendicular to the boundary
308
+ # For a wedge from theta=0 to theta=wedge_angle, we need to keep theta <= wedge_angle
309
+ # Normal vector points outward from the wedge (to reject the side we don't want)
310
+ # At angle theta, the outward normal is [-sin(theta), cos(theta), 0]
311
+ normal_2 = np.array([-np.sin(wedge_angle), np.cos(wedge_angle), 0.0])
312
+ wedge = wedge.clipped(origin=origin, normal=normal_2, name=f"{self.name}_wedge_n{n}")
313
+
314
+ return wedge
315
+
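As a concrete illustration, n = 4 gives a 90-degree wedge: the first plane keeps y >= 0 and the second plane's normal evaluates to [-1, 0, 0], which keeps x >= 0 under the same sign convention (the normal points away from the retained half-space). Only the plane normals are sketched here; the actual cuts are delegated to the clipped method:

    import numpy as np

    n = 4
    wedge_angle = 2 * np.pi / n
    normal_1 = np.array([0.0, -1.0, 0.0])                                  # keeps y >= 0
    normal_2 = np.array([-np.sin(wedge_angle), np.cos(wedge_angle), 0.0])  # == [-1, 0, 0]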
316
+ @lru_cache
317
+ def immersed_part(self, free_surface=0.0, *, sea_bottom=None, water_depth=None) -> AbstractMesh:
318
+ """
319
+ Clip the mesh to keep only the part below the free surface.
320
+
321
+ Parameters
322
+ ----------
323
+ free_surface: float
324
+ The :math:`z` coordinate of the free surface (default: 0.0)
325
+ water_depth: Optional[float]
326
+ The water depth, as a positive value (default: infinity)
327
+
328
+ Returns
329
+ -------
330
+ Mesh
331
+ A new Mesh instance that has been clipped.
332
+ """
333
+ water_depth = _get_water_depth(free_surface, water_depth, sea_bottom,
334
+ default_water_depth=np.inf)
335
+ if (free_surface - water_depth <= self.z_span[0]
336
+ and self.z_span[1] <= free_surface): # Already clipped
337
+ return self # Shortcut for performance
338
+ clipped = self.clipped(origin=(0, 0, 0), normal=(0, 0, 1))
339
+ if water_depth < np.inf:
340
+ clipped = clipped.clipped(origin=(0, 0, free_surface-water_depth), normal=(0, 0, -1))
341
+ return clipped
342
+
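Typical use, assuming the predefined sphere mesh is exposed at the package top level as in Capytaine 2.x:

    import capytaine as cpt

    mesh = cpt.mesh_sphere(radius=1.0, center=(0.0, 0.0, 0.0))
    hull = mesh.immersed_part(water_depth=50.0)   # keeps the faces with -50.0 <= z <= 0.0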
343
+ @abstractmethod
344
+ def show(self, *, backend=None, **kwargs):
345
+ ...
346
+
347
+ def show_pyvista(self, **kwargs):
348
+ """
349
+ Equivalent to show(backend="pyvista").
350
+ See also :func:`~capytaine.meshes.visualization.show_pyvista`
351
+ """
352
+ return self.show(backend="pyvista", **kwargs)
353
+
354
+ def show_matplotlib(self, **kwargs):
355
+ """
356
+ Equivalent to show(backend="matplotlib").
357
+ See also :func:`~capytaine.meshes.visualization.show_matplotlib`
358
+ """
359
+ return self.show(backend="matplotlib", **kwargs)
360
+
361
+ @abstractmethod
362
+ def export(self, format, **kwargs):
363
+ ...
364
+
365
+ def export_to_pyvista(self, **kwargs):
366
+ return self.export(format="pyvista", **kwargs)
367
+
368
+ def export_to_xarray(self, **kwargs):
369
+ return self.export(format="xarray", **kwargs)
370
+
371
+ def export_to_meshio(self, **kwargs):
372
+ return self.export(format="meshio", **kwargs)
373
+
374
+ def export_to_trimesh(self, **kwargs):
375
+ return self.export(format="trimesh", **kwargs)
capytaine/meshes/clean.py
@@ -0,0 +1,302 @@
1
+ # Copyright 2025 Mews Labs
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+
15
+ import logging
16
+ from typing import List, Tuple, Dict
17
+
18
+ import numpy as np
19
+ from scipy.spatial import cKDTree
20
+
21
+ LOG = logging.getLogger(__name__)
22
+
23
+
24
+ def clean_mesh(
25
+ vertices: np.ndarray,
26
+ faces: List[List[int]],
27
+ faces_metadata: Dict[str, np.ndarray],
28
+ max_iter: int = 5,
29
+ tol: float = 1e-8
30
+ ) -> Tuple[np.ndarray, List[List[int]], Dict[str, np.ndarray]]:
31
+ """Iteratively clean a mesh by applying geometric simplifications.
32
+
33
+ Parameters
34
+ ----------
35
+ vertices : numpy.ndarray
36
+ Vertex coordinates of the input mesh.
37
+ faces : list of list of int
38
+ Face connectivity describing the mesh panels.
39
+ faces_metadata: Dict[str, np.ndarray]
40
+ Arrays whose first dimension equals the number of faces,
41
+ each storing a field defined on all the faces of the mesh.
42
+ max_iter : int, default=5
43
+ Maximum number of cleaning iterations to perform.
44
+ tol : float, default=1e-8
45
+ Tolerance used when merging near-duplicate vertices.
46
+
47
+ Returns
48
+ -------
49
+ tuple[numpy.ndarray, list of list of int, dict of str to numpy.ndarray]
50
+ The cleaned vertex array, the associated face connectivity and the filtered faces metadata.
51
+ """
52
+ for _ in range(max_iter):
53
+ nb_vertices_before = len(vertices)
54
+ nb_faces_before = len(faces)
55
+
56
+ vertices, faces, faces_metadata = clean_mesh_once(vertices, faces, faces_metadata, tol=tol)
57
+
58
+ if len(vertices) == nb_vertices_before and len(faces) == nb_faces_before:
59
+ break
60
+
61
+ return vertices, faces, faces_metadata
62
+
63
+
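A small end-to-end illustration of clean_mesh, constructed for this description rather than taken from the package's tests: one vertex listed twice, one face repeated through the duplicated vertex, and one degenerate face are all removed in a single call.

    import numpy as np
    from capytaine.meshes.clean import clean_mesh

    vertices = np.array([[0.0, 0.0, 0.0], [1.0, 0.0, 0.0], [1.0, 1.0, 0.0],
                         [0.0, 1.0, 0.0], [0.0, 0.0, 0.0]])   # last row duplicates row 0
    faces = [[0, 1, 2, 3],      # valid quad
             [4, 1, 2, 3],      # same quad written through the duplicated vertex
             [0, 1, 1, 0]]      # degenerate face (fewer than 3 unique vertices)
    metadata = {"panel_id": np.arange(3)}

    v, f, m = clean_mesh(vertices, faces, metadata)
    # v.shape == (4, 3), f == [[0, 1, 2, 3]], m["panel_id"] == array([0])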
64
+ def clean_mesh_once(
65
+ vertices: np.ndarray,
66
+ faces: List[List[int]],
67
+ faces_metadata: Dict[str, np.ndarray],
68
+ tol: float = 1e-10
69
+ ) -> Tuple[np.ndarray, List[List[int]], Dict[str, np.ndarray]]:
70
+ """Run a single cleaning pass on the mesh data.
71
+
72
+ Parameters
73
+ ----------
74
+ vertices : numpy.ndarray
75
+ Vertex coordinates describing the mesh geometry.
76
+ faces : list of list of int
77
+ Face connectivity with indices referencing ``vertices``.
78
+ faces_metadata: Dict[str, np.ndarray]
79
+ Arrays whose first dimension equals the number of faces,
80
+ each storing a field defined on all the faces of the mesh.
81
+ tol : float, default=1e-10
82
+ Tolerance for considering vertices as duplicates.
83
+
84
+ Returns
85
+ -------
86
+ tuple[numpy.ndarray, list of list of int, dict of str to numpy.ndarray]
87
+ Updated vertices, faces and faces metadata after the cleaning step.
88
+
89
+ Raises
90
+ ------
91
+ ValueError
92
+ If an unsupported face configuration is encountered.
93
+ """
94
+ # 1) merge almost-duplicate vertices
95
+ vertices, faces = merge_near_duplicate_vertices(vertices, faces, tol=tol)
96
+
97
+ # 2) remove duplicate vertices indices in faces
98
+ # and check that all faces have 3 or 4 unique vertices
99
+ new_faces = []
100
+ degenerate_faces_indices = []
101
+
102
+ for i_face, face in enumerate(faces):
103
+ seen = set()
104
+ uniq = []
105
+ for vi in face:
106
+ if vi not in seen:
107
+ seen.add(vi)
108
+ uniq.append(vi)
109
+
110
+ if len(uniq) in (3, 4):
111
+ new_faces.append(uniq)
112
+ elif len(uniq) < 3:
113
+ degenerate_faces_indices.append(i_face)
114
+ else:
115
+ raise ValueError(
116
+ f"Face with {len(uniq)} unique vertices: only 3 or 4 supported."
117
+ )
118
+
119
+ if len(degenerate_faces_indices) > 0:
120
+ LOG.debug(
121
+ f"Dropping {len(degenerate_faces_indices)} degenerate faces with <3 vertices: "
122
+ f"{[faces[i] for i in degenerate_faces_indices[:5]]}{' ...' if len(degenerate_faces_indices) > 5 else ''}"
123
+ )
124
+ faces_metadata = {k: np.delete(faces_metadata[k], degenerate_faces_indices, axis=0) for k in faces_metadata}
125
+
126
+ # 3) continue cleaning pipeline, all functions must accept List-of-lists too
127
+ vertices, faces = remove_duplicate_vertices(vertices, new_faces)
128
+ faces, faces_metadata = remove_duplicate_faces(faces, faces_metadata)
129
+ vertices, faces = remove_unused_vertices(vertices, faces)
130
+ faces, faces_metadata = remove_small_faces(vertices, faces, faces_metadata, tol=tol)
131
+ vertices, faces = remove_unused_vertices(vertices, faces)
132
+
133
+ return vertices, faces, faces_metadata
134
+
135
+
136
+ def merge_near_duplicate_vertices(
137
+ vertices: np.ndarray, faces: List[List[int]], tol: float = 1e-8
138
+ ) -> Tuple[np.ndarray, List[List[int]]]:
139
+ """Merge vertices that are closer than a tolerance.
140
+
141
+ Parameters
142
+ ----------
143
+ vertices : numpy.ndarray
144
+ Vertex coordinates of shape ``(n, 3)``.
145
+ faces : list of list of int
146
+ Face connectivity referencing the ``vertices`` array.
147
+ tol : float, default=1e-8
148
+ Distance threshold below which vertices are considered duplicates.
149
+
150
+ Returns
151
+ -------
152
+ tuple[numpy.ndarray, list of list of int]
153
+ Deduplicated vertices and remapped faces.
154
+ """
155
+ if len(vertices) == 0:
156
+ return vertices, faces
157
+
158
+ tree = cKDTree(vertices)
159
+ groups = tree.query_ball_tree(tree, r=tol)
160
+
161
+ representative = {}
162
+ new_vertices = []
163
+ for i, group in enumerate(groups):
164
+ rep = min(group)
165
+ if rep not in representative:
166
+ representative[rep] = len(new_vertices)
167
+ new_vertices.append(vertices[rep])
168
+ representative[i] = representative[rep]
169
+
170
+ faces = [[representative[idx] for idx in face] for face in faces]
171
+ new_vertices = np.array(new_vertices)
172
+ return new_vertices, faces
173
+
174
+
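For instance (illustrative values), two vertices closer than the tolerance collapse onto one row and the face indices are remapped; a face degenerated by the merge is handled by later steps of the pipeline:

    import numpy as np
    from capytaine.meshes.clean import merge_near_duplicate_vertices

    vertices = np.array([[0.0, 0.0, 0.0],
                         [1.0, 0.0, 0.0],
                         [1.0 + 1e-10, 0.0, 0.0]])   # within tol of vertex 1
    v, f = merge_near_duplicate_vertices(vertices, [[0, 1, 2]], tol=1e-8)
    # v.shape == (2, 3) and f == [[0, 1, 1]]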
175
+ def remove_duplicate_vertices(
176
+ vertices: np.ndarray, faces: List[List[int]]
177
+ ) -> Tuple[np.ndarray, List[List[int]]]:
178
+ """Remove exactly repeated vertices and remap faces accordingly.
179
+
180
+ Parameters
181
+ ----------
182
+ vertices : numpy.ndarray
183
+ Vertex coordinates of shape ``(n, 3)``.
184
+ faces : list of list of int
185
+ Face connectivity using indices into ``vertices``.
186
+
187
+ Returns
188
+ -------
189
+ tuple[numpy.ndarray, list of list of int]
190
+ Unique vertices and faces with updated indices.
191
+ """
192
+ unique_vertices = []
193
+ vertices_map = {}
194
+ for vertex in vertices:
195
+ vertex_tuple = tuple(vertex)
196
+ if vertex_tuple not in vertices_map:
197
+ vertices_map[vertex_tuple] = len(unique_vertices)
198
+ unique_vertices.append(vertex)
199
+ new_faces = [[vertices_map[tuple(vertices[i])] for i in face] for face in faces]
200
+ new_vertices = np.array(unique_vertices)
201
+
202
+ return new_vertices, new_faces
203
+
204
+
205
+ def remove_duplicate_faces(faces, faces_metadata):
206
+ """Eliminate duplicate faces while preserving order.
207
+
208
+ Parameters
209
+ ----------
210
+ faces : list of list of int
211
+ Face connectivity to deduplicate.
212
+ faces_metadata: Dict[str, np.ndarray]
213
+ Fields associated with the faces.
214
+
215
+ Returns
216
+ -------
217
+ list of list of int
218
+ Face connectivity with duplicates removed.
219
+ Dict[str, np.ndarray]
220
+ Updated metadata
221
+ """
222
+ unique_faces = []
223
+ face_set = set()
224
+ deduplicated_faces_indices = []
225
+ for i_face, face in enumerate(faces):
226
+ face_tuple = tuple(sorted(face))
227
+ if face_tuple not in face_set:
228
+ face_set.add(face_tuple)
229
+ unique_faces.append(face)
230
+ else:
231
+ deduplicated_faces_indices.append(i_face)
232
+
233
+ faces_metadata = {k: np.delete(faces_metadata[k], deduplicated_faces_indices, axis=0) for k in faces_metadata}
234
+
235
+ return unique_faces, faces_metadata
236
+
237
+
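Duplicates are detected on the sorted vertex indices, so two faces listing the same vertices in a different order count as a single face. For example (illustrative values):

    import numpy as np
    from capytaine.meshes.clean import remove_duplicate_faces

    faces = [[0, 1, 2], [2, 1, 0], [1, 2, 3]]     # the first two describe the same triangle
    metadata = {"group": np.array([10, 10, 20])}
    faces, metadata = remove_duplicate_faces(faces, metadata)
    # faces == [[0, 1, 2], [1, 2, 3]] and metadata["group"] == array([10, 20])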
238
+ def remove_unused_vertices(
239
+ vertices: np.ndarray, faces: List[List[int]]
240
+ ) -> Tuple[np.ndarray, List[List[int]]]:
241
+ """Remove vertices that are not referenced by any face.
242
+
243
+ Parameters
244
+ ----------
245
+ vertices : numpy.ndarray
246
+ Vertex coordinates of shape ``(n, 3)``.
247
+ faces : list of list of int
248
+ Face connectivity using indices into ``vertices``.
249
+
250
+ Returns
251
+ -------
252
+ tuple[numpy.ndarray, list of list of int]
253
+ Reduced vertex array and corresponding face connectivity.
254
+ """
255
+ used = sorted({i for face in faces for i in face})
256
+ remap = {old: new for new, old in enumerate(used)}
257
+ new_vs = vertices[used]
258
+ new_fs = [[remap[i] for i in face] for face in faces]
259
+ return new_vs, new_fs
260
+
261
+
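For example (illustrative values), a vertex referenced by no face is dropped and the remaining indices are renumbered contiguously:

    import numpy as np
    from capytaine.meshes.clean import remove_unused_vertices

    v = np.array([[0.0, 0.0, 0.0], [5.0, 5.0, 5.0], [1.0, 0.0, 0.0], [0.0, 1.0, 0.0]])
    new_v, new_f = remove_unused_vertices(v, [[0, 2, 3]])
    # new_v keeps rows 0, 2 and 3; new_f == [[0, 1, 2]]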
262
+ def remove_small_faces(
263
+ vertices: np.ndarray,
264
+ faces: List[List[int]],
265
+ faces_metadata: Dict[str, np.ndarray],
266
+ tol: float = 1e-8
267
+ ):
268
+ """Remove faces whose area falls below a tolerance.
269
+
270
+ Parameters
271
+ ----------
272
+ vertices : numpy.ndarray
273
+ Vertex coordinates used to evaluate surface area.
274
+ faces : list of list of int
275
+ Face connectivity referencing ``vertices``.
276
+ faces_metadata: Dict[str, np.ndarray]
277
+ Fields associated with the faces.
278
+ tol : float, default=1e-8
279
+ Minimum allowable face area.
280
+
281
+ Returns
282
+ -------
283
+ tuple[list of list of int, dict of str to numpy.ndarray]
284
+ Faces whose area exceeds the threshold, together with the matching metadata rows.
285
+ """
286
+
287
+ def face_area(face):
288
+ v = vertices[face]
289
+ if len(face) == 4:
290
+ a1 = 0.5 * np.linalg.norm(np.cross(v[1] - v[0], v[2] - v[0]))
291
+ a2 = 0.5 * np.linalg.norm(np.cross(v[2] - v[0], v[3] - v[0]))
292
+ return a1 + a2
293
+ elif len(face) == 3:
294
+ return 0.5 * np.linalg.norm(np.cross(v[1] - v[0], v[2] - v[0]))
295
+ return 0.0
296
+
297
+ areas = np.array([face_area(face) for face in faces])
298
+ mask = areas > tol
299
+ faces = [face for face, keep in zip(faces, mask) if keep]
300
+ faces_metadata = {k: faces_metadata[k][mask, ...] for k in faces_metadata}
301
+
302
+ return faces, faces_metadata
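Triangle areas come from a single cross product and quadrilaterals are split into two triangles; faces below the tolerance are dropped together with their metadata rows. For example (illustrative values):

    import numpy as np
    from capytaine.meshes.clean import remove_small_faces

    vertices = np.array([[0.0, 0.0, 0.0], [1.0, 0.0, 0.0], [1.0, 1.0, 0.0], [0.0, 1.0, 0.0],
                         [0.0, 0.0, 0.0], [1e-6, 0.0, 0.0], [0.0, 1e-6, 0.0]])
    faces = [[0, 1, 2, 3],      # area 1.0
             [4, 5, 6]]         # area 5e-13, below the default tolerance
    metadata = {"panel_id": np.array([0, 1])}
    faces, metadata = remove_small_faces(vertices, faces, metadata)
    # faces == [[0, 1, 2, 3]] and metadata["panel_id"] == array([0])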