capytaine-2.3-cp39-cp39-macosx_14_0_arm64.whl → capytaine-3.0.0a1-cp39-cp39-macosx_14_0_arm64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- capytaine/.dylibs/libgcc_s.1.1.dylib +0 -0
- capytaine/.dylibs/libgfortran.5.dylib +0 -0
- capytaine/.dylibs/libquadmath.0.dylib +0 -0
- capytaine/__about__.py +7 -2
- capytaine/__init__.py +8 -12
- capytaine/bem/engines.py +234 -354
- capytaine/bem/problems_and_results.py +30 -21
- capytaine/bem/solver.py +205 -81
- capytaine/bodies/bodies.py +279 -862
- capytaine/bodies/dofs.py +136 -9
- capytaine/bodies/hydrostatics.py +540 -0
- capytaine/bodies/multibodies.py +216 -0
- capytaine/green_functions/{libs/Delhommeau_float32.cpython-39-darwin.so → Delhommeau_float32.cpython-39-darwin.so} +0 -0
- capytaine/green_functions/{libs/Delhommeau_float64.cpython-39-darwin.so → Delhommeau_float64.cpython-39-darwin.so} +0 -0
- capytaine/green_functions/abstract_green_function.py +2 -2
- capytaine/green_functions/delhommeau.py +50 -31
- capytaine/green_functions/hams.py +19 -13
- capytaine/io/legacy.py +3 -103
- capytaine/io/xarray.py +15 -10
- capytaine/meshes/__init__.py +2 -6
- capytaine/meshes/abstract_meshes.py +375 -0
- capytaine/meshes/clean.py +302 -0
- capytaine/meshes/clip.py +347 -0
- capytaine/meshes/export.py +89 -0
- capytaine/meshes/geometry.py +244 -394
- capytaine/meshes/io.py +433 -0
- capytaine/meshes/meshes.py +621 -676
- capytaine/meshes/predefined/cylinders.py +22 -56
- capytaine/meshes/predefined/rectangles.py +26 -85
- capytaine/meshes/predefined/spheres.py +4 -11
- capytaine/meshes/quality.py +118 -407
- capytaine/meshes/surface_integrals.py +48 -29
- capytaine/meshes/symmetric_meshes.py +641 -0
- capytaine/meshes/visualization.py +353 -0
- capytaine/post_pro/free_surfaces.py +1 -4
- capytaine/post_pro/kochin.py +10 -10
- capytaine/tools/block_circulant_matrices.py +275 -0
- capytaine/tools/lists_of_points.py +2 -2
- capytaine/tools/memory_monitor.py +45 -0
- capytaine/tools/symbolic_multiplication.py +31 -5
- capytaine/tools/timer.py +68 -42
- {capytaine-2.3.dist-info → capytaine-3.0.0a1.dist-info}/METADATA +8 -14
- capytaine-3.0.0a1.dist-info/RECORD +65 -0
- capytaine-3.0.0a1.dist-info/WHEEL +6 -0
- capytaine/bodies/predefined/__init__.py +0 -6
- capytaine/bodies/predefined/cylinders.py +0 -151
- capytaine/bodies/predefined/rectangles.py +0 -111
- capytaine/bodies/predefined/spheres.py +0 -70
- capytaine/green_functions/FinGreen3D/.gitignore +0 -1
- capytaine/green_functions/FinGreen3D/FinGreen3D.f90 +0 -3589
- capytaine/green_functions/FinGreen3D/LICENSE +0 -165
- capytaine/green_functions/FinGreen3D/Makefile +0 -16
- capytaine/green_functions/FinGreen3D/README.md +0 -24
- capytaine/green_functions/FinGreen3D/test_program.f90 +0 -39
- capytaine/green_functions/LiangWuNoblesse/.gitignore +0 -1
- capytaine/green_functions/LiangWuNoblesse/LICENSE +0 -504
- capytaine/green_functions/LiangWuNoblesse/LiangWuNoblesseWaveTerm.f90 +0 -751
- capytaine/green_functions/LiangWuNoblesse/Makefile +0 -18
- capytaine/green_functions/LiangWuNoblesse/README.md +0 -2
- capytaine/green_functions/LiangWuNoblesse/test_program.f90 +0 -28
- capytaine/green_functions/libs/__init__.py +0 -0
- capytaine/io/mesh_loaders.py +0 -1086
- capytaine/io/mesh_writers.py +0 -692
- capytaine/io/meshio.py +0 -38
- capytaine/matrices/__init__.py +0 -16
- capytaine/matrices/block.py +0 -592
- capytaine/matrices/block_toeplitz.py +0 -325
- capytaine/matrices/builders.py +0 -89
- capytaine/matrices/linear_solvers.py +0 -232
- capytaine/matrices/low_rank.py +0 -395
- capytaine/meshes/clipper.py +0 -465
- capytaine/meshes/collections.py +0 -334
- capytaine/meshes/mesh_like_protocol.py +0 -37
- capytaine/meshes/properties.py +0 -276
- capytaine/meshes/quadratures.py +0 -80
- capytaine/meshes/symmetric.py +0 -392
- capytaine/tools/lru_cache.py +0 -49
- capytaine/ui/vtk/__init__.py +0 -3
- capytaine/ui/vtk/animation.py +0 -329
- capytaine/ui/vtk/body_viewer.py +0 -28
- capytaine/ui/vtk/helpers.py +0 -82
- capytaine/ui/vtk/mesh_viewer.py +0 -461
- capytaine-2.3.dist-info/RECORD +0 -92
- capytaine-2.3.dist-info/WHEEL +0 -4
- {capytaine-2.3.dist-info → capytaine-3.0.0a1.dist-info}/LICENSE +0 -0
- {capytaine-2.3.dist-info → capytaine-3.0.0a1.dist-info}/entry_points.txt +0 -0
capytaine/meshes/clean.py
ADDED
@@ -0,0 +1,302 @@

# Copyright 2025 Mews Labs
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import logging
from typing import List, Tuple, Dict

import numpy as np
from scipy.spatial import cKDTree

LOG = logging.getLogger(__name__)


def clean_mesh(
    vertices: np.ndarray,
    faces: List[List[int]],
    faces_metadata: Dict[str, np.ndarray],
    max_iter: int = 5,
    tol: float = 1e-8
) -> Tuple[np.ndarray, List[List[int]], Dict[str, np.ndarray]]:
    """Iteratively clean a mesh by applying geometric simplifications.

    Parameters
    ----------
    vertices : numpy.ndarray
        Vertex coordinates of the input mesh.
    faces : list of list of int
        Face connectivity describing the mesh panels.
    faces_metadata : Dict[str, np.ndarray]
        Arrays whose first dimension matches the number of faces, each storing
        a field defined on the faces of the mesh.
    max_iter : int, default=5
        Maximum number of cleaning iterations to perform.
    tol : float, default=1e-8
        Tolerance used when merging near-duplicate vertices and dropping
        near-zero-area faces.

    Returns
    -------
    tuple[numpy.ndarray, list of list of int, Dict[str, np.ndarray]]
        The cleaned vertex array, the associated face connectivity and the
        updated face metadata.
    """
    for _ in range(max_iter):
        nb_vertices_before = len(vertices)
        nb_faces_before = len(faces)

        vertices, faces, faces_metadata = clean_mesh_once(vertices, faces, faces_metadata, tol=tol)

        if len(vertices) == nb_vertices_before and len(faces) == nb_faces_before:
            break

    return vertices, faces, faces_metadata


def clean_mesh_once(
    vertices: np.ndarray,
    faces: List[List[int]],
    faces_metadata: Dict[str, np.ndarray],
    tol: float = 1e-10
) -> Tuple[np.ndarray, List[List[int]], Dict[str, np.ndarray]]:
    """Run a single cleaning pass on the mesh data.

    Parameters
    ----------
    vertices : numpy.ndarray
        Vertex coordinates describing the mesh geometry.
    faces : list of list of int
        Face connectivity with indices referencing ``vertices``.
    faces_metadata : Dict[str, np.ndarray]
        Arrays whose first dimension matches the number of faces, each storing
        a field defined on the faces of the mesh.
    tol : float, default=1e-10
        Tolerance for considering vertices as duplicates.

    Returns
    -------
    tuple[numpy.ndarray, list of list of int, Dict[str, np.ndarray]]
        Updated vertices, faces and face metadata after the cleaning step.

    Raises
    ------
    ValueError
        If an unsupported face configuration is encountered.
    """
    # 1) merge almost-duplicate vertices
    vertices, faces = merge_near_duplicate_vertices(vertices, faces, tol=tol)

    # 2) remove duplicate vertex indices in faces
    # and check that all faces have 3 or 4 unique vertices
    new_faces = []
    degenerate_faces_indices = []

    for i_face, face in enumerate(faces):
        seen = set()
        uniq = []
        for vi in face:
            if vi not in seen:
                seen.add(vi)
                uniq.append(vi)

        if len(uniq) in (3, 4):
            new_faces.append(uniq)
        elif len(uniq) < 3:
            degenerate_faces_indices.append(i_face)
        else:
            raise ValueError(
                f"Face with {len(uniq)} unique vertices: only 3 or 4 supported."
            )

    if len(degenerate_faces_indices) > 0:
        LOG.debug(
            f"Dropping {len(degenerate_faces_indices)} degenerate faces with <3 vertices: "
            f"{[faces[i] for i in degenerate_faces_indices[:5]]}{' ...' if len(degenerate_faces_indices) > 5 else ''}"
        )
        faces_metadata = {k: np.delete(faces_metadata[k], degenerate_faces_indices, axis=0) for k in faces_metadata}

    # 3) continue the cleaning pipeline; all functions must accept lists of lists too
    vertices, faces = remove_duplicate_vertices(vertices, new_faces)
    faces, faces_metadata = remove_duplicate_faces(faces, faces_metadata)
    vertices, faces = remove_unused_vertices(vertices, faces)
    faces, faces_metadata = remove_small_faces(vertices, faces, faces_metadata, tol=tol)
    vertices, faces = remove_unused_vertices(vertices, faces)

    return vertices, faces, faces_metadata


def merge_near_duplicate_vertices(
    vertices: np.ndarray, faces: List[List[int]], tol: float = 1e-8
) -> Tuple[np.ndarray, List[List[int]]]:
    """Merge vertices that are closer than a tolerance.

    Parameters
    ----------
    vertices : numpy.ndarray
        Vertex coordinates of shape ``(n, 3)``.
    faces : list of list of int
        Face connectivity referencing the ``vertices`` array.
    tol : float, default=1e-8
        Distance threshold below which vertices are considered duplicates.

    Returns
    -------
    tuple[numpy.ndarray, list of list of int]
        Deduplicated vertices and remapped faces.
    """
    if len(vertices) == 0:
        return vertices, faces

    tree = cKDTree(vertices)
    groups = tree.query_ball_tree(tree, r=tol)

    representative = {}
    new_vertices = []
    for i, group in enumerate(groups):
        rep = min(group)
        if rep not in representative:
            representative[rep] = len(new_vertices)
            new_vertices.append(vertices[rep])
        representative[i] = representative[rep]

    faces = [[representative[idx] for idx in face] for face in faces]
    new_vertices = np.array(new_vertices)
    return new_vertices, faces


def remove_duplicate_vertices(
    vertices: np.ndarray, faces: List[List[int]]
) -> Tuple[np.ndarray, List[List[int]]]:
    """Remove exactly repeated vertices and remap faces accordingly.

    Parameters
    ----------
    vertices : numpy.ndarray
        Vertex coordinates of shape ``(n, 3)``.
    faces : list of list of int
        Face connectivity using indices into ``vertices``.

    Returns
    -------
    tuple[numpy.ndarray, list of list of int]
        Unique vertices and faces with updated indices.
    """
    unique_vertices = []
    vertices_map = {}
    for vertex in vertices:
        vertex_tuple = tuple(vertex)
        if vertex_tuple not in vertices_map:
            vertices_map[vertex_tuple] = len(unique_vertices)
            unique_vertices.append(vertex)
    new_faces = [[vertices_map[tuple(vertices[i])] for i in face] for face in faces]
    new_vertices = np.array(unique_vertices)

    return new_vertices, new_faces


def remove_duplicate_faces(faces, faces_metadata):
    """Eliminate duplicate faces while preserving order.

    Parameters
    ----------
    faces : list of list of int
        Face connectivity to deduplicate.
    faces_metadata : Dict[str, np.ndarray]
        Fields associated with the faces.

    Returns
    -------
    list of list of int
        Face connectivity with duplicates removed.
    Dict[str, np.ndarray]
        Updated metadata.
    """
    unique_faces = []
    face_set = set()
    deduplicated_faces_indices = []
    for i_face, face in enumerate(faces):
        face_tuple = tuple(sorted(face))
        if face_tuple not in face_set:
            face_set.add(face_tuple)
            unique_faces.append(face)
        else:
            deduplicated_faces_indices.append(i_face)

    faces_metadata = {k: np.delete(faces_metadata[k], deduplicated_faces_indices, axis=0) for k in faces_metadata}

    return unique_faces, faces_metadata


def remove_unused_vertices(
    vertices: np.ndarray, faces: List[List[int]]
) -> Tuple[np.ndarray, List[List[int]]]:
    """Remove vertices that are not referenced by any face.

    Parameters
    ----------
    vertices : numpy.ndarray
        Vertex coordinates of shape ``(n, 3)``.
    faces : list of list of int
        Face connectivity using indices into ``vertices``.

    Returns
    -------
    tuple[numpy.ndarray, list of list of int]
        Reduced vertex array and corresponding face connectivity.
    """
    used = sorted({i for face in faces for i in face})
    remap = {old: new for new, old in enumerate(used)}
    new_vs = vertices[used]
    new_fs = [[remap[i] for i in face] for face in faces]
    return new_vs, new_fs


def remove_small_faces(
    vertices: np.ndarray,
    faces: List[List[int]],
    faces_metadata: Dict[str, np.ndarray],
    tol: float = 1e-8
):
    """Remove faces whose area falls below a tolerance.

    Parameters
    ----------
    vertices : numpy.ndarray
        Vertex coordinates used to evaluate surface area.
    faces : list of list of int
        Face connectivity referencing ``vertices``.
    faces_metadata : Dict[str, np.ndarray]
        Fields associated with the faces.
    tol : float, default=1e-8
        Minimum allowable face area.

    Returns
    -------
    list of list of int
        Faces that exceed the area threshold.
    Dict[str, np.ndarray]
        Metadata restricted to the remaining faces.
    """

    def face_area(face):
        v = vertices[face]
        if len(face) == 4:
            a1 = 0.5 * np.linalg.norm(np.cross(v[1] - v[0], v[2] - v[0]))
            a2 = 0.5 * np.linalg.norm(np.cross(v[2] - v[0], v[3] - v[0]))
            return a1 + a2
        elif len(face) == 3:
            return 0.5 * np.linalg.norm(np.cross(v[1] - v[0], v[2] - v[0]))
        return 0.0

    areas = np.array([face_area(face) for face in faces])
    mask = areas > tol
    faces = [face for face, keep in zip(faces, mask) if keep]
    faces_metadata = {k: faces_metadata[k][mask, ...] for k in faces_metadata}

    return faces, faces_metadata
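For orientation, the new cleaning pipeline takes raw vertex coordinates, face connectivity and per-face metadata arrays, and returns their deduplicated counterparts. A minimal usage sketch (not part of the diff; the toy panel data below is an illustrative assumption, only the module path and `clean_mesh` come from the file above):

import numpy as np
from capytaine.meshes.clean import clean_mesh

# Two quads sharing an edge; vertex 4 is a near-duplicate of vertex 1.
vertices = np.array([
    [0.0, 0.0, 0.0], [1.0, 0.0, 0.0], [1.0, 1.0, 0.0], [0.0, 1.0, 0.0],
    [1.0, 1e-10, 0.0], [2.0, 0.0, 0.0], [2.0, 1.0, 0.0],
])
faces = [[0, 1, 2, 3], [4, 5, 6, 2]]
metadata = {"centers": np.array([[0.5, 0.5, 0.0], [1.5, 0.5, 0.0]])}

clean_vertices, clean_faces, clean_metadata = clean_mesh(vertices, faces, metadata)
print(len(clean_vertices), clean_faces)
# 6 [[0, 1, 2, 3], [1, 4, 5, 2]]  -- vertex 4 was merged into vertex 1

The metadata dictionary is carried through every step, so `clean_metadata["centers"]` stays aligned with the surviving faces.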
capytaine/meshes/clip.py
ADDED
@@ -0,0 +1,347 @@

# Copyright 2025 Mews Labs
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import annotations
from typing import List, Set, Tuple

import numpy as np


def _get_intersection(
    v1: np.ndarray,
    v2: np.ndarray,
    normal: np.ndarray,
    origin: np.ndarray,
    tol: float = 1e-8,
) -> np.ndarray:
    """Intersect a line segment with a plane.

    Parameters
    ----------
    v1, v2 : numpy.ndarray
        Endpoints of the segment expressed as 3D vectors.
    normal : numpy.ndarray
        Plane normal; does not need to be unit length.
    origin : numpy.ndarray
        Point lying on the plane.
    tol : float, default=1e-8
        Tolerance used to detect parallel segments and clamp the intersection
        parameter to the segment bounds.

    Returns
    -------
    numpy.ndarray
        Intersection point located on the (possibly clamped) segment.

    Raises
    ------
    ValueError
        If the segment is parallel to the plane or the intersection lies
        outside the tolerated segment range.
    """
    v1 = np.asarray(v1, dtype=float)
    v2 = np.asarray(v2, dtype=float)
    n = np.asarray(normal, dtype=float)
    o = np.asarray(origin, dtype=float)

    u = v2 - v1
    denom = float(np.dot(n, u))
    if abs(denom) < tol:
        raise ValueError("Segment is parallel to the plane (no unique intersection).")

    t = float(np.dot(n, (o - v1)) / denom)

    if t < -tol or t > 1.0 + tol:
        raise ValueError(f"Intersection t={t:.6g} lies outside the segment [0,1].")

    t = max(0.0, min(1.0, t))
    return v1 + t * u


def compute_aspect_ratio(tri_pts: np.ndarray) -> float:
    """Compute the aspect ratio of a triangle.

    The aspect ratio is defined as the longest edge length divided by the
    altitude relative to that edge.

    Parameters
    ----------
    tri_pts : numpy.ndarray
        Triangle vertices arranged in a ``(3, 3)`` array.

    Returns
    -------
    float
        Aspect ratio ``L / h`` (values greater than or equal to 1).

    Raises
    ------
    ValueError
        If the triangle is degenerate and its area approaches zero.
    """
    tri_pts = np.asarray(tri_pts, dtype=float)
    if tri_pts.shape != (3, 3):
        raise ValueError("tri_pts must have shape (3,3).")

    edges = np.array(
        [
            np.linalg.norm(tri_pts[1] - tri_pts[0]),
            np.linalg.norm(tri_pts[2] - tri_pts[1]),
            np.linalg.norm(tri_pts[0] - tri_pts[2]),
        ],
        dtype=float,
    )
    L = float(np.max(edges))
    i = int(np.argmax(edges))

    A, B = tri_pts[i], tri_pts[(i + 1) % 3]
    C = tri_pts[(i + 2) % 3]
    area = 0.5 * np.linalg.norm(np.cross(B - A, C - A))
    if area <= 0.0:
        raise ValueError("Degenerate triangle: zero area.")
    h = 2.0 * area / L
    return L / h


def _signed_distances(
    verts: list[list[float]],
    face: list[int],
    normal: np.ndarray,
    origin: np.ndarray,
) -> np.ndarray:
    """Evaluate signed distances of face vertices to a clipping plane.

    Parameters
    ----------
    verts : list of list of float
        All mesh vertices.
    face : list of int
        Indices of the vertices forming the face.
    normal : numpy.ndarray
        Plane normal vector.
    origin : numpy.ndarray
        Point belonging to the plane.

    Returns
    -------
    numpy.ndarray
        Signed distances for the vertices belonging to ``face``.
    """
    pts = np.asarray([verts[i] for i in face], dtype=float)
    return (origin - pts) @ normal


def _compute_keep_sets(
    verts: list[list[float]],
    face: list[int],
    normal: np.ndarray,
    origin: np.ndarray,
    tol: float,
) -> tuple[list[int], set[int], np.ndarray]:
    """Split vertices of a face between kept and discarded sets.

    Parameters
    ----------
    verts : list of list of float
        All mesh vertices.
    face : list of int
        Indices forming the face currently being clipped.
    normal : numpy.ndarray
        Plane normal vector defining the clipping plane.
    origin : numpy.ndarray
        Point on the clipping plane.
    tol : float
        Tolerance used to consider vertices inside the kept half-space.

    Returns
    -------
    list of int
        Indices of vertices that remain after clipping.
    set of int
        Indices of vertices removed by the clipping plane.
    numpy.ndarray
        Signed distance of each face vertex to the plane.
    """
    s = _signed_distances(verts, face, normal, origin)
    mask = s >= -tol
    keep = [face[i] for i, m in enumerate(mask) if m]
    unkeep = set(face) - set(keep)
    return keep, unkeep, s


def _compute_edge_intersections(
    verts: list[list[float]],
    face: list[int],
    keep: list[int],
    normal: np.ndarray,
    origin: np.ndarray,
    tol: float,
) -> dict[tuple[int, int], int]:
    """Intersect face edges with the clipping plane.

    Parameters
    ----------
    verts : list of list of float
        Mutable list of mesh vertices; intersection points are appended here.
    face : list of int
        Indices of the vertices forming the face being processed.
    keep : list of int
        Vertices that remain inside the kept half-space.
    normal : numpy.ndarray
        Plane normal vector.
    origin : numpy.ndarray
        Point belonging to the plane.
    tol : float
        Tolerance for plane intersection checks.

    Returns
    -------
    dict[tuple[int, int], int]
        Mapping from directed edges to newly created vertex indices.
    """
    keep_set = set(keep)
    edges = list(zip(face, face[1:] + face[:1]))
    cut_edges = [(i, j) for (i, j) in edges if (i in keep_set) ^ (j in keep_set)]

    edge_inters: dict[tuple[int, int], int] = {}
    for i, j in cut_edges:
        ip = _get_intersection(
            np.array(verts[i]), np.array(verts[j]), normal, origin, tol
        )
        idx = len(verts)
        verts.append(ip.tolist())
        edge_inters[(i, j)] = idx
    return edge_inters


def _build_clipped_boundary(
    face: list[int],
    keep: list[int],
    edge_inters: dict[tuple[int, int], int],
) -> list[int]:
    """Assemble the boundary indices for a clipped polygon.

    Parameters
    ----------
    face : list of int
        Original face indices.
    keep : list of int
        Vertices that remain after clipping.
    edge_inters : dict[tuple[int, int], int]
        Mapping from edges to newly created intersection vertices.

    Returns
    -------
    list of int
        Ordered vertex indices describing the clipped boundary.
    """
    keep_set = set(keep)
    boundary: list[int] = []
    for i, j in zip(face, face[1:] + face[:1]):
        if i in keep_set:
            boundary.append(i)
        if (i, j) in edge_inters:
            boundary.append(edge_inters[(i, j)])
    return boundary


def clip_faces(
    vertices: np.ndarray,
    faces: list[list[int]],
    normal: np.ndarray,
    origin: np.ndarray,
    tol: float = 1e-8,
) -> Tuple[np.ndarray, List[List[int]], np.ndarray]:
    """Clip faces of a mesh against a plane.

    The kept half-space is the one where ``(origin - v) · normal >= -tol``,
    i.e. vertices lying on the side of the plane opposite to ``normal`` are
    kept (within tolerance).

    Parameters
    ----------
    vertices : numpy.ndarray
        Input vertex positions of shape ``(n, 3)``.
    faces : list of list of int
        Face connectivity; triangles and quads are supported.
    normal : numpy.ndarray
        Normal vector of the clipping plane.
    origin : numpy.ndarray
        Point located on the plane.
    tol : float, default=1e-8
        Tolerance for classifying vertices relative to the plane.

    Returns
    -------
    np.ndarray of floats of shape (new_nb_vertices, 3)
        The pruned vertex array.
    list of list of int
        The list of clipped faces, of length new_nb_faces.
    np.ndarray of ints of shape (new_nb_faces,)
        For each new face, the index of the face it comes from in the input.
    """
    normal = np.asarray(normal, dtype=float)
    origin = np.asarray(origin, dtype=float)

    verts: List[List[float]] = vertices.astype(float).tolist()
    new_faces: List[Tuple[int, List[int]]] = []
    # A new face is a tuple storing the index of the parent face in the
    # original mesh and a list of vertices
    dropped_vs: Set[int] = set()

    for i_face, face in enumerate(faces):
        keep, unkeep, _ = _compute_keep_sets(verts, face, normal, origin, tol)
        dropped_vs.update(unkeep)

        if len(keep) == 0:
            continue  # fully outside
        if len(keep) == len(face):
            # Face fully inside → keep original (quad or triangle unchanged)
            new_faces.append((i_face, list(face)))
            continue

        edge_inters = _compute_edge_intersections(
            verts, face, keep, normal, origin, tol
        )
        boundary = _build_clipped_boundary(face, keep, edge_inters)

        if len(boundary) == 3:
            new_faces.append((i_face, boundary))
        elif len(boundary) == 4:
            # clipped quad → 2 triangles
            new_faces.append((i_face, [boundary[0], boundary[1], boundary[2]]))
            new_faces.append((i_face, [boundary[0], boundary[2], boundary[3]]))
        elif len(boundary) == 5:
            # pentagon → 1 triangle + 1 quad
            tri = [boundary[0], boundary[1], boundary[2]]
            quad = [boundary[0], boundary[2], boundary[3], boundary[4]]
            new_faces.append((i_face, tri))
            new_faces.append((i_face, quad))
        else:
            # fallback: fan triangulation
            for k in range(1, len(boundary) - 1):
                new_faces.append((i_face, [boundary[0], boundary[k], boundary[k + 1]]))

    if not new_faces:
        return np.empty((0, 3), dtype=float), [], np.empty((0,), dtype=int)

    used = {idx for (_, f) in new_faces for idx in f}
    dropped_vs -= used
    keep_vs = [i for i in range(len(verts)) if i not in dropped_vs]
    remap = {old: new for new, old in enumerate(keep_vs)}

    pruned_verts = np.asarray([verts[i] for i in keep_vs], dtype=float)
    pruned_faces = [[remap[i] for i in face] for (_, face) in new_faces]

    parent_of_face = np.array([i_parent_face for (i_parent_face, _) in new_faces])

    return pruned_verts, pruned_faces, parent_of_face
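As a companion sketch for the new clipping entry point, the example below clips a single vertical quad against the z = 0 plane (the panel coordinates and the chosen plane are illustrative assumptions; only the module path and `clip_faces` come from the file above):

import numpy as np
from capytaine.meshes.clip import clip_faces

# One vertical square panel crossing the z = 0 plane.
vertices = np.array([
    [0.0, 0.0, -1.0], [1.0, 0.0, -1.0],
    [1.0, 0.0,  1.0], [0.0, 0.0,  1.0],
])
faces = [[0, 1, 2, 3]]

# With normal = +z, vertices where (origin - v) · normal >= -tol are kept,
# i.e. the part of the panel below the plane.
new_vertices, new_faces, parent = clip_faces(
    vertices, faces, normal=np.array([0.0, 0.0, 1.0]), origin=np.zeros(3)
)
print(new_faces, parent)
# [[0, 1, 2], [0, 2, 3]] [0 0]  -- the submerged part comes back as two triangles

The third return value maps every clipped face back to its parent face, which is what lets per-face fields (the ``faces_metadata`` dictionaries used in capytaine/meshes/clean.py) be propagated through a clipping operation.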