capytaine 2.3.1__cp314-cp314t-macosx_14_0_arm64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (92) hide show
  1. capytaine/.dylibs/libgcc_s.1.1.dylib +0 -0
  2. capytaine/.dylibs/libgfortran.5.dylib +0 -0
  3. capytaine/.dylibs/libquadmath.0.dylib +0 -0
  4. capytaine/__about__.py +16 -0
  5. capytaine/__init__.py +36 -0
  6. capytaine/bem/__init__.py +0 -0
  7. capytaine/bem/airy_waves.py +111 -0
  8. capytaine/bem/engines.py +441 -0
  9. capytaine/bem/problems_and_results.py +600 -0
  10. capytaine/bem/solver.py +594 -0
  11. capytaine/bodies/__init__.py +4 -0
  12. capytaine/bodies/bodies.py +1221 -0
  13. capytaine/bodies/dofs.py +19 -0
  14. capytaine/bodies/predefined/__init__.py +6 -0
  15. capytaine/bodies/predefined/cylinders.py +151 -0
  16. capytaine/bodies/predefined/rectangles.py +111 -0
  17. capytaine/bodies/predefined/spheres.py +70 -0
  18. capytaine/green_functions/FinGreen3D/.gitignore +1 -0
  19. capytaine/green_functions/FinGreen3D/FinGreen3D.f90 +3589 -0
  20. capytaine/green_functions/FinGreen3D/LICENSE +165 -0
  21. capytaine/green_functions/FinGreen3D/Makefile +16 -0
  22. capytaine/green_functions/FinGreen3D/README.md +24 -0
  23. capytaine/green_functions/FinGreen3D/test_program.f90 +39 -0
  24. capytaine/green_functions/LiangWuNoblesse/.gitignore +1 -0
  25. capytaine/green_functions/LiangWuNoblesse/LICENSE +504 -0
  26. capytaine/green_functions/LiangWuNoblesse/LiangWuNoblesseWaveTerm.f90 +751 -0
  27. capytaine/green_functions/LiangWuNoblesse/Makefile +16 -0
  28. capytaine/green_functions/LiangWuNoblesse/README.md +2 -0
  29. capytaine/green_functions/LiangWuNoblesse/test_program.f90 +28 -0
  30. capytaine/green_functions/__init__.py +2 -0
  31. capytaine/green_functions/abstract_green_function.py +64 -0
  32. capytaine/green_functions/delhommeau.py +507 -0
  33. capytaine/green_functions/hams.py +204 -0
  34. capytaine/green_functions/libs/Delhommeau_float32.cpython-314t-darwin.so +0 -0
  35. capytaine/green_functions/libs/Delhommeau_float64.cpython-314t-darwin.so +0 -0
  36. capytaine/green_functions/libs/__init__.py +0 -0
  37. capytaine/io/__init__.py +0 -0
  38. capytaine/io/bemio.py +153 -0
  39. capytaine/io/legacy.py +328 -0
  40. capytaine/io/mesh_loaders.py +1086 -0
  41. capytaine/io/mesh_writers.py +692 -0
  42. capytaine/io/meshio.py +38 -0
  43. capytaine/io/wamit.py +479 -0
  44. capytaine/io/xarray.py +668 -0
  45. capytaine/matrices/__init__.py +16 -0
  46. capytaine/matrices/block.py +592 -0
  47. capytaine/matrices/block_toeplitz.py +325 -0
  48. capytaine/matrices/builders.py +89 -0
  49. capytaine/matrices/linear_solvers.py +232 -0
  50. capytaine/matrices/low_rank.py +395 -0
  51. capytaine/meshes/__init__.py +6 -0
  52. capytaine/meshes/clipper.py +465 -0
  53. capytaine/meshes/collections.py +342 -0
  54. capytaine/meshes/geometry.py +409 -0
  55. capytaine/meshes/mesh_like_protocol.py +37 -0
  56. capytaine/meshes/meshes.py +890 -0
  57. capytaine/meshes/predefined/__init__.py +6 -0
  58. capytaine/meshes/predefined/cylinders.py +314 -0
  59. capytaine/meshes/predefined/rectangles.py +261 -0
  60. capytaine/meshes/predefined/spheres.py +62 -0
  61. capytaine/meshes/properties.py +276 -0
  62. capytaine/meshes/quadratures.py +80 -0
  63. capytaine/meshes/quality.py +448 -0
  64. capytaine/meshes/surface_integrals.py +63 -0
  65. capytaine/meshes/symmetric.py +462 -0
  66. capytaine/post_pro/__init__.py +6 -0
  67. capytaine/post_pro/free_surfaces.py +88 -0
  68. capytaine/post_pro/impedance.py +92 -0
  69. capytaine/post_pro/kochin.py +54 -0
  70. capytaine/post_pro/rao.py +60 -0
  71. capytaine/tools/__init__.py +0 -0
  72. capytaine/tools/cache_on_disk.py +26 -0
  73. capytaine/tools/deprecation_handling.py +18 -0
  74. capytaine/tools/lists_of_points.py +52 -0
  75. capytaine/tools/lru_cache.py +49 -0
  76. capytaine/tools/optional_imports.py +27 -0
  77. capytaine/tools/prony_decomposition.py +150 -0
  78. capytaine/tools/symbolic_multiplication.py +149 -0
  79. capytaine/tools/timer.py +66 -0
  80. capytaine/ui/__init__.py +0 -0
  81. capytaine/ui/cli.py +28 -0
  82. capytaine/ui/rich.py +5 -0
  83. capytaine/ui/vtk/__init__.py +3 -0
  84. capytaine/ui/vtk/animation.py +329 -0
  85. capytaine/ui/vtk/body_viewer.py +28 -0
  86. capytaine/ui/vtk/helpers.py +82 -0
  87. capytaine/ui/vtk/mesh_viewer.py +461 -0
  88. capytaine-2.3.1.dist-info/LICENSE +674 -0
  89. capytaine-2.3.1.dist-info/METADATA +750 -0
  90. capytaine-2.3.1.dist-info/RECORD +92 -0
  91. capytaine-2.3.1.dist-info/WHEEL +6 -0
  92. capytaine-2.3.1.dist-info/entry_points.txt +3 -0
@@ -0,0 +1,448 @@
1
+ """Tools for mesh quality and mesh healing.
2
+ Based on meshmagick <https://github.com/LHEEA/meshmagick> by François Rongère.
3
+ """
4
+ # Copyright (C) 2017-2019 Matthieu Ancellin, based on the work of François Rongère
5
+ # See LICENSE file at <https://github.com/mancellin/capytaine>
6
+
7
+ import logging
8
+
9
+ import numpy as np
10
+
11
+ from capytaine.meshes.geometry import inplace_transformation
12
+ from capytaine.meshes.properties import compute_connectivity
13
+
14
+ LOG = logging.getLogger(__name__)
15
+
16
+
17
def merge_duplicates(mesh, atol=1e-8):
    """Merges the duplicate vertices of the mesh in place.

    Vertices closer than ``atol`` are collapsed onto a single vertex and
    the face connectivity table is renumbered accordingly.

    Parameters
    ----------
    atol : float, optional
        Absolute tolerance. default is 1e-8

    Returns
    -------
    new_id : ndarray
        Array of indices that merges the vertices.
    """
    unique_vertices, new_id = merge_duplicate_rows(mesh.vertices, atol=atol)

    nb_vertices_before = mesh.nb_vertices

    # Update the mesh: new vertex table, then renumber the faces so that
    # they point to the merged vertices.
    mesh.vertices = unique_vertices
    mesh.faces = new_id[mesh.faces]

    nb_vertices_after = mesh.nb_vertices

    LOG.debug("* Merging duplicate vertices that lie in an absolute proximity of %.1E...", atol)
    nb_merged = nb_vertices_before - nb_vertices_after
    if nb_merged:
        LOG.debug("\t--> Initial number of vertices : %u", nb_vertices_before)
        LOG.debug("\t--> Final number of vertices : %u", nb_vertices_after)
        LOG.debug("\t--> %u vertices have been merged", nb_merged)
    else:
        LOG.debug("\t--> No duplicate vertices have been found")

    return new_id
53
+
54
+
55
def merge_duplicate_rows(arr, atol=1e-8):
    """Returns a new node array where close nodes have been merged into one node (following atol).

    The rows are clustered one coordinate at a time: ``levels`` stores the
    boundaries of groups of rows that are equal (within ``atol``) on all the
    coordinates processed so far, and ``iperm`` the permutation that sorts
    the rows into those groups.

    Parameters
    ----------
    arr : array_like
        array of the coordinates of the mesh's nodes, shape (nb_nodes, nb_dims)
    atol : float, optional
        the tolerance used to define nodes that are coincident and
        that have to be merged

    Returns
    -------
    arr : ndarray
        array of the coordinates of the mesh's nodes where
        every node is different
    newID : ndarray
        array of the new vertices IDs, such that ``arr[newID]`` recovers a
        row for each of the original rows
    """
    # This function is a bottleneck in the clipping routines
    # TODO: use np.unique to cluster groups --> acceleration !!

    arr = np.asarray(arr)

    nv, nbdim = arr.shape

    levels = [0, nv]
    iperm = np.arange(nv)

    for dim in range(nbdim):
        # Sorting the current dimension
        values = arr[:, dim].copy()
        if dim > 0:
            values = values[iperm]
        levels_tmp = []
        for (ilevel, istart) in enumerate(levels[:-1]):
            istop = levels[ilevel+1]

            if istop-istart > 1:
                # Sort this group of rows by the current coordinate...
                level_values = values[istart:istop]
                iperm_view = iperm[istart:istop]

                iperm_tmp = level_values.argsort()

                level_values[:] = level_values[iperm_tmp]
                iperm_view[:] = iperm_view[iperm_tmp]

                # ...then split it wherever consecutive values differ by
                # more than atol.
                levels_tmp.append(istart)
                vref = values[istart]

                for idx in range(istart, istop):
                    cur_val = values[idx]
                    if np.abs(cur_val - vref) > atol:
                        levels_tmp.append(idx)
                        vref = cur_val

            else:
                # Single-row group: cannot be split further.
                levels_tmp.append(levels[ilevel])

        if len(levels_tmp) == nv:
            # Every row is in its own group: there are no duplicate rows.
            # BUGFIX: the `break` was missing, so the `else` clause of the
            # `for` loop below (which only runs when the loop is NOT broken
            # out of) always rebuilt and permuted the array, even when
            # nothing had to be merged.  With the break, the original row
            # order is preserved and newID is the identity.
            newID = np.arange(nv)
            break

        levels_tmp.append(nv)
        levels = levels_tmp

    else:
        # Some groups still hold several rows: those are duplicates.
        # Build the merged node list, keeping one representative per group.
        arr_tmp = []
        newID = np.arange(nv)
        for (ilevel, istart) in enumerate(levels[:-1]):
            istop = levels[ilevel+1]

            arr_tmp.append(arr[iperm[istart]])
            newID[iperm[list(range(istart, istop))]] = ilevel
        arr = np.array(arr_tmp, dtype=float)

    return arr, newID
152
+
153
+
154
@inplace_transformation
def heal_normals(mesh):
    """Heals the mesh's normals orientations so that they have a consistent orientation and try to make them outward.

    The faces are visited by flooding from an arbitrary starting face:
    each unvisited neighbor is compared to the face it was reached from,
    and flipped when both faces traverse their shared edge in the same
    direction.  If the mesh is closed, a surface-integral heuristic then
    decides whether all the normals should be flipped at once to point
    outward.

    Returns
    -------
    mesh
        The mesh with consistently oriented faces (the
        ``inplace_transformation`` decorator controls whether the original
        object is modified or a copy is returned).
    """
    # TODO: return the different groups of a mesh in case it is made of several unrelated groups

    nv = mesh.nb_vertices  # NOTE(review): unused below
    nf = mesh.nb_faces
    faces = mesh._faces

    # Building connectivities
    connectivities = compute_connectivity(mesh)
    v_v = connectivities["v_v"]  # NOTE(review): unused below
    v_f = connectivities["v_f"]  # NOTE(review): unused below
    f_f = connectivities["f_f"]  # face index -> adjacent face indices
    boundaries = connectivities["boundaries"]

    # A mesh with boundary edges cannot be closed.
    if len(boundaries) > 0:
        mesh_closed = False
    else:
        mesh_closed = True

    # Flooding the mesh to find inconsistent normals.
    # type_cell stores the number of distinct vertices of each face:
    # 3 for triangles, 4 for quadrangles.
    type_cell = np.zeros(nf, dtype=np.int32)
    type_cell[:] = 4
    type_cell[mesh.triangles_ids] = 3

    f_vis = np.zeros(nf, dtype=bool)  # faces already visited by the flood
    f_vis[0] = True
    stack = [0]
    nb_reversed = 0
    while 1:
        if len(stack) == 0:
            # The current connected component is exhausted: restart the
            # flood from any face that has not been visited yet, if any.
            if np.any(np.logical_not(f_vis)):
                iface = np.where(np.logical_not(f_vis))[0][0]
                stack.append(iface)
                f_vis[iface] = True
            else:
                break

        iface = stack.pop()
        face = faces[iface]
        s1 = set(face)

        for iadj_f in f_f[iface]:
            if f_vis[iadj_f]:
                continue
            f_vis[iadj_f] = True
            # Removing the other pointer
            f_f[iadj_f].remove(iface)  # So as it won't go from iadj_f to iface in the future

            # Shared vertices between the two adjacent faces
            adjface = faces[iadj_f]
            s2 = set(adjface)
            common_vertices = list(s1 & s2)
            if len(common_vertices) == 2:
                i_v1, i_v2 = common_vertices
            else:
                LOG.warning('faces %u and %u have more than 2 vertices in common !', iface, iadj_f)
                continue

            # Checking normal consistency: both faces are rotated so that
            # they start at the shared vertex i_v1; the position of i_v2
            # then tells in which direction each face runs along the
            # shared edge.
            face_ref = np.roll(face[:type_cell[iface]], -np.where(face == i_v1)[0][0])
            adj_face_ref = np.roll(adjface[:type_cell[iadj_f]], -np.where(adjface == i_v1)[0][0])

            if face_ref[1] == i_v2:
                i = 1
            else:
                i = -1

            if adj_face_ref[i] == i_v2:
                # Both faces traverse the shared edge in the same
                # direction, hence inconsistent normals: reversing the
                # neighbor's vertex order flips its normal.
                nb_reversed += 1
                faces[iadj_f] = np.flipud(faces[iadj_f])

            # Appending to the stack
            stack.append(iadj_f)

    LOG.debug("* Healing normals to make them consistent and if possible outward")
    if nb_reversed > 0:
        LOG.debug('\t--> %u faces have been reversed to make normals consistent across the mesh' % (nb_reversed))
    else:
        LOG.debug("\t--> Normals orientations are consistent")

    mesh._faces = faces

    # Checking if the normals are outward
    if mesh_closed:
        zmax = np.max(mesh._vertices[:, 2])

        areas = mesh.faces_areas
        normals = mesh.faces_normals
        centers = mesh.faces_centers

        # Heuristic: surface integral of (z - zmax) * n.  For a closed
        # surface with outward normals, the third component is related to
        # the enclosed volume and should be positive, while the first two
        # components should vanish (up to discretization error).
        hs = (np.array([(centers[:, 2] - zmax) * areas, ] * 3).T * normals).sum(axis=0)

        tol = 1e-9
        if np.fabs(hs[0]) > tol or np.fabs(hs[1]) > tol:
            LOG.warning("\t--> the mesh does not seem watertight although marked as closed...")

        if hs[2] < 0:
            flipped = True
            mesh.flip_normals()
        else:
            flipped = False

        if flipped:
            LOG.debug('\t--> Every normals have been reversed to be outward')

    else:
        LOG.debug("\t--> Mesh is not closed, Capytaine cannot test if the normals are outward")

    return mesh
269
+
270
+
271
@inplace_transformation
def remove_unused_vertices(mesh):
    """Removes unused vertices in the mesh in place.

    Those are vertices that are not used by any face connectivity.
    The remaining vertices are renumbered contiguously and the faces are
    updated accordingly.
    """
    # TODO: implement return_index !!
    nv = mesh.nb_vertices
    vertices, faces = mesh._vertices, mesh._faces

    # Mark every vertex referenced by at least one face.
    # (The previous implementation flattened the faces with
    # sum(list(map(list, faces)), []), which is quadratic; ravel() does
    # the same in one pass.)
    used_v = np.zeros(nv, dtype=bool)
    used_v[np.asarray(faces).ravel()] = True
    nb_used_v = int(used_v.sum())

    if nb_used_v < nv:
        # Renumber the kept vertices contiguously and remap the faces.
        new_id__v = np.arange(nv)
        new_id__v[used_v] = np.arange(nb_used_v)
        faces = new_id__v[faces]
        vertices = vertices[used_v]

    mesh._vertices, mesh._faces = vertices, faces

    LOG.debug("* Removing unused vertices in the mesh:")
    if nb_used_v < nv:
        LOG.debug("\t--> %u unused vertices have been removed", nv - nb_used_v)
    else:
        LOG.debug("\t--> No unused vertices")

    return mesh
302
+
303
+
304
@inplace_transformation
def heal_triangles(mesh):
    """Makes the triangle connectivity consistent (in place).

    A general face is stored internally as a 4 integer array. It allows to describe indices of a quadrangle's vertices. For triangles, the first index should be equal to the last. This method ensures that this rule is applied everywhere and correct bad triangles description.
    """
    faces = mesh._faces

    # A face is misdescribed when its first and last vertex indices
    # differ although it is meant to be a triangle.  Rolling such a face
    # at most three times brings the repeated vertex to the ends.
    misdescribed = faces[:, 0] != faces[:, -1]
    nquads_init = misdescribed.sum()

    for _ in range(3):
        faces[misdescribed] = np.roll(faces[misdescribed], 1, axis=1)
        misdescribed = faces[:, 0] != faces[:, -1]

    nquads_final = misdescribed.sum()

    mesh._faces = faces

    LOG.debug("* Ensuring consistent definition of triangles:")
    if nquads_final < nquads_init:
        LOG.debug("\t--> %u triangles were described the wrong way and have been corrected" % (
            nquads_init - nquads_final))
    else:
        LOG.debug("\t--> Triangle description is consistent")

    return mesh
335
+
336
+
337
@inplace_transformation
def remove_degenerated_faces(mesh, rtol=1e-5):
    """Removes tiny triangles from the mesh (in place).

    A face is considered degenerated when its area is below the mean face
    area of the mesh multiplied by the given relative tolerance.

    Parameters
    ----------
    rtol : float, optional
        Positive relative tolerance
    """
    assert 0 < rtol

    # TODO: also return the indices of the removed faces
    areas = mesh.faces_areas
    area_threshold = areas.mean() * float(rtol)

    # Keep only the faces whose area reaches the threshold.
    kept_faces = mesh._faces[~(areas < area_threshold)]
    nb_removed = mesh.nb_faces - kept_faces.shape[0]

    LOG.debug('* Removing degenerated faces')
    if nb_removed > 0:
        LOG.debug('\t-->%u degenerated faces have been removed' % nb_removed)
    else:
        LOG.debug('\t--> No degenerated faces')

    mesh._faces = kept_faces

    return mesh
368
+
369
+
370
def print_quality(mesh):
    """Prints data on the mesh quality.
    Needs to be tested...

    It uses VTK and is reproduced from
    http://vtk.org/gitweb?p=VTK.git;a=blob;f=Filters/Verdict/Testing/Python/MeshQuality.py
    """
    # BUGFIX: `math` and `vtk` were used without being imported anywhere
    # in this module, so calling this function raised NameError.  vtk is
    # an optional dependency of capytaine, hence the function-local import.
    import math
    import vtk

    polydata = mesh._vtk_polydata()
    quality = vtk.vtkMeshQuality()
    quality.SetInputData(polydata)

    def DumpQualityStats(iq, arrayname):
        # Formats the range/average/standard deviation of the named
        # quality field array.  Component layout follows the VTK
        # MeshQuality example: 0=min, 1=average, 2=max, 3=variance.
        an = iq.GetOutput().GetFieldData().GetArray(arrayname)
        # renamed from `range`, which shadowed the builtin
        value_range = list()
        value_range.append(an.GetComponent(0, 0))
        value_range.append(an.GetComponent(0, 2))
        average = an.GetComponent(0, 1)
        stdDev = math.sqrt(math.fabs(an.GetComponent(0, 3)))
        outStr = '%s%g%s%g\n%s%g%s%g' % (
            ' range: ', value_range[0], ' - ', value_range[1],
            ' average: ', average, ' , standard deviation: ', stdDev)
        return outStr

    # Here we define the various mesh types and labels for output.
    meshTypes = [
        ['Triangle', 'Triangle',
         [['QualityMeasureToArea', ' Area Ratio:'],
          ['QualityMeasureToEdgeRatio', ' Edge Ratio:'],
          ['QualityMeasureToAspectRatio', ' Aspect Ratio:'],
          ['QualityMeasureToRadiusRatio', ' Radius Ratio:'],
          ['QualityMeasureToAspectFrobenius', ' Frobenius Norm:'],
          ['QualityMeasureToMinAngle', ' Minimal Angle:']
          ]
         ],

        ['Quad', 'Quadrilateral',
         [['QualityMeasureToArea', ' Area Ratio:'],
          ['QualityMeasureToEdgeRatio', ' Edge Ratio:'],
          ['QualityMeasureToAspectRatio', ' Aspect Ratio:'],
          ['QualityMeasureToRadiusRatio', ' Radius Ratio:'],
          ['QualityMeasureToMedAspectFrobenius',
           ' Average Frobenius Norm:'],
          ['QualityMeasureToMaxAspectFrobenius',
           ' Maximal Frobenius Norm:'],
          ['QualityMeasureToMinAngle', ' Minimal Angle:']
          ]
         ]
    ]
    res = ''
    if polydata.GetNumberOfCells() > 0:
        for meshType in meshTypes:
            res += '\n%s%s' % (meshType[1], ' quality of the mesh ')
            quality.Update()
            an = quality.GetOutput().GetFieldData().GetArray('Mesh ' + meshType[1] + ' Quality')
            cardinality = an.GetComponent(0, 4)

            res = ''.join((res, '(%u elements):\n' % cardinality))

            for measure in meshType[2]:
                # getattr instead of eval: same dynamic method dispatch
                # without executing a constructed code string.
                getattr(quality, 'Set' + meshType[0] + measure[0])()
                quality.Update()
                res += '\n%s\n%s' % (
                    measure[1],
                    DumpQualityStats(quality, 'Mesh ' + meshType[1] + ' Quality')
                )
            res += '\n'

    info = """\n\nDefinition of the different quality measures is given
    in the verdict library manual :
    http://www.vtk.org/Wiki/images/6/6b/VerdictManual-revA.pdf\n"""

    res += info
    print(res)
    return
@@ -0,0 +1,63 @@
1
+ """Tools for surface integrals and hydrostatics."""
2
+ # Copyright (C) 2017-2022 Matthieu Ancellin
3
+ # See LICENSE file at <https://github.com/capytaine/capytaine>
4
+
5
+ from abc import ABC
6
+ import numpy as np
7
+
8
class SurfaceIntegralsMixin(ABC):
    # Mesh and CollectionOfMeshes both need these methods with the exact
    # same definitions.  They live in this mixin, inherited by both
    # classes, so that the code is written only once.

    def surface_integral(self, data, **kwargs):
        """Returns integral of given data along wet surface area."""
        return np.sum(self.faces_areas * data, **kwargs)

    def waterplane_integral(self, data, **kwargs):
        """Returns integral of given data along water plane area."""
        vertical_normal_component = self.faces_normals[:, 2]
        return self.surface_integral(vertical_normal_component * data, **kwargs)

    @property
    def wet_surface_area(self):
        """Returns wet surface area."""
        return self.surface_integral(1)

    @property
    def volumes(self):
        """Returns volumes using x, y, z components of the mesh."""
        weighted_coords = self.faces_normals * self.faces_centers
        return self.surface_integral(weighted_coords.T, axis=1)

    @property
    def volume(self):
        """Returns volume of the mesh."""
        return np.mean(self.volumes)

    def disp_mass(self, *, rho=1000):
        """Returns the displaced mass for the given fluid density rho."""
        return rho * self.volume

    @property
    def center_of_buoyancy(self):
        """Returns center of buoyancy of the mesh."""
        weighted_squared_coords = self.faces_normals * self.faces_centers**2
        return self.surface_integral(weighted_squared_coords.T, axis=1) / (2*self.volume)

    @property
    def waterplane_area(self):
        """Returns water plane area of the mesh."""
        return -self.waterplane_integral(1)

    @property
    def waterplane_center(self):
        """Returns water plane center of the mesh.

        Note: Returns None if the mesh is fully submerged.
        """
        area = self.waterplane_area
        if abs(area) < 1e-10:
            # A vanishing waterplane area: no face crosses the free surface.
            return None
        center = -self.waterplane_integral(self.faces_centers.T, axis=1) / area
        # Only the x and y coordinates are meaningful for the waterplane.
        return center[:-1]