capytaine 2.3.1__cp314-cp314-macosx_14_0_arm64.whl → 3.0.0a1__cp314-cp314-macosx_14_0_arm64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (82)
  1. capytaine/__about__.py +7 -2
  2. capytaine/__init__.py +8 -12
  3. capytaine/bem/engines.py +234 -354
  4. capytaine/bem/problems_and_results.py +14 -13
  5. capytaine/bem/solver.py +204 -80
  6. capytaine/bodies/bodies.py +278 -869
  7. capytaine/bodies/dofs.py +136 -9
  8. capytaine/bodies/hydrostatics.py +540 -0
  9. capytaine/bodies/multibodies.py +216 -0
  10. capytaine/green_functions/{libs/Delhommeau_float32.cpython-314-darwin.so → Delhommeau_float32.cpython-314-darwin.so} +0 -0
  11. capytaine/green_functions/{libs/Delhommeau_float64.cpython-314-darwin.so → Delhommeau_float64.cpython-314-darwin.so} +0 -0
  12. capytaine/green_functions/abstract_green_function.py +2 -2
  13. capytaine/green_functions/delhommeau.py +31 -16
  14. capytaine/green_functions/hams.py +19 -13
  15. capytaine/io/legacy.py +3 -103
  16. capytaine/io/xarray.py +11 -6
  17. capytaine/meshes/__init__.py +2 -6
  18. capytaine/meshes/abstract_meshes.py +375 -0
  19. capytaine/meshes/clean.py +302 -0
  20. capytaine/meshes/clip.py +347 -0
  21. capytaine/meshes/export.py +89 -0
  22. capytaine/meshes/geometry.py +244 -394
  23. capytaine/meshes/io.py +433 -0
  24. capytaine/meshes/meshes.py +617 -681
  25. capytaine/meshes/predefined/cylinders.py +22 -56
  26. capytaine/meshes/predefined/rectangles.py +26 -85
  27. capytaine/meshes/predefined/spheres.py +4 -11
  28. capytaine/meshes/quality.py +118 -407
  29. capytaine/meshes/surface_integrals.py +48 -29
  30. capytaine/meshes/symmetric_meshes.py +641 -0
  31. capytaine/meshes/visualization.py +353 -0
  32. capytaine/post_pro/free_surfaces.py +1 -4
  33. capytaine/post_pro/kochin.py +10 -10
  34. capytaine/tools/block_circulant_matrices.py +275 -0
  35. capytaine/tools/lists_of_points.py +2 -2
  36. capytaine/tools/memory_monitor.py +45 -0
  37. capytaine/tools/symbolic_multiplication.py +13 -1
  38. capytaine/tools/timer.py +58 -34
  39. {capytaine-2.3.1.dist-info → capytaine-3.0.0a1.dist-info}/METADATA +7 -2
  40. capytaine-3.0.0a1.dist-info/RECORD +65 -0
  41. capytaine/bodies/predefined/__init__.py +0 -6
  42. capytaine/bodies/predefined/cylinders.py +0 -151
  43. capytaine/bodies/predefined/rectangles.py +0 -111
  44. capytaine/bodies/predefined/spheres.py +0 -70
  45. capytaine/green_functions/FinGreen3D/.gitignore +0 -1
  46. capytaine/green_functions/FinGreen3D/FinGreen3D.f90 +0 -3589
  47. capytaine/green_functions/FinGreen3D/LICENSE +0 -165
  48. capytaine/green_functions/FinGreen3D/Makefile +0 -16
  49. capytaine/green_functions/FinGreen3D/README.md +0 -24
  50. capytaine/green_functions/FinGreen3D/test_program.f90 +0 -39
  51. capytaine/green_functions/LiangWuNoblesse/.gitignore +0 -1
  52. capytaine/green_functions/LiangWuNoblesse/LICENSE +0 -504
  53. capytaine/green_functions/LiangWuNoblesse/LiangWuNoblesseWaveTerm.f90 +0 -751
  54. capytaine/green_functions/LiangWuNoblesse/Makefile +0 -16
  55. capytaine/green_functions/LiangWuNoblesse/README.md +0 -2
  56. capytaine/green_functions/LiangWuNoblesse/test_program.f90 +0 -28
  57. capytaine/green_functions/libs/__init__.py +0 -0
  58. capytaine/io/mesh_loaders.py +0 -1086
  59. capytaine/io/mesh_writers.py +0 -692
  60. capytaine/io/meshio.py +0 -38
  61. capytaine/matrices/__init__.py +0 -16
  62. capytaine/matrices/block.py +0 -592
  63. capytaine/matrices/block_toeplitz.py +0 -325
  64. capytaine/matrices/builders.py +0 -89
  65. capytaine/matrices/linear_solvers.py +0 -232
  66. capytaine/matrices/low_rank.py +0 -395
  67. capytaine/meshes/clipper.py +0 -465
  68. capytaine/meshes/collections.py +0 -342
  69. capytaine/meshes/mesh_like_protocol.py +0 -37
  70. capytaine/meshes/properties.py +0 -276
  71. capytaine/meshes/quadratures.py +0 -80
  72. capytaine/meshes/symmetric.py +0 -462
  73. capytaine/tools/lru_cache.py +0 -49
  74. capytaine/ui/vtk/__init__.py +0 -3
  75. capytaine/ui/vtk/animation.py +0 -329
  76. capytaine/ui/vtk/body_viewer.py +0 -28
  77. capytaine/ui/vtk/helpers.py +0 -82
  78. capytaine/ui/vtk/mesh_viewer.py +0 -461
  79. capytaine-2.3.1.dist-info/RECORD +0 -92
  80. {capytaine-2.3.1.dist-info → capytaine-3.0.0a1.dist-info}/LICENSE +0 -0
  81. {capytaine-2.3.1.dist-info → capytaine-3.0.0a1.dist-info}/WHEEL +0 -0
  82. {capytaine-2.3.1.dist-info → capytaine-3.0.0a1.dist-info}/entry_points.txt +0 -0
capytaine/bem/problems_and_results.py CHANGED
@@ -9,8 +9,8 @@ import pandas as pd
  from scipy.optimize import newton

  from capytaine.tools.deprecation_handling import _get_water_depth
- from capytaine.meshes.collections import CollectionOfMeshes
  from capytaine.bem.airy_waves import airy_waves_velocity, froude_krylov_force
+ from capytaine.bodies.dofs import AbstractDof
  from capytaine.tools.symbolic_multiplication import SymbolicMultiplication

  LOG = logging.getLogger(__name__)
@@ -195,8 +195,7 @@ class LinearPotentialFlowProblem:


  if self.body is not None:
- if ((isinstance(self.body.mesh, CollectionOfMeshes) and len(self.body.mesh) == 0)
- or len(self.body.mesh.faces) == 0):
+ if self.body.mesh.nb_faces == 0:
  raise ValueError(f"The mesh of the body {self.body.__short_str__()} is empty.")
  self.body._check_dofs_shape_consistency()

@@ -422,6 +421,10 @@ class RadiationProblem(LinearPotentialFlowProblem):
  f"The dofs of the body are {list(self.body.dofs.keys())}")

  dof = self.body.dofs[self.radiating_dof]
+ if isinstance(dof, AbstractDof):
+ dof_motion = dof.evaluate_motion(self.body.mesh)
+ else:
+ dof_motion = dof

  self.boundary_condition = self.encounter_omega * np.zeros(
  shape=(self.body.mesh_including_lid.nb_faces,),
@@ -431,20 +434,18 @@
  # is implemented with the correct type (for zero and infinite frequencies), it does not affect the value.
  # Below the value is update on the hull. It remains zero on the lid.

- displacement_on_face = np.sum(dof * self.body.mesh.faces_normals, axis=1) # This is a dot product on each face
+ displacement_on_face = np.sum(dof_motion * self.body.mesh.faces_normals, axis=1) # This is a dot product on each face
  self.boundary_condition[self.body.hull_mask] = -1j * self.encounter_omega * displacement_on_face

  if self.forward_speed != 0.0:
- if self.radiating_dof.lower() == "pitch":
- ddofdx_dot_n = np.array([nz for (nx, ny, nz) in self.body.mesh.faces_normals])
- elif self.radiating_dof.lower() == "yaw":
- ddofdx_dot_n = np.array([-ny for (nx, ny, nz) in self.body.mesh.faces_normals])
- elif self.radiating_dof.lower() in {"surge", "sway", "heave", "roll"}:
- ddofdx_dot_n = 0.0
- else:
+ # Adding the "m-terms":
+ try:
+ ddofdx = dof.evaluate_gradient_of_motion(self.body.mesh)[:, :, 0]
+ ddofdx_dot_n = np.sum(ddofdx * self.body.mesh.faces_normals)
+ except AttributeError:
  raise NotImplementedError(
- "Radiation problem with forward speed is currently only implemented for a single rigid body.\n"
- "Only radiating dofs with name in {'Surge', 'Sway', 'Heave', 'Roll', 'Pitch', 'Yaw'} are supported.\n"
+ "Radiation problem with forward speed is currently only implemented for rigid bodies.\n"
+ "Only radiating dofs instantiating a TranslationDof or a RotationDof are supported.\n"
  f"Got instead `radiating_dof={self.radiating_dof}`"
  )

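A note on the two hunks above: in 3.0.0a1 the entries of body.dofs may be AbstractDof objects (see the new capytaine/bodies/dofs.py) that are evaluated on the mesh, instead of raw per-face displacement arrays as in 2.3.1. A minimal sketch of the resulting boundary-condition assembly, assuming only a mesh-like object exposing faces_normals of shape (nb_faces, 3); the helper name is hypothetical, not part of the package:

    import numpy as np

    def radiation_boundary_condition(dof, mesh, omega):
        # Hypothetical helper mirroring the new dispatch in RadiationProblem:
        # a dof is either an AbstractDof evaluated on the mesh, or a plain
        # (nb_faces, 3) array of displacements as before.
        dof_motion = dof.evaluate_motion(mesh) if hasattr(dof, "evaluate_motion") else np.asarray(dof)
        # Per-face dot product of the dof motion with the face normal
        normal_velocity = np.sum(dof_motion * mesh.faces_normals, axis=1)
        return -1j * omega * normal_velocity
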
capytaine/bem/solver.py CHANGED
@@ -5,24 +5,26 @@
  .. code-block:: python

  problem = RadiationProblem(...)
- result = BEMSolver(green_functions=..., engine=...).solve(problem)
+ result = BEMSolver(engine=..., method=...).solve(problem)

  """

  import os
+ import shutil
+ import textwrap
  import logging

  import numpy as np
- import pandas as pd

  from datetime import datetime
+ from collections import defaultdict

  from rich.progress import track

  from capytaine.bem.problems_and_results import LinearPotentialFlowProblem, DiffractionProblem
- from capytaine.green_functions.delhommeau import Delhommeau
  from capytaine.bem.engines import BasicMatrixEngine
  from capytaine.io.xarray import problems_from_dataset, assemble_dataset, kochin_data_array
+ from capytaine.tools.memory_monitor import MemoryMonitor
  from capytaine.tools.optional_imports import silently_import_optional_dependency
  from capytaine.tools.lists_of_points import _normalize_points, _normalize_free_surface_points
  from capytaine.tools.symbolic_multiplication import supporting_symbolic_multiplication
@@ -30,15 +32,20 @@ from capytaine.tools.timer import Timer

  LOG = logging.getLogger(__name__)

+ # Mapping between a dtype and its complex version
+ COMPLEX_DTYPE = {
+ np.float32: np.complex64,
+ np.float64: np.complex128,
+ np.complex64 : np.complex64,
+ np.complex128 : np.complex128
+ }
+
  class BEMSolver:
  """
  Solver for linear potential flow problems.

  Parameters
  ----------
- green_function: AbstractGreenFunction, optional
- Object handling the computation of the Green function.
- (default: :class:`~capytaine.green_function.delhommeau.Delhommeau`)
  engine: MatrixEngine, optional
  Object handling the building of matrices and the resolution of linear systems with these matrices.
  (default: :class:`~capytaine.bem.engines.BasicMatrixEngine`)
@@ -46,50 +53,72 @@ class BEMSolver:
  select boundary integral equation used to solve the problems.
  Accepted values: "indirect" (as in e.g. Nemoh), "direct" (as in e.g. WAMIT)
  Default value: "indirect"
+ green_function: AbstractGreenFunction, optional
+ For convenience and backward compatibility, the Green function can be
+ set here if the engine is the default one.
+ This argument is just passed to the default engine at initialization.

  Attributes
  ----------
- timer: dict[str, Timer]
+ timer: Timer
  Storing the time spent on each subtasks of the resolution
  exportable_settings : dict
  Settings of the solver that can be saved to reinit the same solver later.
  """

  def __init__(self, *, green_function=None, engine=None, method="indirect"):
- self.green_function = Delhommeau() if green_function is None else green_function
- self.engine = BasicMatrixEngine() if engine is None else engine
+
+ if engine is None:
+ self.engine = BasicMatrixEngine(green_function=green_function)
+ else:
+ if green_function is not None:
+ raise ValueError("If you are not using the default engine, set the Green function in the engine.\n"
+ "Setting the Green function in the solver is only a shortcut to set up "
+ "the Green function of the default engine since Capytaine version 3.0")
+ self.engine = engine

  if method.lower() not in {"direct", "indirect"}:
  raise ValueError(f"Unrecognized method when initializing solver: {repr(method)}. Expected \"direct\" or \"indirect\".")
  self.method = method.lower()

- self.timer = {"Solve total": Timer(), " Green function": Timer(), " Linear solver": Timer()}
-
- self.solve = self.timer["Solve total"].wraps_function(self.solve)
+ self.reset_timer()

- try:
- self.exportable_settings = {
- **self.green_function.exportable_settings,
- **self.engine.exportable_settings,
- "method": self.method,
- }
- except AttributeError:
- self.exportable_settings = {}
+ self.exportable_settings = {
+ **self.engine.exportable_settings,
+ "method": self.method,
+ }

  def __str__(self):
- return f"BEMSolver(engine={self.engine}, green_function={self.green_function})"
+ return f"BEMSolver(engine={self.engine}, method={self.method})"

  def __repr__(self):
  return self.__str__()

+ def reset_timer(self):
+ self.timer = Timer(default_tags={"process": 0})
+ self.solve = self.timer.wraps_function(step="Total solve function")(self._solve)
+
  def timer_summary(self):
- return pd.DataFrame([
- {
- "task": name,
- "total": self.timer[name].total,
- "nb_calls": self.timer[name].nb_timings,
- "mean": self.timer[name].mean
- } for name in self.timer]).set_index("task")
+ df = self.timer.as_dataframe()
+ df["step"] = df["step"].where(
+ df["step"].str.startswith("Total"), " " + df["step"]
+ )
+ total = (
+ df.groupby(["step", "process"])
+ ["timing"].sum()
+ .unstack()
+ )
+ return total
+
+ def displayed_total_summary(self, width=None):
+ total = self.timer_summary()
+ if width is None:
+ width = shutil.get_terminal_size(fallback=(80, 20)).columns - 25
+ total_str = total.to_string(
+ float_format="{:.2f}".format,
+ line_width=width,
+ )
+ return textwrap.indent(total_str, " ")

  def _repr_pretty_(self, p, cycle):
  p.text(self.__str__())
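For readers tracking the constructor change above: the Green function is no longer stored on the solver itself but on the engine, and the solver's green_function argument is now only a shortcut forwarded to the default BasicMatrixEngine. A hedged sketch of the two equivalent set-ups after this change, using only classes that appear elsewhere in this diff:

    from capytaine.bem.solver import BEMSolver
    from capytaine.bem.engines import BasicMatrixEngine
    from capytaine.green_functions.delhommeau import Delhommeau

    # Shortcut: green_function is forwarded to the default engine.
    solver = BEMSolver(green_function=Delhommeau(), method="indirect")

    # Explicit: configure the engine yourself; passing green_function to
    # BEMSolver together with a custom engine now raises a ValueError.
    solver = BEMSolver(engine=BasicMatrixEngine(green_function=Delhommeau()), method="indirect")
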
@@ -98,7 +127,7 @@ class BEMSolver:
  def from_exported_settings(settings):
  raise NotImplementedError

- def solve(self, problem, method=None, keep_details=True, _check_wavelength=True):
+ def _solve(self, problem, method=None, keep_details=True, _check_wavelength=True):
  """Solve the linear potential flow problem.

  Parameters
@@ -141,6 +170,7 @@
  omega, wavenumber = problem.encounter_omega, problem.encounter_wavenumber
  else:
  omega, wavenumber = problem.omega, problem.wavenumber
+ gf_params = dict(free_surface=problem.free_surface, water_depth=problem.water_depth, wavenumber=wavenumber)

  linear_solver = supporting_symbolic_multiplication(self.engine.linear_solver)
  method = method if method is not None else self.method
@@ -148,34 +178,29 @@
  if problem.forward_speed != 0.0:
  raise NotImplementedError("Direct solver is not able to solve problems with forward speed.")

- with self.timer[" Green function"]:
+ with self.timer(step="Green function"):
  S, D = self.engine.build_matrices(
  problem.body.mesh_including_lid, problem.body.mesh_including_lid,
- problem.free_surface, problem.water_depth, wavenumber,
- self.green_function, adjoint_double_layer=False
+ **gf_params, adjoint_double_layer=False, diagonal_term_in_double_layer=True,
  )
- rhs = S @ problem.boundary_condition
- with self.timer[" Linear solver"]:
+ with self.timer(step="Matrix-vector product"):
+ rhs = S @ problem.boundary_condition
+ with self.timer(step="Linear solver"):
+ rhs = rhs.astype(COMPLEX_DTYPE[D.dtype.type])
  potential = linear_solver(D, rhs)
- if not potential.shape == problem.boundary_condition.shape:
- raise ValueError(f"Error in linear solver of {self.engine}: the shape of the output ({potential.shape}) "
- f"does not match the expected shape ({problem.boundary_condition.shape})")
  pressure = 1j * omega * problem.rho * potential
  sources = None
  else:
- with self.timer[" Green function"]:
+ with self.timer(step="Green function"):
  S, K = self.engine.build_matrices(
  problem.body.mesh_including_lid, problem.body.mesh_including_lid,
- problem.free_surface, problem.water_depth, wavenumber,
- self.green_function, adjoint_double_layer=True
+ **gf_params, adjoint_double_layer=True, diagonal_term_in_double_layer=True,
  )
-
- with self.timer[" Linear solver"]:
- sources = linear_solver(K, problem.boundary_condition)
- if not sources.shape == problem.boundary_condition.shape:
- raise ValueError(f"Error in linear solver of {self.engine}: the shape of the output ({sources.shape}) "
- f"does not match the expected shape ({problem.boundary_condition.shape})")
- potential = S @ sources
+ with self.timer(step="Linear solver"):
+ rhs = problem.boundary_condition.astype(COMPLEX_DTYPE[K.dtype.type])
+ sources = linear_solver(K, rhs)
+ with self.timer(step="Matrix-vector product"):
+ potential = S @ sources
  pressure = 1j * omega * problem.rho * potential
  if problem.forward_speed != 0.0:
  result = problem.make_results_container(sources=sources)
@@ -185,7 +210,6 @@

  pressure_on_hull = pressure[problem.body.hull_mask] # Discards pressure on lid if any
  forces = problem.body.integrate_pressure(pressure_on_hull)
-
  if not keep_details:
  result = problem.make_results_container(forces)
  else:
@@ -195,17 +219,40 @@

  return result

- def _solve_and_catch_errors(self, problem, *args, **kwargs):
+ def _solve_and_catch_errors(self, problem, *args, _display_errors, **kwargs):
  """Same as BEMSolver.solve() but returns a
  FailedLinearPotentialFlowResult when the resolution failed."""
  try:
  res = self.solve(problem, *args, **kwargs)
  except Exception as e:
- LOG.info(f"Skipped {problem}\nbecause of {repr(e)}")
  res = problem.make_failed_results_container(e)
+ if _display_errors:
+ self._display_errors([res])
  return res

- def solve_all(self, problems, *, method=None, n_jobs=1, progress_bar=None, _check_wavelength=True, **kwargs):
+ @staticmethod
+ def _display_errors(results):
+ """Displays errors that occur during the solver execution and groups them according
+ to the problem type and exception type for easier reading."""
+ failed_results = defaultdict(list)
+ for res in results:
+ if hasattr(res, "exception") and hasattr(res, "problem"):
+ key = (type(res.exception), str(res.exception), res.problem.omega, res.problem.water_depth, res.problem.forward_speed)
+ failed_results[key].append(res.problem)
+
+ for (exc_type, exc_msg, omega, water_depth, forward_speed), problems in failed_results.items():
+ nb = len(problems)
+ if nb > 1:
+ if forward_speed != 0.0:
+ LOG.warning("Skipped %d problems for body=%s, omega=%s, water_depth=%s, forward_speed=%s\nbecause of %s(%r)",
+ nb, problems[0].body.__short_str__(), omega, water_depth, forward_speed, exc_type.__name__, exc_msg)
+ else:
+ LOG.warning("Skipped %d problems for body=%s, omega=%s, water_depth=%s\nbecause of %s(%r)",
+ nb, problems[0].body.__short_str__(), omega, water_depth, exc_type.__name__, exc_msg)
+ else:
+ LOG.warning("Skipped %s\nbecause of %s(%r)", problems[0], exc_type.__name__, exc_msg)
+
+ def solve_all(self, problems, *, method=None, n_jobs=1, n_threads=None, progress_bar=None, _check_wavelength=True, _display_errors=True, **kwargs):
  """Solve several problems.
  Optional keyword arguments are passed to `BEMSolver.solve`.

@@ -218,8 +265,14 @@
  It is recommended to set the method more globally when initializing the solver.
  If provided here, the value in argument of `solve_all` overrides the global one.
  n_jobs: int, optional (default: 1)
- the number of jobs to run in parallel using the optional dependency `joblib`
+ the number of jobs to run in parallel using the optional dependency ``joblib``
  By defaults: do not use joblib and solve sequentially.
+ n_threads: int, optional
+ the number of threads used to solve each problem.
+ The total number of used CPU will be n_jobs×n_threads.
+ By default: use as much as possible.
+ Requires the optional dependency ``threadpoolctl`` if ``n_jobs==1``.
+ Also controlled by the environment variables ``OMP_NUM_THREADS`` and ``MKL_NUM_THREADS``.
  progress_bar: bool, optional
  Display a progress bar while solving.
  If no value is provided to this method directly,
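The n_threads parameter documented above complements n_jobs: the total CPU budget is roughly n_jobs×n_threads. A hedged usage sketch, assuming a solver and a list of problems already exist (joblib and threadpoolctl are the optional dependencies named in the docstring):

    # Sequential resolution, limiting the threads used inside each problem
    # (requires threadpoolctl when n_jobs == 1):
    results = solver.solve_all(problems, n_jobs=1, n_threads=4)

    # Parallel resolution with joblib: 4 processes with 2 threads each,
    # that is about 8 CPUs in total:
    results = solver.solve_all(problems, n_jobs=4, n_threads=2)
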
@@ -238,6 +291,8 @@
  self._check_wavelength_and_mesh_resolution(problems)
  self._check_wavelength_and_irregular_frequencies(problems)

+ self._check_ram(problems, n_jobs)
+
  if progress_bar is None:
  if "CAPYTAINE_PROGRESS_BAR" in os.environ:
  env_var = os.environ["CAPYTAINE_PROGRESS_BAR"].lower()
@@ -250,26 +305,68 @@
  else:
  progress_bar = True

+ monitor = MemoryMonitor()
  if n_jobs == 1: # force sequential resolution
  problems = sorted(problems)
  if progress_bar:
  problems = track(problems, total=len(problems), description="Solving BEM problems")
- results = [self._solve_and_catch_errors(pb, method=method, _check_wavelength=False, **kwargs) for pb in problems]
+ if n_threads is None:
+ results = [self._solve_and_catch_errors(pb, method=method, _display_errors=_display_errors, _check_wavelength=False, **kwargs) for pb in problems]
+ else:
+ threadpoolctl = silently_import_optional_dependency("threadpoolctl")
+ if threadpoolctl is None:
+ raise ImportError(f"Setting the `n_threads` argument to {n_threads} with `n_jobs=1` requires the missing optional dependency 'threadpoolctl'.")
+ with threadpoolctl.threadpool_limits(limits=n_threads):
+ results = [self._solve_and_catch_errors(pb, method=method, _display_errors=_display_errors, _check_wavelength=False, **kwargs) for pb in problems]
  else:
  joblib = silently_import_optional_dependency("joblib")
  if joblib is None:
  raise ImportError(f"Setting the `n_jobs` argument to {n_jobs} requires the missing optional dependency 'joblib'.")
  groups_of_problems = LinearPotentialFlowProblem._group_for_parallel_resolution(problems)
- parallel = joblib.Parallel(return_as="generator", n_jobs=n_jobs)
- groups_of_results = parallel(joblib.delayed(self.solve_all)(grp, method=method, n_jobs=1, progress_bar=False, _check_wavelength=False, **kwargs) for grp in groups_of_problems)
+ with joblib.parallel_config(backend='loky', inner_max_num_threads=n_threads):
+ parallel = joblib.Parallel(return_as="generator", n_jobs=n_jobs)
+ groups_of_results = parallel(joblib.delayed(self._solve_all_and_return_timer)(grp, method=method, n_threads=None, progress_bar=False, _display_errors=False, _check_wavelength=False, **kwargs) for grp in groups_of_problems)
  if progress_bar:
  groups_of_results = track(groups_of_results,
  total=len(groups_of_problems),
- description=f"Solving BEM problems with {n_jobs} threads:")
- results = [res for grp in groups_of_results for res in grp] # flatten the nested list
- LOG.info("Solver timer summary:\n%s", self.timer_summary())
+ description=f"Solving BEM problems with {n_jobs} processes:")
+ results = []
+ process_id_mapping = {}
+ for grp_results, other_timer, process_id in groups_of_results:
+ results.extend(grp_results)
+ self._display_errors(grp_results)
+ if process_id not in process_id_mapping:
+ process_id_mapping[process_id] = len(process_id_mapping) + 1
+ self.timer.add_data_from_other_timer(other_timer, process=process_id_mapping[process_id])
+ memory_peak = monitor.get_memory_peak()
+ if memory_peak is None:
+ LOG.info("Actual peak RAM usage: Not measured since optional dependency `psutil` cannot be found.")
+ else:
+ LOG.info(f"Actual peak RAM usage: {memory_peak} GB.")
+ LOG.info("Solver timer summary (in seconds):\n%s", self.displayed_total_summary())
  return results

+ def _solve_all_and_return_timer(
+ self, grp, *,
+ method, n_threads,
+ progress_bar, _check_wavelength, **kwargs
+ ):
+ # This method is only called in joblib's Parallel loop.
+ # It contains some pre-processing and post-processing that
+ # should be done in each process when solving the batch of problems.
+
+ self.reset_timer()
+ # Timer data will be concatenated to the Timer of process 0.
+ # We reset the timer at each call to avoid concatenating
+ # the same data twice in the main process.
+
+ results = self.solve_all(
+ grp, method=method, n_jobs=1,
+ n_threads=n_threads, progress_bar=progress_bar,
+ _check_wavelength=_check_wavelength, **kwargs
+ )
+ return results, self.timer, os.getpid()
+
  @staticmethod
  def _check_wavelength_and_mesh_resolution(problems):
  """Display a warning if some of the problems have a mesh resolution
@@ -329,14 +426,37 @@
  + recommendation
  )

- def fill_dataset(self, dataset, bodies, *, method=None, n_jobs=1, _check_wavelength=True, progress_bar=None, **kwargs):
+ def _check_ram(self,problems, n_jobs = 1):
+ """Display a warning if the RAM estimation is larger than a certain limit."""
+ LOG.debug("Check RAM estimation.")
+ psutil = silently_import_optional_dependency("psutil")
+ if psutil is None:
+ ram_limit = 8
+ else :
+ ram_limit = psutil.virtual_memory().total / (1024**3) * 0.3
+
+ if n_jobs == - 1:
+ n_jobs = os.cpu_count()
+
+ estimated_peak_memory = n_jobs*max(self.engine.compute_ram_estimation(pb) for pb in problems)
+
+ if estimated_peak_memory < 0.5:
+ LOG.info("Estimated peak RAM usage: <1 GB.")
+
+ elif estimated_peak_memory < ram_limit:
+ LOG.info(f"Estimated peak RAM usage: {int(np.ceil(estimated_peak_memory))} GB.")
+
+ else:
+ LOG.warning(f"Estimated peak RAM usage: {int(np.ceil(estimated_peak_memory))} GB.")
+
+ def fill_dataset(self, dataset, bodies, *, method=None, n_jobs=1, n_threads=None, _check_wavelength=True, progress_bar=None, **kwargs):
  """Solve a set of problems defined by the coordinates of an xarray dataset.

  Parameters
  ----------
  dataset : xarray Dataset
  dataset containing the problems parameters: frequency, radiating_dof, water_depth, ...
- bodies : FloatingBody or list of FloatingBody
+ bodies : FloatingBody or Multibody or list of FloatingBody or list of Multibody
  The body or bodies involved in the problems
  They should all have different names.
  method: string, optional
@@ -344,8 +464,14 @@
  It is recommended to set the method more globally when initializing the solver.
  If provided here, the value in argument of `fill_dataset` overrides the global one.
  n_jobs: int, optional (default: 1)
- the number of jobs to run in parallel using the optional dependency `joblib`
+ the number of jobs to run in parallel using the optional dependency ``joblib``.
  By defaults: do not use joblib and solve sequentially.
+ n_threads: int, optional
+ the number of threads used to solve each problem.
+ The total number of used CPU will be n_jobs×n_threads.
+ By default: use as much as possible.
+ Requires the optional dependency ``threadpoolctl`` if ``n_jobs==1``.
+ Also controlled by the environment variables ``OMP_NUM_THREADS`` and ``MKL_NUM_THREADS``.
  progress_bar: bool, optional
  Display a progress bar while solving.
  If no value is provided to this method directly,
@@ -365,12 +491,12 @@
  attrs["method"] = method
  problems = problems_from_dataset(dataset, bodies)
  if 'theta' in dataset.coords:
- results = self.solve_all(problems, keep_details=True, method=method, n_jobs=n_jobs, _check_wavelength=_check_wavelength, progress_bar=progress_bar)
+ results = self.solve_all(problems, keep_details=True, method=method, n_jobs=n_jobs, n_threads=n_threads, _check_wavelength=_check_wavelength, progress_bar=progress_bar)
  kochin = kochin_data_array(results, dataset.coords['theta'])
  dataset = assemble_dataset(results, attrs=attrs, **kwargs)
  dataset.update(kochin)
  else:
- results = self.solve_all(problems, keep_details=False, method=method, n_jobs=n_jobs, _check_wavelength=_check_wavelength, progress_bar=progress_bar)
+ results = self.solve_all(problems, keep_details=False, method=method, n_jobs=n_jobs, n_threads=n_threads, _check_wavelength=_check_wavelength, progress_bar=progress_bar)
  dataset = assemble_dataset(results, attrs=attrs, **kwargs)
  return dataset

@@ -394,29 +520,32 @@
  ------
  Exception: if the :code:`LinearPotentialFlowResult` object given as input does not contain the source distribution.
  """
- points, output_shape = _normalize_points(points, keep_mesh=True)
+ gf_params = dict(free_surface=result.free_surface, water_depth=result.water_depth, wavenumber=result.encounter_wavenumber)
+
+ points, output_shape = _normalize_points(points)
  if result.sources is None:
  raise Exception(f"""The values of the sources of {result} cannot been found.
  They probably have not been stored by the solver because the option keep_details=True have not been set or the direct method has been used.
  Please re-run the resolution with the indirect method and keep_details=True.""")

- with self.timer[" Green function"]:
- S, _ = self.green_function.evaluate(points, result.body.mesh_including_lid, result.free_surface, result.water_depth, result.encounter_wavenumber)
- potential = S @ result.sources # Sum the contributions of all panels in the mesh
+ with self.timer(step="Post-processing potential"):
+ S = self.engine.build_S_matrix(points, result.body.mesh_including_lid, **gf_params)
+ potential = S @ result.sources # Sum the contributions of all panels in the mesh
  return potential.reshape(output_shape)

  def _compute_potential_gradient(self, points, result):
  points, output_shape = _normalize_points(points, keep_mesh=True)
+ # keep_mesh, because we need the normal vectors associated with each collocation points to compute the fullK matrix

  if result.sources is None:
  raise Exception(f"""The values of the sources of {result} cannot been found.
  They probably have not been stored by the solver because the option keep_details=True have not been set.
  Please re-run the resolution with this option.""")

- with self.timer[" Green function"]:
- _, gradG = self.green_function.evaluate(points, result.body.mesh_including_lid, result.free_surface, result.water_depth, result.encounter_wavenumber,
- early_dot_product=False)
- velocities = np.einsum('ijk,j->ik', gradG, result.sources) # Sum the contributions of all panels in the mesh
+ gf_params = dict(free_surface=result.free_surface, water_depth=result.water_depth, wavenumber=result.encounter_wavenumber)
+ with self.timer(step="Post-processing velocity"):
+ gradG = self.engine.build_fullK_matrix(points, result.body.mesh_including_lid, **gf_params)
+ velocities = np.einsum('ijk,j->ik', gradG, result.sources) # Sum the contributions of all panels in the mesh
  return velocities.reshape((*output_shape, 3))

  def compute_velocity(self, points, result):
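The post-processing methods above now build their matrices through the engine (build_S_matrix, build_fullK_matrix) instead of calling the Green function directly, but their usage is unchanged: the result must come from the indirect method with keep_details=True so that the source distribution is available. A hedged usage sketch, with solver, problem and the point coordinates as placeholders:

    import numpy as np

    # keep_details=True keeps the sources on the result so that they can be
    # re-used by compute_potential and compute_velocity.
    result = solver.solve(problem, keep_details=True)

    points = np.array([[10.0, 0.0, -1.0], [20.0, 0.0, -1.0]])
    phi = solver.compute_potential(points, result)  # complex potential at each point
    u = solver.compute_velocity(points, result)     # complex velocity vectors, shape (2, 3)
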
@@ -490,7 +619,7 @@
  ------
  Exception: if the :code:`LinearPotentialFlowResult` object given as input does not contain the source distribution.
  """
- points, output_shape = _normalize_free_surface_points(points, keep_mesh=True)
+ points, output_shape = _normalize_free_surface_points(points)

  if result.forward_speed != 0:
  fs_elevation = -1/result.g * (-1j*result.encounter_omega) * self.compute_potential(points, result)
@@ -537,13 +666,9 @@
  They probably have not been stored by the solver because the option keep_details=True have not been set or the direct method has been used.
  Please re-run the resolution with the indirect method and keep_details=True.""")

+ gf_params = dict(free_surface=result.free_surface, water_depth=result.water_depth, wavenumber=result.encounter_wavenumber)
  if chunk_size > mesh.nb_faces:
- S = self.engine.build_S_matrix(
- mesh,
- result.body.mesh_including_lid,
- result.free_surface, result.water_depth, result.wavenumber,
- self.green_function
- )
+ S = self.engine.build_S_matrix(mesh, result.body.mesh_including_lid, **gf_params)
  phi = S @ result.sources

  else:
@@ -553,8 +678,7 @@
  S = self.engine.build_S_matrix(
  mesh.extract_faces(faces_to_extract),
  result.body.mesh_including_lid,
- result.free_surface, result.water_depth, result.wavenumber,
- self.green_function
+ **gf_params
  )
  phi[i:i+chunk_size] = S @ result.sources