capytaine-2.2-cp39-cp39-win_amd64.whl → capytaine-2.3-cp39-cp39-win_amd64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (55)
  1. capytaine/__about__.py +1 -1
  2. capytaine/__init__.py +12 -8
  3. capytaine/bem/airy_waves.py +7 -2
  4. capytaine/bem/engines.py +2 -2
  5. capytaine/bem/problems_and_results.py +82 -35
  6. capytaine/bem/solver.py +138 -41
  7. capytaine/bodies/bodies.py +40 -12
  8. capytaine/bodies/predefined/rectangles.py +2 -0
  9. capytaine/green_functions/FinGreen3D/.gitignore +1 -0
  10. capytaine/green_functions/FinGreen3D/FinGreen3D.f90 +3589 -0
  11. capytaine/green_functions/FinGreen3D/LICENSE +165 -0
  12. capytaine/green_functions/FinGreen3D/Makefile +16 -0
  13. capytaine/green_functions/FinGreen3D/README.md +24 -0
  14. capytaine/green_functions/FinGreen3D/test_program.f90 +39 -0
  15. capytaine/green_functions/LiangWuNoblesse/.gitignore +1 -0
  16. capytaine/green_functions/LiangWuNoblesse/LICENSE +504 -0
  17. capytaine/green_functions/LiangWuNoblesse/LiangWuNoblesseWaveTerm.f90 +751 -0
  18. capytaine/green_functions/LiangWuNoblesse/Makefile +18 -0
  19. capytaine/green_functions/LiangWuNoblesse/README.md +2 -0
  20. capytaine/green_functions/LiangWuNoblesse/test_program.f90 +28 -0
  21. capytaine/green_functions/abstract_green_function.py +55 -3
  22. capytaine/green_functions/delhommeau.py +186 -115
  23. capytaine/green_functions/hams.py +204 -0
  24. capytaine/green_functions/libs/Delhommeau_float32.cp39-win_amd64.dll.a +0 -0
  25. capytaine/green_functions/libs/Delhommeau_float32.cp39-win_amd64.pyd +0 -0
  26. capytaine/green_functions/libs/Delhommeau_float64.cp39-win_amd64.dll.a +0 -0
  27. capytaine/green_functions/libs/Delhommeau_float64.cp39-win_amd64.pyd +0 -0
  28. capytaine/io/bemio.py +14 -2
  29. capytaine/io/mesh_loaders.py +2 -1
  30. capytaine/io/wamit.py +479 -0
  31. capytaine/io/xarray.py +252 -100
  32. capytaine/matrices/block.py +4 -2
  33. capytaine/matrices/linear_solvers.py +1 -1
  34. capytaine/matrices/low_rank.py +3 -1
  35. capytaine/meshes/clipper.py +4 -3
  36. capytaine/meshes/collections.py +11 -1
  37. capytaine/meshes/mesh_like_protocol.py +37 -0
  38. capytaine/meshes/meshes.py +22 -9
  39. capytaine/meshes/properties.py +58 -24
  40. capytaine/meshes/symmetric.py +11 -2
  41. capytaine/post_pro/kochin.py +4 -4
  42. capytaine/tools/lists_of_points.py +3 -3
  43. capytaine/tools/prony_decomposition.py +60 -4
  44. capytaine/tools/symbolic_multiplication.py +30 -2
  45. capytaine/tools/timer.py +64 -0
  46. capytaine-2.3.dist-info/DELVEWHEEL +2 -0
  47. capytaine-2.3.dist-info/METADATA +761 -0
  48. capytaine-2.3.dist-info/RECORD +98 -0
  49. capytaine-2.2.dist-info/DELVEWHEEL +0 -2
  50. capytaine-2.2.dist-info/METADATA +0 -751
  51. capytaine-2.2.dist-info/RECORD +0 -82
  52. {capytaine-2.2.dist-info → capytaine-2.3.dist-info}/LICENSE +0 -0
  53. {capytaine-2.2.dist-info → capytaine-2.3.dist-info}/WHEEL +0 -0
  54. {capytaine-2.2.dist-info → capytaine-2.3.dist-info}/entry_points.txt +0 -0
  55. capytaine.libs/{.load-order-capytaine-2.2 → .load-order-capytaine-2.3} +2 -2
capytaine/io/xarray.py CHANGED
@@ -3,8 +3,8 @@
 .. todo:: This module could be tidied up a bit and some methods merged or
           uniformized.
 """
-# Copyright (C) 2017-2019 Matthieu Ancellin
-# See LICENSE file at <https://github.com/mancellin/capytaine>
+# Copyright (C) 2017-2025 Matthieu Ancellin
+# See LICENSE file at <https://github.com/capytaine/capytaine>
 
 import logging
 from datetime import datetime
@@ -32,6 +32,16 @@ LOG = logging.getLogger(__name__)
 #  Reading test matrix  #
 #########################
 
+def _unsqueeze_dimensions(data_array, dimensions=None):
+    """Add scalar coordinates as dimensions of size 1."""
+    if dimensions is None:
+        dimensions = list(data_array.coords.keys())
+    for dim in dimensions:
+        if len(data_array.coords[dim].values.shape) == 0:
+            data_array = xr.concat([data_array], dim=dim)
+    return data_array
+
+
 def problems_from_dataset(dataset: xr.Dataset,
                           bodies: Union[FloatingBody, Sequence[FloatingBody]],
                           ) -> List[LinearPotentialFlowProblem]:
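The `_unsqueeze_dimensions` helper is moved up unchanged so the test-matrix reading code can reuse it. For readers unfamiliar with the xarray idiom it relies on, a minimal standalone sketch (the values are made up):

    import xarray as xr

    # A result computed at a single frequency: "omega" is a scalar coordinate.
    da = xr.DataArray([1.0, 2.0], dims=["radiating_dof"],
                      coords={"radiating_dof": ["Surge", "Heave"], "omega": 1.5})

    # xr.concat([da], dim="omega") promotes the scalar coordinate to a
    # dimension of size 1, which is what the helper does for each scalar coordinate.
    da = xr.concat([da], dim="omega")
    assert da.dims == ("omega", "radiating_dof")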
@@ -68,7 +78,7 @@ def problems_from_dataset(dataset: xr.Dataset,
     # Warn user in case of key with unrecognized name (e.g. misspells)
     keys_in_dataset = set(dataset.dims)
     accepted_keys = {'wave_direction', 'radiating_dof', 'influenced_dof',
-                     'body_name', 'omega', 'period', 'wavelength', 'wavenumber',
+                     'body_name', 'omega', 'freq', 'period', 'wavelength', 'wavenumber',
                      'forward_speed', 'water_depth', 'rho', 'g', 'theta'}
     unrecognized_keys = keys_in_dataset.difference(accepted_keys)
     if len(unrecognized_keys) > 0:
@@ -78,9 +88,9 @@ def problems_from_dataset(dataset: xr.Dataset,
         raise ValueError("Neither 'radiating_dof' nor 'wave_direction' has been provided in the dataset. "
                          "No linear potential flow problem can be inferred.")
 
-    frequency_keys = keys_in_dataset & {'omega', 'period', 'wavelength', 'wavenumber'}
+    frequency_keys = keys_in_dataset & {'omega', 'freq', 'period', 'wavelength', 'wavenumber'}
     if len(frequency_keys) > 1:
-        raise ValueError("Setting problems requires at most one of the following: omega (angular frequency) OR period OR wavenumber OR wavelength.\n"
+        raise ValueError("Setting problems requires at most one of the following: omega (angular frequency) OR freq (in Hz) OR period OR wavenumber OR wavelength.\n"
                          "Received {}".format(frequency_keys))
     # END SANITY CHECKS
 
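Together with the previous hunk, this lets a test matrix specify its frequencies in Hz through the new `freq` coordinate. A hypothetical usage sketch (the body geometry and values are illustrative):

    import numpy as np
    import xarray as xr
    import capytaine as cpt

    body = cpt.FloatingBody(mesh=cpt.mesh_sphere(center=(0, 0, -2)),
                            dofs=cpt.rigid_body_dofs(rotation_center=(0, 0, -2)))
    test_matrix = xr.Dataset(coords={
        "freq": np.linspace(0.1, 1.0, 10),  # in Hz; 2.2 only accepted omega, period, wavenumber or wavelength
        "wave_direction": [0.0],
        "radiating_dof": list(body.dofs),
    })
    dataset = cpt.BEMSolver().fill_dataset(test_matrix, body)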
@@ -111,7 +121,8 @@ def problems_from_dataset(dataset: xr.Dataset,
     problems = []
     if wave_direction_range is not None:
         for freq, wave_direction, water_depth, body_name, forward_speed, rho, g \
-                in product(freq_range, wave_direction_range, water_depth_range, body_range, forward_speed_range, rho_range, g_range):
+                in product(freq_range, wave_direction_range, water_depth_range, body_range,
+                           forward_speed_range, rho_range, g_range):
             problems.append(
                 DiffractionProblem(body=body_range[body_name], **{freq_type: freq},
                                    wave_direction=wave_direction, water_depth=water_depth,
@@ -142,6 +153,61 @@ def problems_from_dataset(dataset: xr.Dataset,
     return sorted(problems)
 
 
+########################
+#  Dataframe creation  #
+########################
+
+def _detect_bemio_results(results, calling_function="_detect_bemio_results"):
+    error_msg = (
+        f"The function {calling_function} expected either a non-empty list of LinearPotentialFlowResult or a bemio.io object.\n"
+        f"Instead, it received:\n{repr(results)}"
+    )
+
+    if hasattr(results, '__iter__'):
+        if len(results) == 0:
+            raise ValueError("Iterable provided to `assemble_dataset` is empty.")
+        try:
+            if 'capytaine' in results[0].__module__:
+                bemio_import = False
+            else:
+                raise TypeError(error_msg)
+        except:
+            raise TypeError(error_msg)
+
+    else:
+        try:
+            if 'bemio.io' in results.__module__:
+                bemio_import = True
+            else:
+                raise TypeError(error_msg)
+        except:
+            raise TypeError(error_msg)
+
+    return bemio_import
+
+
+def assemble_dataframe(results, wavenumber=True, wavelength=True):
+    if _detect_bemio_results(results, calling_function="assemble_dataframe"):
+        return dataframe_from_bemio(results, wavenumber, wavelength)  # TODO add hydrostatics
+
+    records_list = [record for result in results for record in result.records]
+    df = pd.DataFrame(records_list)
+
+    all_dofs_in_order = list({k: None for r in results for k in r.body.dofs.keys()})
+    # Using a dict above to remove duplicates while conserving ordering
+    inf_dof_cat = pd.CategoricalDtype(categories=all_dofs_in_order)
+    df["influenced_dof"] = df["influenced_dof"].astype(inf_dof_cat)
+    if 'added_mass' in df.columns:
+        rad_dof_cat = pd.CategoricalDtype(categories=all_dofs_in_order)
+        df["radiating_dof"] = df["radiating_dof"].astype(rad_dof_cat)
+
+    return df
+
+
+######################
+#  Dataset creation  #
+######################
+
 def _squeeze_dimensions(data_array, dimensions=None):
     """Remove dimensions if they are of size 1. The coordinates become scalar coordinates."""
     if dimensions is None:
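The record-collection logic that previously lived inside `assemble_dataset` is now exposed as `assemble_dataframe`, so the flat table of results can be inspected directly with pandas. A sketch, importing from the module where it is defined:

    from capytaine.io.xarray import assemble_dataframe

    # results = cpt.BEMSolver().solve_all(problems)
    df = assemble_dataframe(results)  # one row per computed coefficient
    print(df.columns)                 # e.g. omega, radiating_dof, influenced_dof, added_mass, ...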
@@ -152,20 +218,6 @@ def _squeeze_dimensions(data_array, dimensions=None):
     return data_array
 
 
-def _unsqueeze_dimensions(data_array, dimensions=None):
-    """Add scalar coordinates as dimensions of size 1."""
-    if dimensions is None:
-        dimensions = list(data_array.coords.keys())
-    for dim in dimensions:
-        if len(data_array.coords[dim].values.shape) == 0:
-            data_array = xr.concat([data_array], dim=dim)
-    return data_array
-
-
-######################
-#  Dataset creation  #
-######################
-
 def _dataset_from_dataframe(df: pd.DataFrame,
                             variables: Union[str, Sequence[str]],
                             dimensions: Sequence[str],
@@ -188,12 +240,8 @@ def _dataset_from_dataframe(df: pd.DataFrame,
         They will appear as dimensions in the output dataset only if they have
         more than one different value.
     """
-
-    for variable_name in variables:
-        df = df[df[variable_name].notnull()].dropna(axis='columns')  # Keep only records with non null values of all the variables
     df = df.drop_duplicates(optional_dims + dimensions)
     df = df.set_index(optional_dims + dimensions)
-
     da = df.to_xarray()[variables]
     da = _squeeze_dimensions(da, dimensions=optional_dims)
     return da
@@ -223,6 +271,7 @@ def kochin_data_array(results: Sequence[LinearPotentialFlowResult],
     :meth:`~capytaine.post_pro.kochin.compute_kochin`
         The present function is just a wrapper around :code:`compute_kochin`.
     """
+    # TODO: it is not very good to mix computation and data manipulation here...
     records = pd.DataFrame([
         dict(**result.problem._asdict(), theta=theta, kochin=kochin, kind=result.__class__.__name__)
         for result in results
@@ -252,26 +301,61 @@ def kochin_data_array(results: Sequence[LinearPotentialFlowResult],
 
     return kochin_data
 
-
-def collect_records(results):
-    records_list = []
-    warned_once_about_no_free_surface = False
-    for result in results:
-        if result.free_surface == np.inf:
-            if not warned_once_about_no_free_surface:
-                LOG.warning("Datasets currently only support cases with a free surface (free_surface=0.0).\n"
-                            "Cases without a free surface (free_surface=inf) are ignored.\n"
-                            "See also https://github.com/mancellin/capytaine/issues/88")
-                warned_once_about_no_free_surface = True
-            else:
-                pass
-        else:
-            for record in result.records:
-                records_list.append(record)
-    return records_list
+VARIABLES_ATTRIBUTES = {
+    "omega": {
+        'long_name': 'Angular frequency',
+        'units': 'rad/s',
+    },
+    "freq": {
+        'long_name': 'Frequency',
+        'units': 'Hz',
+    },
+    "period": {
+        'long_name': 'Period',
+        'units': 's',
+    },
+    "wavenumber": {
+        'long_name': "Angular wavenumber",
+        'units': 'rad/m',
+    },
+    "wavelength": {
+        'long_name': "Wave length",
+        'units': 'm',
+    },
+    "encounter_omega": {
+        'long_name': "Encounter angular frequency",
+        'units': 'rad/s',
+    },
+    "encounter_wave_direction": {
+        'long_name': "Encounter wave direction",
+        'units': 'rad',
+    },
+    "wave_direction": {
+        'long_name': "Wave direction",
+        'units': "rad"
+    },
+    "radiating_dof": {
+        'long_name': 'Radiating DOF',
+    },
+    "influenced_dof": {
+        'long_name': 'Influenced DOF',
+    },
+    "added_mass": {
+        'long_name': 'Added mass',
+    },
+    "radiation_damping": {
+        'long_name': 'Radiation damping',
+    },
+    "diffraction_force": {
+        'long_name': "Diffraction force",
+    },
+    "Froude_Krylov_force": {
+        'long_name': "Froude Krylov force",
+    },
+}
 
 def assemble_dataset(results,
-                     omega=True, wavenumber=True, wavelength=True, period=True,
+                     omega=True, freq=True, wavenumber=True, wavelength=True, period=True,
                      mesh=False, hydrostatics=True, attrs=None) -> xr.Dataset:
     """Transform a list of :class:`LinearPotentialFlowResult` into a :class:`xarray.Dataset`.
 
@@ -281,10 +365,12 @@ def assemble_dataset(results,
 
     Parameters
     ----------
-    results: list of LinearPotentialFlowResult
+    results: list of LinearPotentialFlowResult or BEMIO dataset
        The results that will be read.
     omega: bool, optional
         If True, the coordinate 'omega' will be added to the output dataset.
+    freq: bool, optional
+        If True, the coordinate 'freq' will be added to the output dataset.
     wavenumber: bool, optional
         If True, the coordinate 'wavenumber' will be added to the output dataset.
     wavelength: bool, optional
@@ -298,77 +384,47 @@ def assemble_dataset(results,
     attrs: dict, optional
         Attributes that should be added to the output dataset.
     """
-    dataset = xr.Dataset()
+    bemio_import = _detect_bemio_results(results, calling_function="assemble_dataset")
 
-    error_msg = 'The first argument of `assemble_dataset` must be either a list of LinearPotentialFlowResult or a bemio.io object'
-    if hasattr(results, '__iter__'):
-        try:
-            if 'capytaine' in results[0].__module__:
-                bemio_import = False
-            else:
-                raise TypeError(error_msg)
-        except:
-            raise TypeError(error_msg)
-
-    else:
-        try:
-            if 'bemio.io' in results.__module__:
-                bemio_import = True
-            else:
-                raise TypeError(error_msg)
-        except:
-            raise TypeError(error_msg)
+    records = assemble_dataframe(results)
 
     if bemio_import:
-        records = dataframe_from_bemio(results, wavenumber, wavelength)  # TODO add hydrostatics
-        all_dofs_in_order = {'Surge': None, 'Sway': None, 'Heave': None, 'Roll': None, 'Pitch': None, 'Yaw': None}
         main_freq_type = "omega"
-
     else:
-        records = pd.DataFrame(collect_records(results))
-        all_dofs_in_order = {k: None for r in results for k in r.body.dofs.keys()}
         main_freq_type = Counter((res.provided_freq_type for res in results)).most_common(1)[0][0]
 
+    if np.any(records["free_surface"] != 0.0):
+        LOG.warning("Datasets only support cases with a free surface (free_surface=0.0).\n"
+                    "Cases without a free surface (free_surface=inf) are ignored.\n"
+                    "See also https://github.com/mancellin/capytaine/issues/88")
+        records = records[records["free_surface"] == 0.0]
+
     if attrs is None:
         attrs = {}
     attrs['creation_of_dataset'] = datetime.now().isoformat()
 
-    if len(records) == 0:
-        raise ValueError("No result passed to assemble_dataset.")
-
-    inf_dof_cat = pd.CategoricalDtype(categories=all_dofs_in_order.keys())
-    records["influenced_dof"] = records["influenced_dof"].astype(inf_dof_cat)
-    rad_dof_cat = pd.CategoricalDtype(categories=all_dofs_in_order.keys())
-    if 'added_mass' in records.columns:
-        records["radiating_dof"] = records["radiating_dof"].astype(rad_dof_cat)
+    kinds_of_results = set(records['kind'])
 
     optional_dims = ['g', 'rho', 'body_name', 'water_depth', 'forward_speed']
 
+    dataset = xr.Dataset()
+
     # RADIATION RESULTS
-    if 'added_mass' in records.columns:
+    if "RadiationResult" in kinds_of_results:
         radiation_cases = _dataset_from_dataframe(
-            records,
+            records[records['kind'] == "RadiationResult"],
             variables=['added_mass', 'radiation_damping'],
             dimensions=[main_freq_type, 'radiating_dof', 'influenced_dof'],
             optional_dims=optional_dims + ['wave_direction'])
-        radiation_cases.added_mass.attrs['long_name'] = 'Added mass'
-        radiation_cases.radiation_damping.attrs['long_name'] = 'Radiation damping'
-        radiation_cases.radiating_dof.attrs['long_name'] = 'Radiating DOF'
-        radiation_cases.influenced_dof.attrs['long_name'] = 'Influenced DOF'
         dataset = xr.merge([dataset, radiation_cases])
 
     # DIFFRACTION RESULTS
-    if 'diffraction_force' in records.columns:
+    if "DiffractionResult" in kinds_of_results:
         diffraction_cases = _dataset_from_dataframe(
-            records,
+            records[records['kind'] == "DiffractionResult"],
             variables=['diffraction_force', 'Froude_Krylov_force'],
             dimensions=[main_freq_type, 'wave_direction', 'influenced_dof'],
             optional_dims=optional_dims)
-        diffraction_cases.diffraction_force.attrs['long_name'] = 'Diffraction force'
-        diffraction_cases.Froude_Krylov_force.attrs['long_name'] = 'Froude Krylov force'
-        diffraction_cases.influenced_dof.attrs['long_name'] = 'Influenced DOF'
-        diffraction_cases.wave_direction.attrs['long_name'] = 'Wave direction'
-        diffraction_cases.wave_direction.attrs['units'] = 'rad'
         dataset = xr.merge([dataset, diffraction_cases])
     dataset['excitation_force'] = dataset['Froude_Krylov_force'] + dataset['diffraction_force']
 
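Note the new dispatch strategy: instead of sniffing for an 'added_mass' column, the function now partitions the records on the `kind` column filled in by `assemble_dataframe`. Illustrative sketch:

    kinds_of_results = set(df["kind"])  # e.g. {"RadiationResult", "DiffractionResult"}
    radiation_records = df[df["kind"] == "RadiationResult"]
    diffraction_records = df[df["kind"] == "DiffractionResult"]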
@@ -381,8 +437,15 @@ def assemble_dataset(results,
             optional_dims=['g', 'water_depth'] if main_freq_type in {'wavelength', 'wavenumber'} else []
         )
         dataset.coords['omega'] = omega_ds['omega']
-        dataset.omega.attrs['long_name'] = 'Angular frequency'
-        dataset.omega.attrs['units'] = 'rad/s'
+
+    if freq and main_freq_type != "freq":
+        freq_ds = _dataset_from_dataframe(
+            records,
+            variables=['freq'],
+            dimensions=[main_freq_type],
+            optional_dims=['g', 'water_depth'] if main_freq_type in {'wavelength', 'wavenumber'} else []
+        )
+        dataset.coords['freq'] = freq_ds['freq']
 
     if period and main_freq_type != "period":
         period_ds = _dataset_from_dataframe(
@@ -392,8 +455,6 @@
             optional_dims=['g', 'water_depth'] if main_freq_type in {'wavelength', 'wavenumber'} else []
         )
         dataset.coords['period'] = period_ds['period']
-        dataset.period.attrs['long_name'] = 'Period'
-        dataset.period.attrs['units'] = 's'
 
     if wavenumber and main_freq_type != "wavenumber":
         wavenumber_ds = _dataset_from_dataframe(
@@ -403,8 +464,6 @@
             optional_dims=['g', 'water_depth'] if main_freq_type in {'period', 'omega'} else []
         )
         dataset.coords['wavenumber'] = wavenumber_ds['wavenumber']
-        dataset.wavenumber.attrs['long_name'] = 'Angular wavenumber'
-        dataset.wavenumber.attrs['units'] = 'rad/m'
 
     if wavelength and main_freq_type != "wavelength":
         wavelength_ds = _dataset_from_dataframe(
@@ -414,8 +473,6 @@
             optional_dims=['g', 'water_depth'] if main_freq_type in {'period', 'omega'} else []
         )
         dataset.coords['wavelength'] = wavelength_ds['wavelength']
-        dataset.wavelength.attrs['long_name'] = 'Wave length'
-        dataset.wavelength.attrs['units'] = 'm'
 
     if not all(records["forward_speed"] == 0.0):
         omegae_ds = _dataset_from_dataframe(
@@ -425,8 +482,6 @@
             optional_dims=['g', 'water_depth'],
         )
         dataset.coords['encounter_omega'] = omegae_ds['encounter_omega']
-        dataset.encounter_omega.attrs['long_name'] = 'Encounter angular frequency'
-        dataset.encounter_omega.attrs['units'] = 'rad/s'
 
         encounter_wave_direction_ds = _dataset_from_dataframe(
             records,
@@ -435,8 +490,6 @@
             optional_dims=[],
         )
         dataset.coords['encounter_wave_direction'] = encounter_wave_direction_ds['encounter_wave_direction']
-        dataset.encounter_wave_direction.attrs['long_name'] = 'Encounter wave direction'
-        dataset.encounter_wave_direction.attrs['units'] = 'rad'
 
     if mesh:
         if bemio_import:
@@ -468,11 +521,52 @@ def assemble_dataset(results,
         bodies = list({result.body for result in results})
         dataset = xr.merge([dataset, hydrostatics_dataset(bodies)])
 
+    for var in set(dataset) | set(dataset.coords):
+        if var in VARIABLES_ATTRIBUTES:
+            dataset[var].attrs.update(VARIABLES_ATTRIBUTES[var])
+
     dataset.attrs.update(attrs)
     dataset.attrs['capytaine_version'] = __version__
     return dataset
 
 
+def assemble_matrices(results):
+    """Simplified version of assemble_dataset, returning only bare matrices.
+    Meant mainly for teaching without introducing Xarray to beginners.
+
+    Parameters
+    ----------
+    results: list of LinearPotentialFlowResult
+        The results that will be read.
+
+    Returns
+    -------
+    3-ple of (np.arrays or None)
+        The added mass matrix, the radiation damping matrix and the excitation force.
+        If the data are not available in the results, returns None instead.
+    """
+
+    ds = assemble_dataset(results)
+
+    if "added_mass" in ds:
+        A = np.atleast_2d(ds.added_mass.values.squeeze())
+    else:
+        A = None
+
+    if "radiation_damping" in ds:
+        B = np.atleast_2d(ds.radiation_damping.values.squeeze())
+    else:
+        B = None
+
+    if "excitation_force" in ds:
+        F = np.atleast_1d(ds.excitation_force.values.squeeze())
+    else:
+        F = None
+
+    return A, B, F
+
+
+
 ################################
 #  Handling of complex values  #
 ################################
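A usage sketch for the new `assemble_matrices`, assuming it is exported at the top level like `assemble_dataset` (otherwise import it from `capytaine.io.xarray`):

    import capytaine as cpt

    # results: solved radiation and diffraction results at a single frequency
    A, B, F = cpt.assemble_matrices(results)
    # A: added mass matrix, B: radiation damping matrix, F: excitation force;
    # each is None when the corresponding results are absent.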
@@ -514,3 +608,61 @@ def merge_complex_values(ds: xr.Dataset) -> xr.Dataset:
         ds[variable] = new_da
     ds = ds.drop_vars('complex')
     return ds
+
+
+##################
+#  Save dataset  #
+##################
+
+def save_dataset_as_netcdf(filename, dataset):
+    """Save `dataset` as a NetCDF file with name (or path) `filename`."""
+    ds = separate_complex_values(dataset)
+
+    # Workaround https://github.com/capytaine/capytaine/issues/683
+    ds['radiating_dof'] = ds['radiating_dof'].astype('str')
+    ds['influenced_dof'] = ds['influenced_dof'].astype('str')
+
+    # Make sure all strings are exported as strings and not Python objects
+    encoding = {'radiating_dof': {'dtype': 'U'},
+                'influenced_dof': {'dtype': 'U'}}
+
+    ds.to_netcdf(filename, encoding=encoding)
+
+
+def export_dataset(filename, dataset, format=None, **kwargs):
+    """Save `dataset` in a given format, either passed as the `format` argument or inferred from `filename`.
+
+    Parameters
+    ----------
+    filename: str or Path
+        Where to store the data.
+    dataset: xarray.Dataset
+        Dataset, which is assumed to have been computed by Capytaine.
+    format: str, optional
+        Format of the output. Accepted values: "netcdf", "wamit" and "nemoh".
+    **kwargs: optional
+        Remaining arguments are passed to the specific export function,
+        such as ``save_dataset_as_netcdf``, ``export_to_wamit`` or ``write_dataset_as_tecplot_files``.
+
+    Returns
+    -------
+    None
+    """
+    if (
+        (format is not None and format.lower() == "netcdf") or
+        (format is None and str(filename).endswith(".nc"))
+    ):
+        save_dataset_as_netcdf(filename, dataset, **kwargs)
+    elif (
+        (format is not None and format.lower() == "wamit")
+    ):
+        from capytaine.io.wamit import export_to_wamit
+        export_to_wamit(dataset, filename, **kwargs)
+    elif (
+        (format is not None and format.lower() == "nemoh")
+    ):
+        from capytaine.io.legacy import write_dataset_as_tecplot_files
+        write_dataset_as_tecplot_files(filename, dataset, **kwargs)
+    else:
+        raise ValueError("`export_dataset` could not infer export format based on filename or `format` argument.\n"
+                         f"provided filename: {filename}\nprovided format: {format}")
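Usage sketch for the new export entry point (the filenames are illustrative):

    from capytaine.io.xarray import export_dataset

    export_dataset("results.nc", dataset)               # NetCDF, inferred from the extension
    export_dataset("results", dataset, format="wamit")  # explicit WAMIT export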
capytaine/matrices/block.py CHANGED
@@ -185,7 +185,9 @@ class BlockMatrix:
             self._put_in_full_matrix(full_matrix)
         return full_matrix
 
-    def __array__(self, dtype=None):
+    def __array__(self, dtype=None, copy=True):
+        if not copy:
+            raise ValueError("Making an ndarray out of a BlockMatrix requires copy")
         return self.full_matrix(dtype=dtype)
 
     def no_toeplitz(self):
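This change follows the NumPy 2 `__array__` protocol, which may pass a `copy` argument; since densifying a block matrix always allocates a new full array, a no-copy request cannot be honored and raising is the conventional answer. Behavior sketch, with `block_matrix` a hypothetical instance:

    import numpy as np

    full = np.asarray(block_matrix)       # densifies into a regular ndarray (always a copy)
    np.asarray(block_matrix, copy=False)  # raises ValueError under NumPy 2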
@@ -587,4 +589,4 @@ class BlockMatrix:
         this_block = self
         for index in path:
             this_block = this_block.all_blocks[index, index]
-        return this_block
+        return this_block
capytaine/matrices/linear_solvers.py CHANGED
@@ -127,7 +127,7 @@ def solve_gmres(A, b):
 
     if LOG.isEnabledFor(logging.INFO):
         counter = Counter()
-        x, info = ssl.gmres(A, b, atol=1e-6, callback=counter)
+        x, info = ssl.gmres(A, b, atol=1e-6, callback=counter, callback_type="pr_norm")
         LOG.info(f"End of GMRES after {counter.nb_iter} iterations.")
 
     else:
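With `callback_type="pr_norm"`, SciPy calls the callback once per iteration with the preconditioned residual norm; passing it explicitly also avoids the warning SciPy emits when the default is left unspecified. A counter compatible with this calling convention, sketched:

    class Counter:
        """Count GMRES iterations through the callback interface."""
        def __init__(self):
            self.nb_iter = 0

        def __call__(self, residual_norm):
            # called once per iteration with the preconditioned residual norm
            self.nb_iter += 1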
capytaine/matrices/low_rank.py CHANGED
@@ -318,7 +318,9 @@ class LowRankMatrix:
         else:
             return self.left_matrix @ self.right_matrix
 
-    def __array__(self, dtype=None):
+    def __array__(self, dtype=None, copy=True):
+        if not copy:
+            raise ValueError("Making an ndarray out of a LowRankMatrix requires copy")
         return self.full_matrix(dtype=dtype)
 
     @property
capytaine/meshes/clipper.py CHANGED
@@ -30,14 +30,14 @@ def clip(source_mesh: Mesh, plane: Plane, vicinity_tol=1e-12, name=None):
     """
     vertices_data = _vertices_positions_wrt_plane(source_mesh, plane, vicinity_tol)
 
-    nb_vertices_above_or_on_plane = np.count_nonzero(
-        vertices_data['vertices_above_mask'] | vertices_data['vertices_on_mask']
+    nb_vertices_strictly_above_plane = np.count_nonzero(
+        vertices_data['vertices_above_mask']
     )
     nb_vertices_below_or_on_plane = np.count_nonzero(
         vertices_data['vertices_below_mask'] | vertices_data['vertices_on_mask']
     )
 
-    if nb_vertices_above_or_on_plane == source_mesh.nb_vertices:
+    if nb_vertices_strictly_above_plane == source_mesh.nb_vertices:
         LOG.warning(f"Clipping {source_mesh.name} by {plane}: all vertices are removed.")
         clipped_mesh = Mesh(None, None)
         clipped_mesh._clipping_data = dict(faces_ids=[])
@@ -63,6 +63,7 @@ def clip(source_mesh: Mesh, plane: Plane, vicinity_tol=1e-12, name=None):
     if name is None:
         clipped_mesh.name = f'{source_mesh.name}_clipped'
     clipped_mesh.remove_unused_vertices()
+    clipped_mesh.remove_degenerated_faces()
 
     return clipped_mesh
 
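The added `remove_degenerated_faces()` pass discards the zero-area faces that the plane intersection can leave behind. A hypothetical call of the module-level function (mesh and plane are illustrative):

    import capytaine as cpt
    from capytaine.meshes.clipper import clip
    from capytaine.meshes.geometry import Plane

    mesh = cpt.mesh_sphere(radius=1.0, center=(0.0, 0.0, 0.0))
    clipped = clip(mesh, Plane(normal=(0.0, 0.0, 1.0), point=(0.0, 0.0, 0.0)))
    # the result now contains neither unused vertices nor degenerate faces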
capytaine/meshes/collections.py CHANGED
@@ -223,6 +223,12 @@ class CollectionOfMeshes(ClippableMixin, SurfaceIntegralsMixin, Abstract3DObject
     # Transformation #
     ##################
 
+    def join_meshes(*meshes, name=None):
+        return CollectionOfMeshes(meshes, name=name)
+
+    def __add__(self, mesh_to_add):
+        return self.join_meshes(mesh_to_add)
+
     def merged(self, name=None) -> Mesh:
         """Merge the sub-meshes and return a full mesh.
         If the collection contains other collections, they are merged recursively.
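The `+` operator and `join_meshes` give collections the same joining API that the new MeshLike protocol (see the new file below) expects from every mesh class. Sketch, with two illustrative sphere meshes:

    import capytaine as cpt
    from capytaine.meshes.collections import CollectionOfMeshes

    pair = CollectionOfMeshes([cpt.mesh_sphere(center=(-2, 0, -2)),
                               cpt.mesh_sphere(center=(+2, 0, -2))])
    trio = pair + cpt.mesh_sphere(center=(0, 0, -4))  # same as pair.join_meshes(...)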
@@ -286,7 +292,11 @@ class CollectionOfMeshes(ClippableMixin, SurfaceIntegralsMixin, Abstract3DObject
     @inplace_transformation
     def prune_empty_meshes(self):
         """Remove empty meshes from the collection."""
-        self._meshes = tuple(mesh for mesh in self if mesh.nb_faces > 0 and mesh.nb_vertices > 0)
+        remaining_meshes = tuple(mesh for mesh in self if mesh.nb_faces > 0 and mesh.nb_vertices > 0)
+        if len(remaining_meshes) == 0:
+            self._meshes = (Mesh(name="empty_mesh"),)
+        else:
+            self._meshes = remaining_meshes
 
     @property
     def axis_aligned_bbox(self):
capytaine/meshes/mesh_like_protocol.py ADDED
@@ -0,0 +1,37 @@
+from typing import Tuple, Protocol, runtime_checkable
+from numpy.typing import ArrayLike
+
+
+@runtime_checkable
+class MeshLike(Protocol):
+    """Minimal API that a class describing a mesh should implement to be
+    usable with the rest of Capytaine.
+
+    The goal is two-fold:
+    1. Use at runtime to identify a mesh for functions that behave
+    differently depending on the type of the input (e.g. Delhommeau().evaluate).
+    2. Use as documentation for third-party mesh implementations.
+
+    In the future, it could also be used for static typing.
+    """
+    vertices: ArrayLike
+    faces: ArrayLike
+    nb_vertices: int
+    nb_faces: int
+    faces_centers: ArrayLike
+    faces_normals: ArrayLike
+    faces_areas: ArrayLike
+    faces_radiuses: ArrayLike
+    quadrature_points: Tuple[ArrayLike, ArrayLike]
+
+    def __short_str__(self) -> str:
+        ...
+
+    def extract_faces(self, faces_id):
+        ...
+
+    def join_meshes(*meshes):
+        ...
+
+    def with_normal_vector_going_down(self, **kwargs):
+        ...
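Because `MeshLike` is a `runtime_checkable` Protocol, an `isinstance` check only verifies that the listed attributes and methods exist, which is how Capytaine can accept third-party mesh objects. Sketch:

    import capytaine as cpt
    from capytaine.meshes.mesh_like_protocol import MeshLike

    print(isinstance(cpt.mesh_sphere(), MeshLike))  # True: Capytaine's own Mesh satisfies the protocol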