capytaine 2.2.1__cp38-cp38-macosx_14_0_arm64.whl → 2.3.1__cp38-cp38-macosx_14_0_arm64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- capytaine/.dylibs/libgcc_s.1.1.dylib +0 -0
- capytaine/.dylibs/libgfortran.5.dylib +0 -0
- capytaine/.dylibs/libquadmath.0.dylib +0 -0
- capytaine/__about__.py +1 -1
- capytaine/__init__.py +2 -1
- capytaine/bem/airy_waves.py +7 -2
- capytaine/bem/problems_and_results.py +91 -39
- capytaine/bem/solver.py +128 -40
- capytaine/bodies/bodies.py +46 -18
- capytaine/bodies/predefined/rectangles.py +2 -0
- capytaine/green_functions/FinGreen3D/.gitignore +1 -0
- capytaine/green_functions/FinGreen3D/FinGreen3D.f90 +3589 -0
- capytaine/green_functions/FinGreen3D/LICENSE +165 -0
- capytaine/green_functions/FinGreen3D/Makefile +16 -0
- capytaine/green_functions/FinGreen3D/README.md +24 -0
- capytaine/green_functions/FinGreen3D/test_program.f90 +39 -0
- capytaine/green_functions/LiangWuNoblesse/.gitignore +1 -0
- capytaine/green_functions/LiangWuNoblesse/LICENSE +504 -0
- capytaine/green_functions/LiangWuNoblesse/LiangWuNoblesseWaveTerm.f90 +751 -0
- capytaine/green_functions/LiangWuNoblesse/Makefile +16 -0
- capytaine/green_functions/LiangWuNoblesse/README.md +2 -0
- capytaine/green_functions/LiangWuNoblesse/test_program.f90 +28 -0
- capytaine/green_functions/abstract_green_function.py +55 -3
- capytaine/green_functions/delhommeau.py +205 -130
- capytaine/green_functions/hams.py +204 -0
- capytaine/green_functions/libs/Delhommeau_float32.cpython-38-darwin.so +0 -0
- capytaine/green_functions/libs/Delhommeau_float64.cpython-38-darwin.so +0 -0
- capytaine/io/bemio.py +14 -2
- capytaine/io/mesh_loaders.py +1 -1
- capytaine/io/wamit.py +479 -0
- capytaine/io/xarray.py +261 -117
- capytaine/matrices/linear_solvers.py +1 -1
- capytaine/meshes/clipper.py +1 -0
- capytaine/meshes/collections.py +19 -1
- capytaine/meshes/mesh_like_protocol.py +37 -0
- capytaine/meshes/meshes.py +28 -8
- capytaine/meshes/symmetric.py +89 -10
- capytaine/post_pro/kochin.py +4 -4
- capytaine/tools/lists_of_points.py +3 -3
- capytaine/tools/prony_decomposition.py +60 -4
- capytaine/tools/symbolic_multiplication.py +30 -4
- capytaine/tools/timer.py +66 -0
- {capytaine-2.2.1.dist-info → capytaine-2.3.1.dist-info}/METADATA +6 -10
- capytaine-2.3.1.dist-info/RECORD +92 -0
- capytaine-2.2.1.dist-info/RECORD +0 -76
- {capytaine-2.2.1.dist-info → capytaine-2.3.1.dist-info}/LICENSE +0 -0
- {capytaine-2.2.1.dist-info → capytaine-2.3.1.dist-info}/WHEEL +0 -0
- {capytaine-2.2.1.dist-info → capytaine-2.3.1.dist-info}/entry_points.txt +0 -0
capytaine/io/xarray.py
CHANGED
@@ -3,8 +3,8 @@
 .. todo:: This module could be tidied up a bit and some methods merged or
     uniformized.
 """
-# Copyright (C) 2017-…
-# See LICENSE file at <https://github.com/…
+# Copyright (C) 2017-2025 Matthieu Ancellin
+# See LICENSE file at <https://github.com/capytaine/capytaine>
 
 import logging
 from datetime import datetime
@@ -32,6 +32,16 @@ LOG = logging.getLogger(__name__)
 # Reading test matrix #
 #########################
 
+def _unsqueeze_dimensions(data_array, dimensions=None):
+    """Add scalar coordinates as dimensions of size 1."""
+    if dimensions is None:
+        dimensions = list(data_array.coords.keys())
+    for dim in dimensions:
+        if len(data_array.coords[dim].values.shape) == 0:
+            data_array = xr.concat([data_array], dim=dim)
+    return data_array
+
+
 def problems_from_dataset(dataset: xr.Dataset,
                           bodies: Union[FloatingBody, Sequence[FloatingBody]],
                           ) -> List[LinearPotentialFlowProblem]:
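Note: `_unsqueeze_dimensions` is not new code; it is moved up unchanged from further down the file (see the removal hunk below) so that it is defined before its first use. A minimal standalone sketch of what it does, using only xarray (the array and coordinate names here are illustrative):

    import xarray as xr

    da = xr.DataArray([1.0, 2.0], coords={"omega": [0.5, 1.0]}, dims=["omega"])
    scalar = da.isel(omega=0)           # "omega" becomes a scalar coordinate
    print(scalar.dims)                  # ()
    # xr.concat([...], dim=...) promotes the scalar coordinate back to a
    # dimension of size 1, which is all _unsqueeze_dimensions does:
    unsqueezed = xr.concat([scalar], dim="omega")
    print(unsqueezed.dims)              # ('omega',)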
@@ -68,7 +78,7 @@ def problems_from_dataset(dataset: xr.Dataset,
     # Warn user in case of key with unrecognized name (e.g. misspells)
     keys_in_dataset = set(dataset.dims)
     accepted_keys = {'wave_direction', 'radiating_dof', 'influenced_dof',
-                     'body_name', 'omega', 'period', 'wavelength', 'wavenumber',
+                     'body_name', 'omega', 'freq', 'period', 'wavelength', 'wavenumber',
                      'forward_speed', 'water_depth', 'rho', 'g', 'theta'}
     unrecognized_keys = keys_in_dataset.difference(accepted_keys)
     if len(unrecognized_keys) > 0:
@@ -78,9 +88,9 @@
         raise ValueError("Neither 'radiating_dof' nor 'wave_direction' has been provided in the dataset. "
                          "No linear potential flow problem can be inferred.")
 
-    frequency_keys = keys_in_dataset & {'omega', 'period', 'wavelength', 'wavenumber'}
+    frequency_keys = keys_in_dataset & {'omega', 'freq', 'period', 'wavelength', 'wavenumber'}
     if len(frequency_keys) > 1:
-        raise ValueError("Setting problems requires at most one of the following: omega (angular frequency) OR period OR wavenumber OR wavelength.\n"
+        raise ValueError("Setting problems requires at most one of the following: omega (angular frequency) OR freq (in Hz) OR period OR wavenumber OR wavelength.\n"
                          "Received {}".format(frequency_keys))
     # END SANITY CHECKS
 
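With `freq` accepted alongside `omega`, `period`, `wavelength` and `wavenumber`, a test matrix can now specify frequencies in Hz. A hedged sketch of such a dataset (the sphere body and the solver call are illustrative, not part of this diff):

    import numpy as np
    import xarray as xr
    import capytaine as cpt

    body = cpt.FloatingBody(mesh=cpt.mesh_sphere(), dofs=cpt.rigid_body_dofs())
    body = body.immersed_part()

    test_matrix = xr.Dataset(coords={
        "freq": np.linspace(0.1, 1.0, 10),   # in Hz; mutually exclusive with omega/period/...
        "wave_direction": [0.0],
        "radiating_dof": list(body.dofs),
    })
    dataset = cpt.BEMSolver().fill_dataset(test_matrix, bodies=body)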
@@ -111,20 +121,13 @@
     problems = []
     if wave_direction_range is not None:
         for freq, wave_direction, water_depth, body_name, forward_speed, rho, g \
-                in product(freq_range, wave_direction_range, water_depth_range, body_range,
-                           …
-            elif freq in {0.0, np.inf} and radiating_dofs is not None:
-                # Diffraction problems are not defined for 0 and infinite frequency.
-                # But we don't want the whole batch to fail, as these frequencies are there for the radiation problems.
-                # The excitation force will be NaN for these frequencies in the resulting dataset.
-                pass
-            else:
-                raise ValueError("Zero and infinite frequencies are not defined when solving only diffraction problems.")
+                in product(freq_range, wave_direction_range, water_depth_range, body_range,
+                           forward_speed_range, rho_range, g_range):
+            problems.append(
+                DiffractionProblem(body=body_range[body_name], **{freq_type: freq},
+                                   wave_direction=wave_direction, water_depth=water_depth,
+                                   forward_speed=forward_speed, rho=rho, g=g)
+            )
 
     if radiating_dofs is not None:
         for freq, radiating_dof, water_depth, body_name, forward_speed, rho, g \
@@ -150,6 +153,61 @@ def problems_from_dataset(dataset: xr.Dataset,
     return sorted(problems)
 
 
+########################
+# Dataframe creation #
+########################
+
+def _detect_bemio_results(results, calling_function="_detect_bemio_results"):
+    error_msg = (
+        f"The function {calling_function} expected either a non-empty list of LinearPotentialFlowResult or a bemio.io object.\n"
+        f"Instead, it received:\n{repr(results)}"
+    )
+
+    if hasattr(results, '__iter__'):
+        if len(results) == 0:
+            raise ValueError("Iterable provided to `assemble_dataset` is empty.")
+        try:
+            if 'capytaine' in results[0].__module__:
+                bemio_import = False
+            else:
+                raise TypeError(error_msg)
+        except:
+            raise TypeError(error_msg)
+
+    else:
+        try:
+            if 'bemio.io' in results.__module__:
+                bemio_import = True
+            else:
+                raise TypeError(error_msg)
+        except:
+            raise TypeError(error_msg)
+
+    return bemio_import
+
+
+def assemble_dataframe(results, wavenumber=True, wavelength=True):
+    if _detect_bemio_results(results, calling_function="assemble_dataframe"):
+        return dataframe_from_bemio(results, wavenumber, wavelength)  # TODO add hydrostatics
+
+    records_list = [record for result in results for record in result.records]
+    df = pd.DataFrame(records_list)
+
+    all_dofs_in_order = list({k: None for r in results for k in r.body.dofs.keys()})
+    # Using a dict above to remove duplicates while conserving ordering
+    inf_dof_cat = pd.CategoricalDtype(categories=all_dofs_in_order)
+    df["influenced_dof"] = df["influenced_dof"].astype(inf_dof_cat)
+    if 'added_mass' in df.columns:
+        rad_dof_cat = pd.CategoricalDtype(categories=all_dofs_in_order)
+        df["radiating_dof"] = df["radiating_dof"].astype(rad_dof_cat)
+
+    return df
+
+
+######################
+# Dataset creation #
+######################
+
 def _squeeze_dimensions(data_array, dimensions=None):
     """Remove dimensions if they are of size 1. The coordinates become scalar coordinates."""
     if dimensions is None:
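`assemble_dataframe` and `_detect_bemio_results` factor out logic that previously lived inside `assemble_dataset` (compare the removal hunk at @@ -306,78 further down). A hedged usage sketch, assuming `results` is a non-empty list of solved LinearPotentialFlowResult objects:

    from capytaine.io.xarray import assemble_dataframe

    df = assemble_dataframe(results)
    # One row per scalar record; the column set depends on the results, e.g.
    # 'omega', 'radiating_dof', 'influenced_dof', 'added_mass', ...
    print(df.columns.tolist())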
@@ -160,20 +218,6 @@ def _squeeze_dimensions(data_array, dimensions=None):
     return data_array
 
 
-def _unsqueeze_dimensions(data_array, dimensions=None):
-    """Add scalar coordinates as dimensions of size 1."""
-    if dimensions is None:
-        dimensions = list(data_array.coords.keys())
-    for dim in dimensions:
-        if len(data_array.coords[dim].values.shape) == 0:
-            data_array = xr.concat([data_array], dim=dim)
-    return data_array
-
-
-######################
-# Dataset creation #
-######################
-
 def _dataset_from_dataframe(df: pd.DataFrame,
                             variables: Union[str, Sequence[str]],
                             dimensions: Sequence[str],
@@ -196,12 +240,8 @@ def _dataset_from_dataframe(df: pd.DataFrame,
     They will appears as dimension in the output dataset only if they have
     more than one different values.
     """
-
-    for variable_name in variables:
-        df = df[df[variable_name].notnull()].dropna(axis='columns')  # Keep only records with non null values of all the variables
     df = df.drop_duplicates(optional_dims + dimensions)
     df = df.set_index(optional_dims + dimensions)
-
     da = df.to_xarray()[variables]
     da = _squeeze_dimensions(da, dimensions=optional_dims)
     return da
@@ -217,7 +257,7 @@ def hydrostatics_dataset(bodies: Sequence[FloatingBody]) -> xr.Dataset:
         if len(bodies_properties) > 0:
             bodies_properties = xr.concat(bodies_properties.values(), pd.Index(bodies_properties.keys(), name='body_name'))
             bodies_properties = _squeeze_dimensions(bodies_properties, dimensions=['body_name'])
-            dataset = xr.merge([dataset, {body_property: bodies_properties}])
+            dataset = xr.merge([dataset, {body_property: bodies_properties}], compat="no_conflicts", join="outer")
     return dataset
 
 
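The explicit `compat="no_conflicts", join="outer"` (here and in the other `xr.merge` calls below) presumably pins the merge semantics that older xarray applied by default, so that behaviour stays stable as xarray changes its defaults. A toy illustration of the outer join on disjoint coordinates:

    import xarray as xr

    a = xr.Dataset({"added_mass": ("omega", [1.0])}, coords={"omega": [1.0]})
    b = xr.Dataset({"added_mass": ("omega", [2.0])}, coords={"omega": [2.0]})
    merged = xr.merge([a, b], compat="no_conflicts", join="outer")
    print(merged.added_mass.values)  # [1. 2.]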
@@ -231,6 +271,7 @@ def kochin_data_array(results: Sequence[LinearPotentialFlowResult],
     :meth:`~capytaine.post_pro.kochin.compute_kochin`
         The present function is just a wrapper around :code:`compute_kochin`.
     """
+    # TODO: this not very good to mix computation and data manipulation here...
     records = pd.DataFrame([
         dict(**result.problem._asdict(), theta=theta, kochin=kochin, kind=result.__class__.__name__)
         for result in results
@@ -260,26 +301,61 @@
 
     return kochin_data
 
-    …
+VARIABLES_ATTRIBUTES = {
+    "omega": {
+        'long_name': 'Angular frequency',
+        'units': 'rad/s',
+    },
+    "freq": {
+        'long_name': 'Frequency',
+        'units': 'Hz',
+    },
+    "period": {
+        'long_name': 'Period',
+        'units': 's',
+    },
+    "wavenumber": {
+        'long_name': "Angular wavenumber",
+        'units': 'rad/m',
+    },
+    "wavelength": {
+        'long_name': "Wave length",
+        'units': 'm',
+    },
+    "encounter_omega": {
+        'long_name': "Encounter angular frequency",
+        'units': 'rad/s',
+    },
+    "encounter_wave_direction": {
+        'long_name': "Encounter wave direction",
+        'units': 'rad',
+    },
+    "wave_direction": {
+        'long_name': "Wave direction",
+        'units': "rad"
+    },
+    "radiating_dof": {
+        'long_name': 'Radiating DOF',
+    },
+    "influenced_dof": {
+        'long_name': 'Influenced DOF',
+    },
+    "added_mass": {
+        'long_name': 'Added mass',
+    },
+    "radiation_damping": {
+        'long_name': 'Radiation damping',
+    },
+    "diffraction_force": {
+        'long_name': "Diffraction force",
+    },
+    "Froude_Krylov_force": {
+        'long_name': "Froude Krylov force",
+    },
+}
 
 def assemble_dataset(results,
-                     omega=True, wavenumber=True, wavelength=True, period=True,
+                     omega=True, freq=True, wavenumber=True, wavelength=True, period=True,
                      mesh=False, hydrostatics=True, attrs=None) -> xr.Dataset:
     """Transform a list of :class:`LinearPotentialFlowResult` into a :class:`xarray.Dataset`.
 
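The `VARIABLES_ATTRIBUTES` table replaces the scattered per-variable `attrs` assignments removed further down: metadata is now applied in a single pass at the end of `assemble_dataset`. A standalone sketch of the mechanism:

    import xarray as xr

    VARIABLES_ATTRIBUTES = {"omega": {"long_name": "Angular frequency", "units": "rad/s"}}
    ds = xr.Dataset(coords={"omega": [1.0, 2.0]})
    for var in set(ds) | set(ds.coords):
        if var in VARIABLES_ATTRIBUTES:
            ds[var].attrs.update(VARIABLES_ATTRIBUTES[var])
    print(ds.omega.attrs)  # {'long_name': 'Angular frequency', 'units': 'rad/s'}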
@@ -289,10 +365,12 @@ def assemble_dataset(results,
 
     Parameters
     ----------
-    results: list of LinearPotentialFlowResult
+    results: list of LinearPotentialFlowResult or BEMIO dataset
         The results that will be read.
     omega: bool, optional
         If True, the coordinate 'omega' will be added to the output dataset.
+    freq: bool, optional
+        If True, the coordinate 'freq' will be added to the output dataset.
     wavenumber: bool, optional
         If True, the coordinate 'wavenumber' will be added to the output dataset.
     wavelength: bool, optional
@@ -306,78 +384,48 @@ def assemble_dataset(results,
     attrs: dict, optional
         Attributes that should be added to the output dataset.
     """
-    …
+    bemio_import = _detect_bemio_results(results, calling_function="assemble_dataset")
 
-    …
-    if hasattr(results, '__iter__'):
-        try:
-            if 'capytaine' in results[0].__module__:
-                bemio_import = False
-            else:
-                raise TypeError(error_msg)
-        except:
-            raise TypeError(error_msg)
-
-    else:
-        try:
-            if 'bemio.io' in results.__module__:
-                bemio_import = True
-            else:
-                raise TypeError(error_msg)
-        except:
-            raise TypeError(error_msg)
+    records = assemble_dataframe(results)
 
     if bemio_import:
-        records = dataframe_from_bemio(results, wavenumber, wavelength)  # TODO add hydrostatics
-        all_dofs_in_order = {'Surge': None, 'Sway': None, 'Heave': None, 'Roll': None, 'Pitch': None, 'Yaw': None}
         main_freq_type = "omega"
-
     else:
-        records = pd.DataFrame(collect_records(results))
-        all_dofs_in_order = {k: None for r in results for k in r.body.dofs.keys()}
         main_freq_type = Counter((res.provided_freq_type for res in results)).most_common(1)[0][0]
 
+    if np.any(records["free_surface"] != 0.0):
+        LOG.warning("Datasets only support cases with a free surface (free_surface=0.0).\n"
+                    "Cases without a free surface (free_surface=inf) are ignored.\n"
+                    "See also https://github.com/mancellin/capytaine/issues/88")
+        records = records[records["free_surface"] == 0.0]
+
     if attrs is None:
         attrs = {}
     attrs['creation_of_dataset'] = datetime.now().isoformat()
 
-    …
-        raise ValueError("No result passed to assemble_dataset.")
-
-    inf_dof_cat = pd.CategoricalDtype(categories=all_dofs_in_order.keys())
-    records["influenced_dof"] = records["influenced_dof"].astype(inf_dof_cat)
-    rad_dof_cat = pd.CategoricalDtype(categories=all_dofs_in_order.keys())
-    if 'added_mass' in records.columns:
-        records["radiating_dof"] = records["radiating_dof"].astype(rad_dof_cat)
+    kinds_of_results = set(records['kind'])
 
     optional_dims = ['g', 'rho', 'body_name', 'water_depth', 'forward_speed']
 
+    dataset = xr.Dataset()
+
     # RADIATION RESULTS
-    if …
+    if "RadiationResult" in kinds_of_results:
         radiation_cases = _dataset_from_dataframe(
-            records,
+            records[records['kind'] == "RadiationResult"],
             variables=['added_mass', 'radiation_damping'],
             dimensions=[main_freq_type, 'radiating_dof', 'influenced_dof'],
             optional_dims=optional_dims + ['wave_direction'])
-
-        radiation_cases.radiation_damping.attrs['long_name'] = 'Radiation damping'
-        radiation_cases.radiating_dof.attrs['long_name'] = 'Radiating DOF'
-        radiation_cases.influenced_dof.attrs['long_name'] = 'Influenced DOF'
-        dataset = xr.merge([dataset, radiation_cases])
+        dataset = xr.merge([dataset, radiation_cases], compat="no_conflicts", join="outer")
 
     # DIFFRACTION RESULTS
-    if …
+    if "DiffractionResult" in kinds_of_results:
         diffraction_cases = _dataset_from_dataframe(
-            records,
+            records[records['kind'] == "DiffractionResult"],
             variables=['diffraction_force', 'Froude_Krylov_force'],
             dimensions=[main_freq_type, 'wave_direction', 'influenced_dof'],
             optional_dims=optional_dims)
-
-        diffraction_cases.Froude_Krylov_force.attrs['long_name'] = 'Froude Krylov force'
-        diffraction_cases.influenced_dof.attrs['long_name'] = 'Influenced DOF'
-        diffraction_cases.wave_direction.attrs['long_name'] = 'Wave direction'
-        diffraction_cases.wave_direction.attrs['units'] = 'rad'
-        dataset = xr.merge([dataset, diffraction_cases])
+        dataset = xr.merge([dataset, diffraction_cases], compat="no_conflicts", join="outer")
         dataset['excitation_force'] = dataset['Froude_Krylov_force'] + dataset['diffraction_force']
 
     # OTHER FREQUENCIES TYPES
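The rewritten body delegates record collection to `assemble_dataframe` and selects radiation and diffraction rows via the 'kind' column instead of inferring them from the available columns (the truncated `if` lines above). End-user code is unchanged; a hedged sketch:

    import capytaine as cpt

    solver = cpt.BEMSolver()
    results = solver.solve_all(problems)   # `problems` defined elsewhere
    ds = cpt.assemble_dataset(results)
    print(ds.added_mass.dims)              # e.g. ('omega', 'radiating_dof', 'influenced_dof')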
@@ -389,8 +437,15 @@ def assemble_dataset(results,
             optional_dims=['g', 'water_depth'] if main_freq_type in {'wavelength', 'wavenumber'} else []
         )
         dataset.coords['omega'] = omega_ds['omega']
-        dataset.omega.attrs['long_name'] = 'Angular frequency'
-        dataset.omega.attrs['units'] = 'rad/s'
+
+    if freq and main_freq_type != "freq":
+        freq_ds = _dataset_from_dataframe(
+            records,
+            variables=['freq'],
+            dimensions=[main_freq_type],
+            optional_dims=['g', 'water_depth'] if main_freq_type in {'wavelength', 'wavenumber'} else []
+        )
+        dataset.coords['freq'] = freq_ds['freq']
 
     if period and main_freq_type != "period":
         period_ds = _dataset_from_dataframe(
@@ -400,8 +455,6 @@ def assemble_dataset(results,
             optional_dims=['g', 'water_depth'] if main_freq_type in {'wavelength', 'wavenumber'} else []
         )
         dataset.coords['period'] = period_ds['period']
-        dataset.period.attrs['long_name'] = 'Period'
-        dataset.period.attrs['units'] = 's'
 
     if wavenumber and main_freq_type != "wavenumber":
         wavenumber_ds = _dataset_from_dataframe(
@@ -411,8 +464,6 @@
             optional_dims=['g', 'water_depth'] if main_freq_type in {'period', 'omega'} else []
         )
         dataset.coords['wavenumber'] = wavenumber_ds['wavenumber']
-        dataset.wavenumber.attrs['long_name'] = 'Angular wavenumber'
-        dataset.wavenumber.attrs['units'] = 'rad/m'
 
     if wavelength and main_freq_type != "wavelength":
         wavelength_ds = _dataset_from_dataframe(
@@ -422,8 +473,6 @@
             optional_dims=['g', 'water_depth'] if main_freq_type in {'period', 'omega'} else []
         )
         dataset.coords['wavelength'] = wavelength_ds['wavelength']
-        dataset.wavelength.attrs['long_name'] = 'Wave length'
-        dataset.wavelength.attrs['units'] = 'm'
 
     if not all(records["forward_speed"] == 0.0):
         omegae_ds = _dataset_from_dataframe(
@@ -433,8 +482,6 @@
             optional_dims=['g', 'water_depth'],
         )
         dataset.coords['encounter_omega'] = omegae_ds['encounter_omega']
-        dataset.encounter_omega.attrs['long_name'] = 'Encounter angular frequency'
-        dataset.encounter_omega.attrs['units'] = 'rad/s'
 
         encounter_wave_direction_ds = _dataset_from_dataframe(
             records,
@@ -443,8 +490,6 @@
             optional_dims=[],
         )
         dataset.coords['encounter_wave_direction'] = encounter_wave_direction_ds['encounter_wave_direction']
-        dataset.encounter_wave_direction.attrs['long_name'] = 'Encounter wave direction'
-        dataset.encounter_wave_direction.attrs['units'] = 'rad'
 
     if mesh:
         if bemio_import:
@@ -474,13 +519,54 @@
             LOG.warning('Bemio data import being used, hydrostatics=True is ignored.')
         else:
             bodies = list({result.body for result in results})
-            dataset = xr.merge([dataset, hydrostatics_dataset(bodies)])
+            dataset = xr.merge([dataset, hydrostatics_dataset(bodies)], compat="no_conflicts", join="outer")
+
+    for var in set(dataset) | set(dataset.coords):
+        if var in VARIABLES_ATTRIBUTES:
+            dataset[var].attrs.update(VARIABLES_ATTRIBUTES[var])
 
     dataset.attrs.update(attrs)
     dataset.attrs['capytaine_version'] = __version__
     return dataset
 
 
+def assemble_matrices(results):
+    """Simplified version of assemble_dataset, returning only bare matrices.
+    Meant mainly for teaching without introducing Xarray to beginers.
+
+    Parameters
+    ----------
+    results: list of LinearPotentialFlowResult
+        The results that will be read.
+
+    Returns
+    -------
+    3-ple of (np.arrays or None)
+        The added mass matrix, the radiation damping matrix and the excitation force.
+        If the data are no available in the results, returns None instead.
+    """
+    ds = assemble_dataset(results)
+
+    if "added_mass" in ds:
+        A = np.atleast_2d(ds.added_mass.values.squeeze())
+    else:
+        A = None
+
+    if "radiation_damping" in ds:
+        B = np.atleast_2d(ds.radiation_damping.values.squeeze())
+    else:
+        B = None
+
+    if "excitation_force" in ds:
+        F = np.atleast_1d(ds.excitation_force.values.squeeze())
+    else:
+        F = None
+
+    return A, B, F
+
+
 ################################
 # Handling of complex values #
 ################################
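A hedged usage sketch for the new `assemble_matrices` (assuming a single-frequency batch of results, so that the squeezed arrays are plain 2D matrices and a 1D vector):

    from capytaine.io.xarray import assemble_matrices

    A, B, F = assemble_matrices(results)
    # A: added mass matrix, B: radiation damping matrix,
    # F: excitation force vector; each is None if absent from the results.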
@@ -522,3 +608,61 @@ def merge_complex_values(ds: xr.Dataset) -> xr.Dataset:
         ds[variable] = new_da
     ds = ds.drop_vars('complex')
     return ds
+
+
+##################
+# Save dataset #
+##################
+
+def save_dataset_as_netcdf(filename, dataset):
+    """Save `dataset` as a NetCDF file with name (or path) `filename`"""
+    ds = separate_complex_values(dataset)
+
+    # Workaround https://github.com/capytaine/capytaine/issues/683
+    ds['radiating_dof'] = ds['radiating_dof'].astype('str')
+    ds['influenced_dof'] = ds['influenced_dof'].astype('str')
+
+    # Make sure all strings are exported as strings and not Python objects
+    encoding = {'radiating_dof': {'dtype': 'U'},
+                'influenced_dof': {'dtype': 'U'}}
+
+    ds.to_netcdf(filename, encoding=encoding)
+
+
+def export_dataset(filename, dataset, format=None, **kwargs):
+    """Save `dataset` into a format, provided by the `format` argument or inferred by the `filename`.
+
+    Parameters
+    ----------
+    filename: str or Path
+        Where to store the data
+    dataset: xarray.Dataset
+        Dataset, which is assumed to have been computed by Capytaine
+    format: str, optional
+        Format of output. Accepted values: "netcdf"
+    **kwargs: optional
+        Remaining argument are passed to the specific export function,
+        such as ``save_dataset_as_netcdf``, ``export_to_wamit`` or ``write_dataset_as_tecplot_files``.
+
+    Returns
+    -------
+    None
+    """
+    if (
+            (format is not None and format.lower() == "netcdf") or
+            (format is None and str(filename).endswith(".nc"))
+    ):
+        save_dataset_as_netcdf(filename, dataset, **kwargs)
+    elif (
+            (format is not None and format.lower() == "wamit")
+    ):
+        from capytaine.io.wamit import export_to_wamit
+        export_to_wamit(dataset, filename, **kwargs)
+    elif (
+            (format is not None and format.lower() == "nemoh")
+    ):
+        from capytaine.io.legacy import write_dataset_as_tecplot_files
+        write_dataset_as_tecplot_files(filename, dataset, **kwargs)
+    else:
+        raise ValueError("`export_dataset` could not infer export format based on filename or `format` argument.\n"
+                         f"provided filename: {filename}\nprovided format: {format}")
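A hedged usage sketch for the new export entry point (filenames are illustrative; the WAMIT and Nemoh branches delegate to `capytaine.io.wamit.export_to_wamit` and `capytaine.io.legacy.write_dataset_as_tecplot_files` respectively):

    from capytaine.io.xarray import export_dataset

    export_dataset("results.nc", dataset)               # format inferred from the .nc suffix
    export_dataset("results", dataset, format="wamit")  # explicit format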
capytaine/matrices/linear_solvers.py
CHANGED

@@ -127,7 +127,7 @@ def solve_gmres(A, b):
 
     if LOG.isEnabledFor(logging.INFO):
         counter = Counter()
-        x, info = ssl.gmres(A, b, atol=1e-6, callback=counter)
+        x, info = ssl.gmres(A, b, atol=1e-6, callback=counter, callback_type="pr_norm")
         LOG.info(f"End of GMRES after {counter.nb_iter} iterations.")
 
     else:
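Passing `callback_type` explicitly avoids SciPy's legacy callback behaviour (and its deprecation warning); with "pr_norm" the callback receives the preconditioned residual norm once per iteration, so the logged iteration count stays meaningful. A standalone sketch of the counting pattern (this `Counter` is illustrative, not the one used in linear_solvers.py):

    import numpy as np
    from scipy.sparse import linalg as ssl

    class Counter:
        def __init__(self):
            self.nb_iter = 0
        def __call__(self, pr_norm):
            self.nb_iter += 1

    A = np.eye(100) + 0.01 * np.ones((100, 100))
    b = np.ones(100)
    counter = Counter()
    x, info = ssl.gmres(A, b, atol=1e-6, callback=counter, callback_type="pr_norm")
    print(info, counter.nb_iter)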
capytaine/meshes/clipper.py
CHANGED
capytaine/meshes/collections.py
CHANGED
@@ -223,6 +223,20 @@ class CollectionOfMeshes(ClippableMixin, SurfaceIntegralsMixin, Abstract3DObject
     # Transformation #
     ##################
 
+    def join_meshes(*meshes, name=None, return_masks=False):
+        coll = CollectionOfMeshes(meshes, name=name)
+        if return_masks:
+            masks = []
+            for i_mesh in range(len(meshes)):
+                mask = np.full((coll.nb_faces,), False)
+                mask[coll.indices_of_mesh(i_mesh)] = True
+                masks.append(mask)
+            return coll, masks
+        return coll
+
+    def __add__(self, mesh_to_add):
+        return self.join_meshes(mesh_to_add)
+
     def merged(self, name=None) -> Mesh:
         """Merge the sub-meshes and return a full mesh.
         If the collection contains other collections, they are merged recursively.
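A hedged usage sketch of `join_meshes` with masks (note the `*meshes` signature: the method has no explicit `self`, so it can be called on an instance or with all meshes passed positionally):

    import capytaine as cpt
    from capytaine.meshes.collections import CollectionOfMeshes

    mesh_1 = cpt.mesh_sphere(center=(0.0, 0.0, -2.0))
    mesh_2 = cpt.mesh_sphere(center=(5.0, 0.0, -2.0))

    coll, masks = CollectionOfMeshes.join_meshes(mesh_1, mesh_2, return_masks=True)
    assert masks[0].sum() == mesh_1.nb_faces   # faces of mesh_1 within the joined mesh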
@@ -286,7 +300,11 @@ class CollectionOfMeshes(ClippableMixin, SurfaceIntegralsMixin, Abstract3DObject
     @inplace_transformation
     def prune_empty_meshes(self):
         """Remove empty meshes from the collection."""
-        …
+        remaining_meshes = tuple(mesh for mesh in self if mesh.nb_faces > 0 and mesh.nb_vertices > 0)
+        if len(remaining_meshes) == 0:
+            self._meshes = (Mesh(name="empty_mesh"),)
+        else:
+            self._meshes = remaining_meshes
 
     @property
     def axis_aligned_bbox(self):
capytaine/meshes/mesh_like_protocol.py
ADDED

@@ -0,0 +1,37 @@
+from typing import Tuple, Protocol, runtime_checkable
+from numpy.typing import ArrayLike
+
+
+@runtime_checkable
+class MeshLike(Protocol):
+    """Minimal API that a class describing a mesh should implement to be
+    usable with the rest of Capytaine.
+
+    The goal is two-fold:
+    1. Use at runtime to identify a mesh for functions that behaves
+    differently depending on the type of the input (e.g. Delhommeau().evaluate).
+    2. Use as documentation for third-party mesh implementation.
+
+    In the future, it could also be used for static typing.
+    """
+    vertices: ArrayLike
+    faces: ArrayLike
+    nb_vertices: int
+    nb_faces: int
+    faces_centers: ArrayLike
+    faces_normals: ArrayLike
+    faces_areas: ArrayLike
+    faces_radiuses: ArrayLike
+    quadrature_points: Tuple[ArrayLike, ArrayLike]
+
+    def __short_str__(self) -> str:
+        ...
+
+    def extract_faces(self, faces_id):
+        ...
+
+    def join_meshes(*meshes, return_mask):
+        ...
+
+    def with_normal_vector_going_down(self, **kwargs):
+        ...
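Since the protocol is `@runtime_checkable`, `isinstance` only verifies that the listed attributes and methods exist (not their types or signatures), which is enough for dispatch code such as `Delhommeau().evaluate` accepting third-party mesh objects. A hedged sketch:

    import capytaine as cpt
    from capytaine.meshes.mesh_like_protocol import MeshLike

    print(isinstance(cpt.mesh_sphere(), MeshLike))   # expected: True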