capytaine 2.3.1__cp314-cp314t-macosx_14_0_arm64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- capytaine/.dylibs/libgcc_s.1.1.dylib +0 -0
- capytaine/.dylibs/libgfortran.5.dylib +0 -0
- capytaine/.dylibs/libquadmath.0.dylib +0 -0
- capytaine/__about__.py +16 -0
- capytaine/__init__.py +36 -0
- capytaine/bem/__init__.py +0 -0
- capytaine/bem/airy_waves.py +111 -0
- capytaine/bem/engines.py +441 -0
- capytaine/bem/problems_and_results.py +600 -0
- capytaine/bem/solver.py +594 -0
- capytaine/bodies/__init__.py +4 -0
- capytaine/bodies/bodies.py +1221 -0
- capytaine/bodies/dofs.py +19 -0
- capytaine/bodies/predefined/__init__.py +6 -0
- capytaine/bodies/predefined/cylinders.py +151 -0
- capytaine/bodies/predefined/rectangles.py +111 -0
- capytaine/bodies/predefined/spheres.py +70 -0
- capytaine/green_functions/FinGreen3D/.gitignore +1 -0
- capytaine/green_functions/FinGreen3D/FinGreen3D.f90 +3589 -0
- capytaine/green_functions/FinGreen3D/LICENSE +165 -0
- capytaine/green_functions/FinGreen3D/Makefile +16 -0
- capytaine/green_functions/FinGreen3D/README.md +24 -0
- capytaine/green_functions/FinGreen3D/test_program.f90 +39 -0
- capytaine/green_functions/LiangWuNoblesse/.gitignore +1 -0
- capytaine/green_functions/LiangWuNoblesse/LICENSE +504 -0
- capytaine/green_functions/LiangWuNoblesse/LiangWuNoblesseWaveTerm.f90 +751 -0
- capytaine/green_functions/LiangWuNoblesse/Makefile +16 -0
- capytaine/green_functions/LiangWuNoblesse/README.md +2 -0
- capytaine/green_functions/LiangWuNoblesse/test_program.f90 +28 -0
- capytaine/green_functions/__init__.py +2 -0
- capytaine/green_functions/abstract_green_function.py +64 -0
- capytaine/green_functions/delhommeau.py +507 -0
- capytaine/green_functions/hams.py +204 -0
- capytaine/green_functions/libs/Delhommeau_float32.cpython-314t-darwin.so +0 -0
- capytaine/green_functions/libs/Delhommeau_float64.cpython-314t-darwin.so +0 -0
- capytaine/green_functions/libs/__init__.py +0 -0
- capytaine/io/__init__.py +0 -0
- capytaine/io/bemio.py +153 -0
- capytaine/io/legacy.py +328 -0
- capytaine/io/mesh_loaders.py +1086 -0
- capytaine/io/mesh_writers.py +692 -0
- capytaine/io/meshio.py +38 -0
- capytaine/io/wamit.py +479 -0
- capytaine/io/xarray.py +668 -0
- capytaine/matrices/__init__.py +16 -0
- capytaine/matrices/block.py +592 -0
- capytaine/matrices/block_toeplitz.py +325 -0
- capytaine/matrices/builders.py +89 -0
- capytaine/matrices/linear_solvers.py +232 -0
- capytaine/matrices/low_rank.py +395 -0
- capytaine/meshes/__init__.py +6 -0
- capytaine/meshes/clipper.py +465 -0
- capytaine/meshes/collections.py +342 -0
- capytaine/meshes/geometry.py +409 -0
- capytaine/meshes/mesh_like_protocol.py +37 -0
- capytaine/meshes/meshes.py +890 -0
- capytaine/meshes/predefined/__init__.py +6 -0
- capytaine/meshes/predefined/cylinders.py +314 -0
- capytaine/meshes/predefined/rectangles.py +261 -0
- capytaine/meshes/predefined/spheres.py +62 -0
- capytaine/meshes/properties.py +276 -0
- capytaine/meshes/quadratures.py +80 -0
- capytaine/meshes/quality.py +448 -0
- capytaine/meshes/surface_integrals.py +63 -0
- capytaine/meshes/symmetric.py +462 -0
- capytaine/post_pro/__init__.py +6 -0
- capytaine/post_pro/free_surfaces.py +88 -0
- capytaine/post_pro/impedance.py +92 -0
- capytaine/post_pro/kochin.py +54 -0
- capytaine/post_pro/rao.py +60 -0
- capytaine/tools/__init__.py +0 -0
- capytaine/tools/cache_on_disk.py +26 -0
- capytaine/tools/deprecation_handling.py +18 -0
- capytaine/tools/lists_of_points.py +52 -0
- capytaine/tools/lru_cache.py +49 -0
- capytaine/tools/optional_imports.py +27 -0
- capytaine/tools/prony_decomposition.py +150 -0
- capytaine/tools/symbolic_multiplication.py +149 -0
- capytaine/tools/timer.py +66 -0
- capytaine/ui/__init__.py +0 -0
- capytaine/ui/cli.py +28 -0
- capytaine/ui/rich.py +5 -0
- capytaine/ui/vtk/__init__.py +3 -0
- capytaine/ui/vtk/animation.py +329 -0
- capytaine/ui/vtk/body_viewer.py +28 -0
- capytaine/ui/vtk/helpers.py +82 -0
- capytaine/ui/vtk/mesh_viewer.py +461 -0
- capytaine-2.3.1.dist-info/LICENSE +674 -0
- capytaine-2.3.1.dist-info/METADATA +750 -0
- capytaine-2.3.1.dist-info/RECORD +92 -0
- capytaine-2.3.1.dist-info/WHEEL +6 -0
- capytaine-2.3.1.dist-info/entry_points.txt +3 -0
capytaine/io/xarray.py
ADDED
|
@@ -0,0 +1,668 @@
|
|
|
1
|
+
"""Tools to use xarray Datasets as inputs and outputs.
|
|
2
|
+
|
|
3
|
+
.. todo:: This module could be tidied up a bit and some methods merged or
|
|
4
|
+
uniformized.
|
|
5
|
+
"""
|
|
6
|
+
# Copyright (C) 2017-2025 Matthieu Ancellin
|
|
7
|
+
# See LICENSE file at <https://github.com/capytaine/capytaine>
|
|
8
|
+
|
|
9
|
+
import logging
|
|
10
|
+
from datetime import datetime
|
|
11
|
+
from itertools import product
|
|
12
|
+
from collections import Counter
|
|
13
|
+
from typing import Sequence, List, Union
|
|
14
|
+
|
|
15
|
+
import numpy as np
|
|
16
|
+
import pandas as pd
|
|
17
|
+
import xarray as xr
|
|
18
|
+
|
|
19
|
+
from capytaine import __version__
|
|
20
|
+
from capytaine.bodies.bodies import FloatingBody
|
|
21
|
+
from capytaine.bem.problems_and_results import (
|
|
22
|
+
LinearPotentialFlowProblem, DiffractionProblem, RadiationProblem,
|
|
23
|
+
LinearPotentialFlowResult, _default_parameters)
|
|
24
|
+
from capytaine.post_pro.kochin import compute_kochin
|
|
25
|
+
from capytaine.io.bemio import dataframe_from_bemio
|
|
26
|
+
|
|
27
|
+
|
|
28
|
+
LOG = logging.getLogger(__name__)
|
|
29
|
+
|
|
30
|
+
|
|
31
|
+
#########################
|
|
32
|
+
# Reading test matrix #
|
|
33
|
+
#########################
|
|
34
|
+
|
|
35
|
+
def _unsqueeze_dimensions(data_array, dimensions=None):
    """Promote scalar coordinates to dimensions of size one.

    Parameters
    ----------
    data_array : xarray object
        Array or dataset whose scalar coordinates should be expanded.
    dimensions : list of str, optional
        Names of the coordinates to consider.
        By default, every coordinate of ``data_array`` is considered.

    Returns
    -------
    xarray object
        Same data, with each formerly scalar coordinate now a length-1 dimension.
    """
    coords_to_expand = list(data_array.coords.keys()) if dimensions is None else dimensions
    for name in coords_to_expand:
        # A scalar coordinate is backed by a 0-dimensional array.
        if data_array.coords[name].values.ndim == 0:
            data_array = xr.concat([data_array], dim=name)
    return data_array
|
|
43
|
+
|
|
44
|
+
|
|
45
|
+
def problems_from_dataset(dataset: xr.Dataset,
                          bodies: Union[FloatingBody, Sequence[FloatingBody]],
                          ) -> List[LinearPotentialFlowProblem]:
    """Generate a list of problems from a test matrix.

    Parameters
    ----------
    dataset : xarray Dataset
        Test matrix containing the problems parameters.
    bodies : FloatingBody or list of FloatingBody
        The bodies on which the computations of the test matrix will be applied.
        They should all have different names.

    Returns
    -------
    list of LinearPotentialFlowProblem

    Raises
    ------
    ValueError
        if required fields are missing in the dataset
    """
    if isinstance(bodies, FloatingBody):
        bodies = [bodies]

    # Should be done before looking for `frequency_keys`, otherwise
    # frequencies provided as a scalar dimension will be skipped.
    dataset = _unsqueeze_dimensions(dataset)

    # SANITY CHECKS
    assert len(list(set(body.name for body in bodies))) == len(bodies), \
        "All bodies should have different names."

    # Warn user in case of key with unrecognized name (e.g. misspells)
    keys_in_dataset = set(dataset.dims)
    accepted_keys = {'wave_direction', 'radiating_dof', 'influenced_dof',
        'body_name', 'omega', 'freq', 'period', 'wavelength', 'wavenumber',
        'forward_speed', 'water_depth', 'rho', 'g', 'theta'}
    unrecognized_keys = keys_in_dataset.difference(accepted_keys)
    if len(unrecognized_keys) > 0:
        LOG.warning(f"Unrecognized key(s) in dataset: {unrecognized_keys}")

    if ("radiating_dof" not in keys_in_dataset) and ("wave_direction" not in keys_in_dataset):
        raise ValueError("Neither 'radiating_dof' nor 'wave_direction' has been provided in the dataset. "
                         "No linear potential flow problem can be inferred.")

    frequency_keys = keys_in_dataset & {'omega', 'freq', 'period', 'wavelength', 'wavenumber'}
    if len(frequency_keys) > 1:
        raise ValueError("Setting problems requires at most one of the following: omega (angular frequency) OR freq (in Hz) OR period OR wavenumber OR wavelength.\n"
                         "Received {}".format(frequency_keys))
    # END SANITY CHECKS

    if len(frequency_keys) == 0:
        freq_type = "omega"
        freq_range = [_default_parameters['omega']]
    else:  # len(frequency_keys) == 1
        freq_type = list(frequency_keys)[0]  # Get the only item
        freq_range = dataset[freq_type].data

    water_depth_range = dataset['water_depth'].data if 'water_depth' in dataset else [_default_parameters['water_depth']]
    rho_range = dataset['rho'].data if 'rho' in dataset else [_default_parameters['rho']]
    g_range = dataset['g'].data if 'g' in dataset else [_default_parameters['g']]
    # Fix: use `.data` like the other ranges above, so that iterating yields
    # plain scalars instead of 0-dimensional DataArrays.
    forward_speed_range = dataset['forward_speed'].data if 'forward_speed' in dataset else [_default_parameters['forward_speed']]

    wave_direction_range = dataset['wave_direction'].data if 'wave_direction' in dataset else None
    radiating_dofs = dataset['radiating_dof'].data.astype(object) if 'radiating_dof' in dataset else None
    # astype(object) is meant to convert Numpy internal string type numpy.str_ to Python general string type.

    if 'body_name' in dataset:
        assert set(dataset['body_name'].data) <= {body.name for body in bodies}, \
            "Some body named in the dataset was not given as argument to `problems_from_dataset`."
        body_range = {body.name: body for body in bodies if body.name in dataset['body_name'].data}
        # Only the bodies listed in the dataset have been kept
    else:
        body_range = {body.name: body for body in bodies}

    problems = []
    if wave_direction_range is not None:
        for freq, wave_direction, water_depth, body_name, forward_speed, rho, g \
                in product(freq_range, wave_direction_range, water_depth_range, body_range,
                           forward_speed_range, rho_range, g_range):
            problems.append(
                DiffractionProblem(body=body_range[body_name], **{freq_type: freq},
                                   wave_direction=wave_direction, water_depth=water_depth,
                                   forward_speed=forward_speed, rho=rho, g=g)
            )

    if radiating_dofs is not None:
        for freq, radiating_dof, water_depth, body_name, forward_speed, rho, g \
                in product(freq_range, radiating_dofs, water_depth_range, body_range, forward_speed_range, rho_range, g_range):
            if forward_speed == 0.0:
                problems.append(
                    RadiationProblem(body=body_range[body_name], **{freq_type: freq},
                                     radiating_dof=radiating_dof, water_depth=water_depth,
                                     forward_speed=forward_speed, rho=rho, g=g)
                )
            else:
                # With forward speed, the radiation problem depends on the wave direction.
                if wave_direction_range is None:
                    LOG.warning("Dataset contains non-zero forward speed (forward_speed=%.2f) but no wave_direction has been provided. Wave direction of 0 rad (x-axis) has been assumed.", forward_speed)
                    wave_direction_range = [0.0]
                for wave_direction in wave_direction_range:
                    problems.append(
                        RadiationProblem(body=body_range[body_name], **{freq_type: freq},
                                         radiating_dof=radiating_dof, water_depth=water_depth,
                                         forward_speed=forward_speed, wave_direction=wave_direction,
                                         rho=rho, g=g)
                    )

    return sorted(problems)
|
|
154
|
+
|
|
155
|
+
|
|
156
|
+
########################
|
|
157
|
+
# Dataframe creation #
|
|
158
|
+
########################
|
|
159
|
+
|
|
160
|
+
def _detect_bemio_results(results, calling_function="_detect_bemio_results"):
|
|
161
|
+
error_msg = (
|
|
162
|
+
f"The function {calling_function} expected either a non-empty list of LinearPotentialFlowResult or a bemio.io object.\n"
|
|
163
|
+
f"Instead, it received:\n{repr(results)}"
|
|
164
|
+
)
|
|
165
|
+
|
|
166
|
+
if hasattr(results, '__iter__'):
|
|
167
|
+
if len(results) == 0:
|
|
168
|
+
raise ValueError("Iterable provided to `assemble_dataset` is empty.")
|
|
169
|
+
try:
|
|
170
|
+
if 'capytaine' in results[0].__module__:
|
|
171
|
+
bemio_import = False
|
|
172
|
+
else:
|
|
173
|
+
raise TypeError(error_msg)
|
|
174
|
+
except:
|
|
175
|
+
raise TypeError(error_msg)
|
|
176
|
+
|
|
177
|
+
else:
|
|
178
|
+
try:
|
|
179
|
+
if 'bemio.io' in results.__module__:
|
|
180
|
+
bemio_import = True
|
|
181
|
+
else:
|
|
182
|
+
raise TypeError(error_msg)
|
|
183
|
+
except:
|
|
184
|
+
raise TypeError(error_msg)
|
|
185
|
+
|
|
186
|
+
return bemio_import
|
|
187
|
+
|
|
188
|
+
|
|
189
|
+
def assemble_dataframe(results, wavenumber=True, wavelength=True):
    """Gather the records of a list of results into a single pandas DataFrame.

    If `results` comes from Bemio instead, delegate to `dataframe_from_bemio`.
    """
    if _detect_bemio_results(results, calling_function="assemble_dataframe"):
        return dataframe_from_bemio(results, wavenumber, wavelength)  # TODO add hydrostatics

    df = pd.DataFrame([record for result in results for record in result.records])

    # A dict comprehension removes duplicates while conserving ordering.
    all_dofs_in_order = list({name: None for result in results for name in result.body.dofs.keys()})

    # Store the dof columns as categorical data so that their ordering is preserved.
    df["influenced_dof"] = df["influenced_dof"].astype(pd.CategoricalDtype(categories=all_dofs_in_order))
    if 'added_mass' in df.columns:
        df["radiating_dof"] = df["radiating_dof"].astype(pd.CategoricalDtype(categories=all_dofs_in_order))

    return df
|
|
205
|
+
|
|
206
|
+
|
|
207
|
+
######################
|
|
208
|
+
# Dataset creation #
|
|
209
|
+
######################
|
|
210
|
+
|
|
211
|
+
def _squeeze_dimensions(data_array, dimensions=None):
    """Drop length-1 dimensions, turning their coordinates into scalar coordinates."""
    dims_to_check = data_array.dims if dimensions is None else dimensions
    for name in dims_to_check:
        # Only squeeze out dimensions with a single value; keep the coordinate as a scalar.
        if len(data_array[name]) == 1:
            data_array = data_array.squeeze(name, drop=False)
    return data_array
|
|
219
|
+
|
|
220
|
+
|
|
221
|
+
def _dataset_from_dataframe(df: pd.DataFrame,
                            variables: Union[str, Sequence[str]],
                            dimensions: Sequence[str],
                            optional_dims: Sequence[str],
                            ) -> Union[xr.DataArray, xr.Dataset]:
    """Transform a pandas.Dataframe into a xarray.Dataset.

    Parameters
    ----------
    df: pandas.DataFrame
        the input dataframe
    variables: string or sequence of strings
        the variables that will be stored in the output dataset.
        If a single name is provided, a DataArray of this variable will be provided instead.
    dimensions: sequence of strings
        Names of dimensions the variables depends on.
        They will always appear as dimension in the output dataset.
    optional_dims: sequence of strings
        Names of dimensions the variables depends on.
        They will appears as dimension in the output dataset only if they have
        more than one different values.
    """
    index_columns = optional_dims + dimensions
    indexed_df = df.drop_duplicates(index_columns).set_index(index_columns)
    dataset = indexed_df.to_xarray()[variables]
    # Optional dimensions with a single value are reduced to scalar coordinates.
    dataset = _squeeze_dimensions(dataset, dimensions=optional_dims)
    return dataset
|
|
248
|
+
|
|
249
|
+
|
|
250
|
+
def hydrostatics_dataset(bodies: Sequence[FloatingBody]) -> xr.Dataset:
    """Create a dataset by looking for 'inertia_matrix' and 'hydrostatic_stiffness'
    for each of the bodies in the list passed as argument.

    Parameters
    ----------
    bodies : sequence of FloatingBody
        The bodies from which the hydrostatic data are collected.

    Returns
    -------
    xarray.Dataset
        Dataset containing at most the variables 'inertia_matrix' and
        'hydrostatic_stiffness', indexed by 'body_name' when more than one
        body provides the data.
    """
    dataset = xr.Dataset()
    for body_property in ['inertia_matrix', 'hydrostatic_stiffness']:
        # Idiomatic getattr() instead of the former body.__getattribute__(...).
        bodies_properties = {body.name: getattr(body, body_property) for body in bodies if hasattr(body, body_property)}
        if len(bodies_properties) > 0:
            bodies_properties = xr.concat(bodies_properties.values(), pd.Index(bodies_properties.keys(), name='body_name'))
            # Keep 'body_name' as a scalar coordinate when there is a single body.
            bodies_properties = _squeeze_dimensions(bodies_properties, dimensions=['body_name'])
            dataset = xr.merge([dataset, {body_property: bodies_properties}], compat="no_conflicts", join="outer")
    return dataset
|
|
262
|
+
|
|
263
|
+
|
|
264
|
+
def kochin_data_array(results: Sequence[LinearPotentialFlowResult],
                      theta_range: Sequence[float],
                      **kwargs,
                      ) -> xr.Dataset:
    """Compute the Kochin function for a list of results and fills a dataset.

    .. seealso::
        :meth:`~capytaine.post_pro.kochin.compute_kochin`
            The present function is just a wrapper around :code:`compute_kochin`.
    """
    # TODO: this not very good to mix computation and data manipulation here...
    # NOTE(review): `theta_range.data` assumes theta_range is an xarray object,
    # not a bare sequence as the annotation suggests — TODO confirm with callers.
    # One record per (result, angle) pair; 'kind' tags the result class so the
    # radiation and diffraction cases can be separated below.
    records = pd.DataFrame([
        dict(**result.problem._asdict(), theta=theta, kochin=kochin, kind=result.__class__.__name__)
        for result in results
        for theta, kochin in zip(theta_range.data,
                                 compute_kochin(result, theta_range, **kwargs))
    ])

    kochin_data = xr.Dataset()

    # Radiation results: Kochin function indexed by radiating dof.
    if "RadiationResult" in set(records['kind']):
        radiation = _dataset_from_dataframe(
            records[records['kind'] == "RadiationResult"],
            variables=['kochin'],
            dimensions=['omega', 'radiating_dof', 'theta'],
            optional_dims=['g', 'rho', 'body_name', 'water_depth', 'forward_speed', 'wave_direction']
        )
        kochin_data['kochin'] = radiation['kochin']

    # Diffraction results: Kochin function indexed by incoming wave direction.
    if "DiffractionResult" in set(records['kind']):
        diffraction = _dataset_from_dataframe(
            records[records['kind'] == "DiffractionResult"],
            ['kochin'],
            dimensions=['omega', 'wave_direction', 'theta'],
            optional_dims=['g', 'rho', 'body_name', 'water_depth', 'forward_speed']
        )
        kochin_data['kochin_diffraction'] = diffraction['kochin']

    return kochin_data
|
|
303
|
+
|
|
304
|
+
# Metadata (`long_name` and, where meaningful, `units`) attached by
# `assemble_dataset` to the matching variables and coordinates of the
# output dataset.
VARIABLES_ATTRIBUTES = {
    "omega": {
        'long_name': 'Angular frequency',
        'units': 'rad/s',
    },
    "freq": {
        'long_name': 'Frequency',
        'units': 'Hz',
    },
    "period": {
        'long_name': 'Period',
        'units': 's',
    },
    "wavenumber": {
        'long_name': "Angular wavenumber",
        'units': 'rad/m',
    },
    "wavelength": {
        'long_name': "Wave length",
        'units': 'm',
    },
    "encounter_omega": {
        'long_name': "Encounter angular frequency",
        'units': 'rad/s',
    },
    "encounter_wave_direction": {
        'long_name': "Encounter wave direction",
        'units': 'rad',
    },
    "wave_direction": {
        'long_name': "Wave direction",
        'units': "rad"
    },
    "radiating_dof": {
        'long_name': 'Radiating DOF',
    },
    "influenced_dof": {
        'long_name': 'Influenced DOF',
    },
    "added_mass": {
        'long_name': 'Added mass',
    },
    "radiation_damping": {
        'long_name': 'Radiation damping',
    },
    "diffraction_force": {
        'long_name': "Diffraction force",
    },
    "Froude_Krylov_force": {
        'long_name': "Froude Krylov force",
    },
}
|
|
356
|
+
|
|
357
|
+
def assemble_dataset(results,
                     omega=True, freq=True, wavenumber=True, wavelength=True, period=True,
                     mesh=False, hydrostatics=True, attrs=None) -> xr.Dataset:
    """Transform a list of :class:`LinearPotentialFlowResult` into a :class:`xarray.Dataset`.

    .. todo:: The :code:`mesh` option to store information on the mesh could be improved.
              It could store the full mesh in the dataset to ensure the reproducibility of
              the results.

    Parameters
    ----------
    results: list of LinearPotentialFlowResult or BEMIO dataset
        The results that will be read.
    omega: bool, optional
        If True, the coordinate 'omega' will be added to the output dataset.
    freq: bool, optional
        If True, the coordinate 'freq' will be added to the output dataset.
    wavenumber: bool, optional
        If True, the coordinate 'wavenumber' will be added to the output dataset.
    wavelength: bool, optional
        If True, the coordinate 'wavelength' will be added to the output dataset.
    period: bool, optional
        If True, the coordinate 'period' will be added to the output dataset.
    mesh: bool, optional
        If True, store some infos on the mesh in the output dataset.
    hydrostatics: bool, optional
        If True, store the hydrostatic data in the output dataset if they exist.
    attrs: dict, optional
        Attributes that should be added to the output dataset.
    """
    bemio_import = _detect_bemio_results(results, calling_function="assemble_dataset")

    records = assemble_dataframe(results)

    # The main frequency type is the one used as a dimension of the dataset;
    # the other requested frequency types are added as derived coordinates below.
    if bemio_import:
        main_freq_type = "omega"
    else:
        # Use the frequency type most commonly provided by the users' problems.
        main_freq_type = Counter((res.provided_freq_type for res in results)).most_common(1)[0][0]

    if np.any(records["free_surface"] != 0.0):
        LOG.warning("Datasets only support cases with a free surface (free_surface=0.0).\n"
                    "Cases without a free surface (free_surface=inf) are ignored.\n"
                    "See also https://github.com/mancellin/capytaine/issues/88")
        records = records[records["free_surface"] == 0.0]

    if attrs is None:
        attrs = {}
    attrs['creation_of_dataset'] = datetime.now().isoformat()

    kinds_of_results = set(records['kind'])

    # These parameters only become dimensions when they take several values.
    optional_dims = ['g', 'rho', 'body_name', 'water_depth', 'forward_speed']

    dataset = xr.Dataset()

    # RADIATION RESULTS
    if "RadiationResult" in kinds_of_results:
        radiation_cases = _dataset_from_dataframe(
            records[records['kind'] == "RadiationResult"],
            variables=['added_mass', 'radiation_damping'],
            dimensions=[main_freq_type, 'radiating_dof', 'influenced_dof'],
            optional_dims=optional_dims + ['wave_direction'])
        dataset = xr.merge([dataset, radiation_cases], compat="no_conflicts", join="outer")

    # DIFFRACTION RESULTS
    if "DiffractionResult" in kinds_of_results:
        diffraction_cases = _dataset_from_dataframe(
            records[records['kind'] == "DiffractionResult"],
            variables=['diffraction_force', 'Froude_Krylov_force'],
            dimensions=[main_freq_type, 'wave_direction', 'influenced_dof'],
            optional_dims=optional_dims)
        dataset = xr.merge([dataset, diffraction_cases], compat="no_conflicts", join="outer")
        # Total excitation force is the sum of its two components.
        dataset['excitation_force'] = dataset['Froude_Krylov_force'] + dataset['diffraction_force']

    # OTHER FREQUENCIES TYPES
    # Each block maps the main frequency dimension to a derived coordinate.
    # The conversion depends on g and water_depth only when it goes through
    # the dispersion relation (i.e. between {omega, freq, period} and
    # {wavelength, wavenumber}).
    if omega and main_freq_type != "omega":
        omega_ds = _dataset_from_dataframe(
            records,
            variables=['omega'],
            dimensions=[main_freq_type],
            optional_dims=['g', 'water_depth'] if main_freq_type in {'wavelength', 'wavenumber'} else []
        )
        dataset.coords['omega'] = omega_ds['omega']

    if freq and main_freq_type != "freq":
        freq_ds = _dataset_from_dataframe(
            records,
            variables=['freq'],
            dimensions=[main_freq_type],
            optional_dims=['g', 'water_depth'] if main_freq_type in {'wavelength', 'wavenumber'} else []
        )
        dataset.coords['freq'] = freq_ds['freq']

    if period and main_freq_type != "period":
        period_ds = _dataset_from_dataframe(
            records,
            variables=['period'],
            dimensions=[main_freq_type],
            optional_dims=['g', 'water_depth'] if main_freq_type in {'wavelength', 'wavenumber'} else []
        )
        dataset.coords['period'] = period_ds['period']

    if wavenumber and main_freq_type != "wavenumber":
        wavenumber_ds = _dataset_from_dataframe(
            records,
            variables=['wavenumber'],
            dimensions=[main_freq_type],
            optional_dims=['g', 'water_depth'] if main_freq_type in {'period', 'omega'} else []
        )
        dataset.coords['wavenumber'] = wavenumber_ds['wavenumber']

    if wavelength and main_freq_type != "wavelength":
        wavelength_ds = _dataset_from_dataframe(
            records,
            variables=['wavelength'],
            dimensions=[main_freq_type],
            optional_dims=['g', 'water_depth'] if main_freq_type in {'period', 'omega'} else []
        )
        dataset.coords['wavelength'] = wavelength_ds['wavelength']

    # Encounter frequency data only exist when some case has forward speed.
    if not all(records["forward_speed"] == 0.0):
        omegae_ds = _dataset_from_dataframe(
            records,
            variables=['encounter_omega'],
            dimensions=['forward_speed', 'wave_direction', main_freq_type],
            optional_dims=['g', 'water_depth'],
        )
        dataset.coords['encounter_omega'] = omegae_ds['encounter_omega']

        encounter_wave_direction_ds = _dataset_from_dataframe(
            records,
            variables=['encounter_wave_direction'],
            dimensions=['forward_speed', 'wave_direction', main_freq_type],
            optional_dims=[],
        )
        dataset.coords['encounter_wave_direction'] = encounter_wave_direction_ds['encounter_wave_direction']

    if mesh:
        if bemio_import:
            LOG.warning('Bemio data does not include mesh data. mesh=True is ignored.')
        else:
            # TODO: Store full mesh...
            bodies = list({result.body for result in results})  # Filter out duplicate bodies in the list of results
            nb_faces = {body.name: body.mesh.nb_faces for body in bodies}

            def name_or_str(c):
                # Quadrature methods may be objects with a `name` or plain values.
                return c.name if hasattr(c, 'name') else str(c)
            quad_methods = {body.name: name_or_str(body.mesh.quadrature_method) for body in bodies}

            if len(nb_faces) > 1:
                # Several bodies: store one value per 'body_name' coordinate entry.
                dataset.coords['nb_faces'] = ('body_name', [nb_faces[name] for name in dataset.coords['body_name'].data])
                dataset.coords['quadrature_method'] = ('body_name', [quad_methods[name] for name in dataset.coords['body_name'].data])
            else:
                def the_only(d):
                    """Return the only element of a 1-element dictionary"""
                    return next(iter(d.values()))
                dataset.coords['nb_faces'] = the_only(nb_faces)
                dataset.coords['quadrature_method'] = the_only(quad_methods)

    # HYDROSTATICS
    if hydrostatics:
        if bemio_import:
            LOG.warning('Bemio data import being used, hydrostatics=True is ignored.')
        else:
            bodies = list({result.body for result in results})
            dataset = xr.merge([dataset, hydrostatics_dataset(bodies)], compat="no_conflicts", join="outer")

    # Attach long names and units to the known variables and coordinates.
    for var in set(dataset) | set(dataset.coords):
        if var in VARIABLES_ATTRIBUTES:
            dataset[var].attrs.update(VARIABLES_ATTRIBUTES[var])

    dataset.attrs.update(attrs)
    dataset.attrs['capytaine_version'] = __version__
    return dataset
|
|
531
|
+
|
|
532
|
+
|
|
533
|
+
def assemble_matrices(results):
    """Simplified version of assemble_dataset, returning only bare matrices.
    Meant mainly for teaching without introducing Xarray to beginners.

    Parameters
    ----------
    results: list of LinearPotentialFlowResult
        The results that will be read.

    Returns
    -------
    3-ple of (np.arrays or None)
        The added mass matrix, the radiation damping matrix and the excitation force.
        If the data are not available in the results, returns None instead.
    """
    ds = assemble_dataset(results)

    # Extract each quantity as a bare numpy array, or None when absent.
    added_mass = np.atleast_2d(ds.added_mass.values.squeeze()) if "added_mass" in ds else None
    radiation_damping = np.atleast_2d(ds.radiation_damping.values.squeeze()) if "radiation_damping" in ds else None
    excitation_force = np.atleast_1d(ds.excitation_force.values.squeeze()) if "excitation_force" in ds else None

    return added_mass, radiation_damping, excitation_force
|
|
567
|
+
|
|
568
|
+
|
|
569
|
+
|
|
570
|
+
################################
|
|
571
|
+
# Handling of complex values #
|
|
572
|
+
################################
|
|
573
|
+
|
|
574
|
+
def separate_complex_values(ds: xr.Dataset) -> xr.Dataset:
    """Return a new Dataset where complex-valued arrays of shape (...)
    have been replaced by real-valued arrays of shape (2, ...).

    .. seealso::
        :func:`merge_complex_values`
            The invert operation
    """
    out = ds.copy()
    for name in out.data_vars:
        if out[name].dtype == complex:
            original = out[name]
            # Stack real and imaginary parts along a new leading 'complex' axis.
            stacked = np.asarray((np.real(original).data, np.imag(original).data))
            out[name] = xr.DataArray(stacked, dims=('complex',) + original.dims)
            out.coords['complex'] = ['re', 'im']
    return out
|
|
591
|
+
|
|
592
|
+
|
|
593
|
+
def merge_complex_values(ds: xr.Dataset) -> xr.Dataset:
    """Return a new Dataset where real-valued arrays of shape (2, ...)
    have been replaced by complex-valued arrays of shape (...).

    .. seealso::
        :func:`separate_complex_values`
            The invert operation
    """
    if 'complex' not in ds.coords:
        # Nothing to merge: return the dataset unchanged.
        return ds
    out = ds.copy()
    for name in out.data_vars:
        if 'complex' in out[name].coords:
            split_array = out[name]
            remaining_dims = [d for d in split_array.dims if d != 'complex']
            real_part = split_array.sel(complex='re').data
            imag_part = split_array.sel(complex='im').data
            out[name] = xr.DataArray(real_part + 1j * imag_part, dims=remaining_dims)
    return out.drop_vars('complex')
|
|
611
|
+
|
|
612
|
+
|
|
613
|
+
##################
|
|
614
|
+
# Save dataset #
|
|
615
|
+
##################
|
|
616
|
+
|
|
617
|
+
def save_dataset_as_netcdf(filename, dataset):
    """Save `dataset` as a NetCDF file with name (or path) `filename`"""
    exportable = separate_complex_values(dataset)

    dof_coords = ('radiating_dof', 'influenced_dof')

    # Workaround https://github.com/capytaine/capytaine/issues/683
    for coord in dof_coords:
        exportable[coord] = exportable[coord].astype('str')

    # Make sure all strings are exported as strings and not Python objects
    encoding = {coord: {'dtype': 'U'} for coord in dof_coords}

    exportable.to_netcdf(filename, encoding=encoding)
|
|
630
|
+
|
|
631
|
+
|
|
632
|
+
def export_dataset(filename, dataset, format=None, **kwargs):
    """Save `dataset` into a format, provided by the `format` argument or inferred by the `filename`.

    Parameters
    ----------
    filename: str or Path
        Where to store the data
    dataset: xarray.Dataset
        Dataset, which is assumed to have been computed by Capytaine
    format: str, optional
        Format of output. Accepted values: "netcdf", "wamit", "nemoh".
        When omitted, the format is inferred from the filename extension
        (only ".nc" for NetCDF is recognized).
    **kwargs: optional
        Remaining argument are passed to the specific export function,
        such as ``save_dataset_as_netcdf``, ``export_to_wamit`` or ``write_dataset_as_tecplot_files``.

    Returns
    -------
    None

    Raises
    ------
    ValueError
        If neither `format` nor the filename extension allows to infer the
        export format.
    """
    if (
            (format is not None and format.lower() == "netcdf") or
            (format is None and str(filename).endswith(".nc"))
    ):
        save_dataset_as_netcdf(filename, dataset, **kwargs)
    elif format is not None and format.lower() == "wamit":
        # Imported lazily to avoid a circular import at module load time.
        from capytaine.io.wamit import export_to_wamit
        export_to_wamit(dataset, filename, **kwargs)
    elif format is not None and format.lower() == "nemoh":
        from capytaine.io.legacy import write_dataset_as_tecplot_files
        write_dataset_as_tecplot_files(filename, dataset, **kwargs)
    else:
        # Fix: display the actual filename (the message previously contained
        # the literal placeholder "(unknown)").
        raise ValueError("`export_dataset` could not infer export format based on filename or `format` argument.\n"
                         f"provided filename: {filename}\nprovided format: {format}")
|
|
@@ -0,0 +1,16 @@
|
|
|
1
|
+
"""This module implements several classes describing matrices defined by blocks.
|
|
2
|
+
These matrices can be nested to recursively define Hierarchical matrices.
|
|
3
|
+
"""
|
|
4
|
+
# Copyright (C) 2017-2019 Matthieu Ancellin
|
|
5
|
+
# See LICENSE file at <https://github.com/mancellin/capytaine>
|
|
6
|
+
|
|
7
|
+
from capytaine.matrices.block import BlockMatrix
|
|
8
|
+
from capytaine.matrices.block_toeplitz import (
|
|
9
|
+
BlockToeplitzMatrix, BlockSymmetricToeplitzMatrix,
|
|
10
|
+
BlockCirculantMatrix, EvenBlockSymmetricCirculantMatrix, OddBlockSymmetricCirculantMatrix,
|
|
11
|
+
)
|
|
12
|
+
from capytaine.matrices.builders import (
|
|
13
|
+
cut_matrix, random_block_matrix,
|
|
14
|
+
full_like, zeros_like, ones_like, identity_like,
|
|
15
|
+
)
|
|
16
|
+
from capytaine.matrices.low_rank import LowRankMatrix
|