capytaine 2.2__cp38-cp38-macosx_11_0_arm64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (76) hide show
  1. capytaine/.dylibs/libgcc_s.1.1.dylib +0 -0
  2. capytaine/.dylibs/libgfortran.5.dylib +0 -0
  3. capytaine/.dylibs/libquadmath.0.dylib +0 -0
  4. capytaine/__about__.py +16 -0
  5. capytaine/__init__.py +35 -0
  6. capytaine/bem/__init__.py +0 -0
  7. capytaine/bem/airy_waves.py +106 -0
  8. capytaine/bem/engines.py +441 -0
  9. capytaine/bem/problems_and_results.py +545 -0
  10. capytaine/bem/solver.py +497 -0
  11. capytaine/bodies/__init__.py +4 -0
  12. capytaine/bodies/bodies.py +1185 -0
  13. capytaine/bodies/dofs.py +19 -0
  14. capytaine/bodies/predefined/__init__.py +6 -0
  15. capytaine/bodies/predefined/cylinders.py +151 -0
  16. capytaine/bodies/predefined/rectangles.py +109 -0
  17. capytaine/bodies/predefined/spheres.py +70 -0
  18. capytaine/green_functions/__init__.py +2 -0
  19. capytaine/green_functions/abstract_green_function.py +12 -0
  20. capytaine/green_functions/delhommeau.py +432 -0
  21. capytaine/green_functions/libs/Delhommeau_float32.cpython-38-darwin.so +0 -0
  22. capytaine/green_functions/libs/Delhommeau_float64.cpython-38-darwin.so +0 -0
  23. capytaine/green_functions/libs/__init__.py +0 -0
  24. capytaine/io/__init__.py +0 -0
  25. capytaine/io/bemio.py +141 -0
  26. capytaine/io/legacy.py +328 -0
  27. capytaine/io/mesh_loaders.py +1085 -0
  28. capytaine/io/mesh_writers.py +692 -0
  29. capytaine/io/meshio.py +38 -0
  30. capytaine/io/xarray.py +516 -0
  31. capytaine/matrices/__init__.py +16 -0
  32. capytaine/matrices/block.py +590 -0
  33. capytaine/matrices/block_toeplitz.py +325 -0
  34. capytaine/matrices/builders.py +89 -0
  35. capytaine/matrices/linear_solvers.py +232 -0
  36. capytaine/matrices/low_rank.py +393 -0
  37. capytaine/meshes/__init__.py +6 -0
  38. capytaine/meshes/clipper.py +464 -0
  39. capytaine/meshes/collections.py +324 -0
  40. capytaine/meshes/geometry.py +409 -0
  41. capytaine/meshes/meshes.py +868 -0
  42. capytaine/meshes/predefined/__init__.py +6 -0
  43. capytaine/meshes/predefined/cylinders.py +314 -0
  44. capytaine/meshes/predefined/rectangles.py +261 -0
  45. capytaine/meshes/predefined/spheres.py +62 -0
  46. capytaine/meshes/properties.py +242 -0
  47. capytaine/meshes/quadratures.py +80 -0
  48. capytaine/meshes/quality.py +448 -0
  49. capytaine/meshes/surface_integrals.py +63 -0
  50. capytaine/meshes/symmetric.py +383 -0
  51. capytaine/post_pro/__init__.py +6 -0
  52. capytaine/post_pro/free_surfaces.py +88 -0
  53. capytaine/post_pro/impedance.py +92 -0
  54. capytaine/post_pro/kochin.py +54 -0
  55. capytaine/post_pro/rao.py +60 -0
  56. capytaine/tools/__init__.py +0 -0
  57. capytaine/tools/cache_on_disk.py +26 -0
  58. capytaine/tools/deprecation_handling.py +18 -0
  59. capytaine/tools/lists_of_points.py +52 -0
  60. capytaine/tools/lru_cache.py +49 -0
  61. capytaine/tools/optional_imports.py +27 -0
  62. capytaine/tools/prony_decomposition.py +94 -0
  63. capytaine/tools/symbolic_multiplication.py +107 -0
  64. capytaine/ui/__init__.py +0 -0
  65. capytaine/ui/cli.py +28 -0
  66. capytaine/ui/rich.py +5 -0
  67. capytaine/ui/vtk/__init__.py +3 -0
  68. capytaine/ui/vtk/animation.py +329 -0
  69. capytaine/ui/vtk/body_viewer.py +28 -0
  70. capytaine/ui/vtk/helpers.py +82 -0
  71. capytaine/ui/vtk/mesh_viewer.py +461 -0
  72. capytaine-2.2.dist-info/LICENSE +674 -0
  73. capytaine-2.2.dist-info/METADATA +751 -0
  74. capytaine-2.2.dist-info/RECORD +76 -0
  75. capytaine-2.2.dist-info/WHEEL +4 -0
  76. capytaine-2.2.dist-info/entry_points.txt +3 -0
@@ -0,0 +1,26 @@
1
+ """
2
+ Adapted from https://github.com/platformdirs/platformdirs (MIT Licensed)
3
+ """
4
+ import os
5
+ import sys
6
+ from pathlib import Path
7
+
8
+ from capytaine import __version__
9
+
10
+
11
def cache_directory():
    """Return (creating it if needed) the path of Capytaine's on-disk cache directory.

    The location follows the platform's convention (adapted from platformdirs)
    and can be overridden with the ``CAPYTAINE_CACHE_DIR`` environment
    variable.  A per-version subdirectory is always appended.
    """
    if "CAPYTAINE_CACHE_DIR" in os.environ:
        # Explicit user override: only the version subdirectory is appended.
        path = os.path.join(os.environ["CAPYTAINE_CACHE_DIR"], __version__)
    elif sys.platform == "win32":  # Windows
        local_app_data = os.path.normpath(os.environ.get("LOCALAPPDATA"))
        path = os.path.join(local_app_data, "capytaine", "Cache", __version__)
    elif sys.platform == "darwin":  # MacOS
        caches_dir = os.path.expanduser("~/Library/Caches")
        path = os.path.join(caches_dir, "capytaine", __version__)
    else:  # Linux and other XDG-style platforms
        base_dir = os.environ.get("XDG_CACHE_HOME", "")
        if base_dir.strip() == "":
            # XDG_CACHE_HOME unset or blank: fall back to the XDG default.
            base_dir = os.path.expanduser("~/.cache")
        path = os.path.join(base_dir, "capytaine", __version__)
    Path(path).mkdir(parents=True, exist_ok=True)
    return path
@@ -0,0 +1,18 @@
1
+ import logging
2
+
3
+ import numpy as np
4
+
5
+ LOG = logging.getLogger(__name__)
6
+
7
+ def _get_water_depth(free_surface, water_depth, sea_bottom, default_water_depth=np.inf):
8
+ if water_depth is None and sea_bottom is None:
9
+ return default_water_depth
10
+ elif water_depth is not None and sea_bottom is None:
11
+ if water_depth <= 0.0:
12
+ raise ValueError(f"`water_depth` should be strictly positive. Received value: {water_depth}")
13
+ return float(water_depth)
14
+ elif water_depth is None and sea_bottom is not None:
15
+ LOG.warning("To uniformize notations througouth Capytaine, setting `water_depth` is preferred to `sea_bottom` since version 2.0.")
16
+ return float(free_surface - sea_bottom)
17
+ else:
18
+ raise ValueError("Cannot give both a `water_depth` and a `sea_bottom`.")
@@ -0,0 +1,52 @@
1
+ import numpy as np
2
+ from capytaine.bodies import FloatingBody
3
+ from capytaine.post_pro.free_surfaces import FreeSurface
4
+ from capytaine.meshes import Mesh, CollectionOfMeshes
5
+
6
+
7
def _normalize_points(points, keep_mesh=False):
    """Normalize the accepted point formats into an array of points.

    `points` may be a FloatingBody, a FreeSurface, a mesh, a single point,
    a list of points, or the output of a meshgrid.  Returns a
    (nb_points, d) array of coordinates (or the mesh itself when
    `keep_mesh` is True) together with the shape that per-point results
    should eventually be reshaped to.
    """
    if isinstance(points, (FloatingBody, FreeSurface)):
        mesh = points.mesh
        return (mesh if keep_mesh else mesh.faces_centers), (mesh.nb_faces,)

    if isinstance(points, (Mesh, CollectionOfMeshes)):
        return (points if keep_mesh else points.faces_centers), (points.nb_faces,)

    points = np.asarray(points)

    if points.ndim == 1:
        # A single point: promote it to a (1, d) array.
        return points[np.newaxis, :], (1,)

    if points.ndim == 2:
        # Already a list of points.
        return points, (points.shape[0],)

    if points.ndim > 2:
        # Expected to be the result of a meshgrid, with shape (d, nx, ny, ...):
        # flatten into a (nx*ny*..., d) array of points.
        grid_shape = points.shape[1:]
        return points.reshape(points.shape[0], -1).transpose(), grid_shape

    # ndim == 0 (a bare scalar) is not a valid set of points.
    raise ValueError(f"Expected a list of points or a mesh, but got instead: {points}")
39
+
40
def _normalize_free_surface_points(points, keep_mesh=False):
    """Same as `_normalize_points`, but 2D points (x, y) are completed
    with z = 0 so that they lie on the free surface."""
    if keep_mesh:
        if isinstance(points, (FloatingBody, FreeSurface)):
            return points.mesh, (points.mesh.nb_faces,)
        if isinstance(points, (Mesh, CollectionOfMeshes)):
            return points, (points.nb_faces,)

    points, output_shape = _normalize_points(points, keep_mesh)

    if points.ndim == 2 and points.shape[1] == 2:
        # Only x and y were provided: append a zero z-coordinate to each point.
        z_column = np.zeros((points.shape[0], 1))
        points = np.concatenate([points, z_column], axis=1)

    return points, output_shape
@@ -0,0 +1,49 @@
1
+ # Copyright (C) 2017-2024 Matthieu Ancellin
2
+ # See LICENSE file at <https://github.com/capytaine/capytaine>
3
+ """Tools for memoization of functions."""
4
+ from collections import OrderedDict
5
+ from functools import wraps
6
+
7
+ import logging
8
+
9
LOG = logging.getLogger(__name__)


def lru_cache_with_strict_maxsize(maxsize=1):
    """Behaves mostly like functools.lru_cache(), but the oldest data in the cache is
    deleted *before* computing a new one, in order to *never* have more than
    `maxsize` items in memory.
    This is useful to limit RAM usage when stored objects are big, like the interaction
    matrices of Capytaine.

    Parameters
    ----------
    maxsize: int, optional
        maximum number of results kept in memory at any time (default: 1)
    """

    def decorator(f):
        cache = OrderedDict()  # insertion-ordered, oldest entry first

        @wraps(f)
        def decorated_f(*args, **kwargs):
            # Sort the kwargs so that the cache key does not depend on the
            # order in which the caller passed the keyword arguments.
            hashable_kwargs = tuple(sorted(kwargs.items()))
            key = (args, hashable_kwargs)

            if key in cache:
                LOG.debug("Get cached version of %s(%s, %s)", f.__name__, args, hashable_kwargs)
                return cache[key]

            if len(cache) + 1 > maxsize:
                # Drop the oldest item *before* computing the new one, so
                # that at most `maxsize` results ever coexist in memory.
                cache.popitem(last=False)

            LOG.debug("Computing %s(%s, %s)", f.__name__, args, hashable_kwargs)
            result = f(*args, **kwargs)
            cache[key] = result

            return result

        return decorated_f

    return decorator


delete_first_lru_cache = lru_cache_with_strict_maxsize  # For backward compatibility...
@@ -0,0 +1,27 @@
1
+ """Tool to import optional dependencies. Inspired by similar code in pandas."""
2
+
3
+ import importlib
4
+
5
def import_optional_dependency(module_name: str, package_name: str = None):
    """Import and return the module `module_name`.

    If it is not installed, raise an informative ImportError mentioning
    `package_name` (which defaults to the module name) as the package to
    install.
    """
    try:
        return importlib.import_module(module_name)
    except ImportError:
        pkg = module_name if package_name is None else package_name
        raise ImportError(
            f"Missing optional dependency '{module_name}'. "
            f"Use pip or conda to install {pkg}."
        ) from None
19
+
20
def silently_import_optional_dependency(module_name: str):
    """Same as `import_optional_dependency`, except that no exception is
    raised when the module is not installed: None is returned instead."""
    try:
        return importlib.import_module(module_name)
    except ImportError:
        return None
@@ -0,0 +1,94 @@
1
+ """Prony decomposition: tool to approximate a function as a sum of exponentials.
2
+ Used in particular in the finite depth Green function.
3
+ """
4
+ # Copyright (C) 2017-2019 Matthieu Ancellin
5
+ # See LICENSE file at <https://github.com/mancellin/capytaine>
6
+
7
+ import logging
8
+
9
+ import numpy as np
10
+ from numpy.polynomial import polynomial
11
+ from scipy.optimize import curve_fit
12
+ from scipy.linalg import toeplitz
13
+
14
+ LOG = logging.getLogger(__name__)
15
+
16
+
17
def exponential_decomposition(X, F, m):
    """Use Prony's method to approximate the sampled real function F=f(X) as a sum of m
    exponential functions x → Σ a_i exp(lamda_i x).

    The sampling points X are assumed to be uniformly spaced (the recovered
    rates use the spacing X[1] - X[0]).

    Parameters
    ----------
    X: 1D array
        sampling points.
    F: 1D array (same size as X)
        values of the function to approximate at the points of x.
    m: integer
        number of exponential functions

    Returns
    -------
    a: 1D array
        coefficients of the exponentials
    lamda: 1D array (same size as a)
        growth rate of the exponentials; note that after filtering
        (see below) fewer than m exponentials may be returned.
    """
    assert X.shape == F.shape

    # Compute the coefficients of the polynomial of Prony's method
    # by solving the (overdetermined) linear prediction system.
    A = toeplitz(c=F[m-1:-1], r=F[:m][::-1])
    P, *_ = np.linalg.lstsq(A, F[m:], rcond=None)

    # Build the characteristic polynomial and find its roots.
    coeffs = np.ones(m+1)
    coeffs[:m] = -P[::-1]  # vectorized form of the former element-wise loop
    roots = polynomial.polyroots(coeffs)

    # Discard values where log is undefined (negative real roots).
    roots = roots[np.logical_or(np.imag(roots) != 0.0, np.real(roots) >= 0.0)]

    # Deduce lamda, deduplicate, and keep only decaying, non-extreme rates.
    lamda = np.real(np.log(roots)/(X[1] - X[0]))
    lamda = np.unique(lamda)
    lamda = lamda[np.logical_and(-20.0 < lamda, lamda < 0.0)]

    # Fit the amplitudes 'a' on the sampled curve (linear least squares
    # through curve_fit, starting from zero amplitudes).
    def f(x, *ar):
        ar = np.asarray(ar)[:, np.newaxis]
        la = lamda[:, np.newaxis]
        return np.sum(ar * np.exp(la * x), axis=0)
    a, *_ = curve_fit(f, X, F, p0=np.zeros(lamda.shape))

    return a, lamda
66
+
67
+
68
def error_exponential_decomposition(X, F, a, lamda):
    """Compare the exponential decomposition defined by the coefficients `a`
    and `lamda` to the reference values in `F`.

    Parameters
    ----------
    X: 1D array
        sampling points
    F: 1D array (same size as X)
        reference values
    a: 1D array
        coefficients of the exponentials
    lamda: 1D array (same size as a)
        growth rate of the exponentials

    Returns
    -------
    error: float
        mean square error of the decomposition
    """
    # Broadcast to shape (nb_exponentials, nb_points) and sum over the
    # exponentials to evaluate the decomposition at every sampling point.
    a_col = np.asarray(a)[:, np.newaxis]
    lamda_col = np.asarray(lamda)[:, np.newaxis]
    approximation = np.sum(a_col * np.exp(lamda_col * X), axis=0)
    return np.square(approximation - F).mean()
@@ -0,0 +1,107 @@
1
+ import numpy as np
2
+ from functools import wraps, total_ordering
3
+
4
@total_ordering
class SymbolicMultiplication:
    """Wrap a value together with a symbolic multiplicative factor.

    `SymbolicMultiplication("0", x)` stands for "0 × x": the product is kept
    symbolic so that a vanishing (or, with symbol "∞", diverging) factor can
    be carried through a computation without losing the finite payload `x`.
    Dividing two objects carrying the same symbol cancels the symbol and
    yields a plain concrete value.
    """

    def __init__(self, symbol, value=1.0):
        # symbol: a string tag; "0" and "∞" get special treatment below.
        # value: the concrete payload (scalar or numpy array).
        self.symbol = symbol
        self.value = value

    def __format__(self, format_spec):
        # Forward the format spec to the wrapped value, prefixed by the symbol.
        return f"{self.symbol}×{self.value.__format__(format_spec)}"

    # Give this class precedence over ndarray in mixed binary operations,
    # so that e.g. `ndarray * SymbolicMultiplication` calls our __rmul__.
    __array_priority__ = 1.0

    def __array_function__(self, func, types, *args, **kwargs):
        # Minimal support for the numpy function protocol: only np.real,
        # np.imag and np.sum are handled, by applying the function to the
        # wrapped value and keeping the symbol.
        if func in {np.real, np.imag, np.sum}:
            return SymbolicMultiplication(self.symbol, func(self.value))
        else:
            return NotImplemented

    def __str__(self):
        return f"{self.symbol}×{self.value}"

    def __repr__(self):
        return f"SymbolicMultiplication(\"{self.symbol}\", {repr(self.value)})"

    def __add__(self, x):
        # Addition cannot stay symbolic: make the product concrete first.
        return self._concretize() + x

    def __radd__(self, x):
        return x + self._concretize()

    def __mul__(self, x):
        # Multiplication keeps the symbol and scales the wrapped value.
        return SymbolicMultiplication(self.symbol, self.value * x)

    def __rmul__(self, x):
        return SymbolicMultiplication(self.symbol, x * self.value)

    def __pow__(self, n):
        # Only squaring is supported; other exponents are rejected.
        if n == 2:
            return self * self
        else:
            raise NotImplementedError

    def __truediv__(self, x):
        if hasattr(x, 'symbol') and self.symbol == x.symbol:
            # Same symbolic factor on both sides: it cancels out and the
            # result is a plain (non-symbolic) value.
            return self.value / x.value
        else:
            return SymbolicMultiplication(self.symbol, self.value / x)

    def __rtruediv__(self, x):
        if hasattr(x, 'symbol') and self.symbol == x.symbol:
            # Same symbol: it cancels out.
            return x.value / self.value
        elif self.symbol == "0":
            # Dividing by a symbolic zero produces a symbolic infinity.
            return SymbolicMultiplication("∞", x/self.value)
        elif self.symbol == "∞":
            # Dividing by a symbolic infinity produces a symbolic zero.
            return SymbolicMultiplication("0", x/self.value)
        else:
            raise NotImplementedError

    def __matmul__(self, x):
        return SymbolicMultiplication(self.symbol, self.value @ x)

    def __rmatmul__(self, x):
        return SymbolicMultiplication(self.symbol, x @ self.value)

    def __getitem__(self, item):
        # Indexing applies to the wrapped value; the symbol is preserved.
        return SymbolicMultiplication(self.symbol, self.value[item])

    def __eq__(self, x):
        # Compares the concrete interpretation of the product (see __float__),
        # which ignores the wrapped value; only meaningful for "0" and "∞".
        return float(self) == x

    def __lt__(self, x):
        # The remaining comparison operators are filled in by @total_ordering.
        return float(self) < x

    def __hash__(self):
        # NOTE(review): hashes on (symbol, value) while __eq__ compares by
        # float(self), so objects equal to a plain float do not hash alike —
        # presumably only used for dict/set keying of symbolic objects
        # among themselves; confirm.
        return hash((self.symbol, self.value))

    def _concretize(self):
        # Turn the symbolic product into a concrete scalar or array.
        if isinstance(self.value, np.ndarray):
            if self.symbol == "0":
                return np.zeros_like(self.value)
            elif self.symbol == "∞":
                return np.full_like(self.value, np.inf)
            # NOTE(review): an ndarray with any other symbol falls through and
            # implicitly returns None — presumably unreachable; confirm.
        else:
            return float(self)

    def __float__(self):
        # Concrete value of the product for the known symbols.
        # NOTE(review): for "∞" this ignores the sign of the wrapped value.
        if self.symbol == "0":
            return 0.0
        elif self.symbol == "∞":
            return np.inf
        else:
            raise NotImplementedError

    def reshape(self, *args):
        # Mimic ndarray.reshape on the wrapped value, keeping the symbol.
        return SymbolicMultiplication(self.symbol, self.value.reshape(*args))
98
+
99
+
100
def supporting_symbolic_multiplication(f):
    """Decorator for a two-argument function `f(a, x)` (typically a linear
    solver) making it transparently accept a SymbolicMultiplication as its
    second argument: the symbol is peeled off, `f` is applied to the plain
    wrapped value, and the symbol is reattached to the result."""
    @wraps(f)
    def wrapped_f(a, x):
        if not hasattr(x, 'symbol'):
            # Plain value: nothing symbolic to handle.
            return f(a, x)
        return SymbolicMultiplication(x.symbol, f(a, x.value))
    return wrapped_f
File without changes
capytaine/ui/cli.py ADDED
@@ -0,0 +1,28 @@
1
#!/usr/bin/env python
# coding: utf-8
"""Experimental command-line interface for Capytaine."""
# Copyright (C) 2017-2023 Matthieu Ancellin
# See LICENSE file at <https://github.com/capytaine/capytaine>

import argparse

import capytaine as cpt
from capytaine.io.legacy import run_cal_file

# NOTE(review): configuring logging at import time is a module-level side
# effect — merely importing this module changes the global logging setup.
cpt.set_logging()

# The parser is built at module level so that main() only parses and runs.
parser = argparse.ArgumentParser(description="Command-line interface for Capytaine taking Nemoh.cal files as input and returning Tecplots files.")
parser.add_argument('paramfiles',
                    default=['./Nemoh.cal'],
                    nargs='*',
                    help='path of parameters files (default: ./Nemoh.cal)')


def main():
    # Entry point: run each Nemoh.cal parameter file given on the command line.
    args = parser.parse_args()
    for paramfile in args.paramfiles:
        run_cal_file(paramfile)


if __name__ == '__main__':
    main()
capytaine/ui/rich.py ADDED
@@ -0,0 +1,5 @@
1
+ import logging
2
+ from rich.logging import RichHandler
3
+
4
def set_logging(level="INFO"):
    """Configure the root logger to display log messages at the given level
    through a rich.logging.RichHandler, replacing any existing handlers."""
    handler = RichHandler(level=level, log_time_format="[%X]", show_path=False)
    logging.basicConfig(level=level, format="%(message)s", handlers=[handler], force=True)
@@ -0,0 +1,3 @@
1
+ from capytaine.ui.vtk.mesh_viewer import MeshViewer
2
+ from capytaine.ui.vtk.body_viewer import FloatingBodyViewer
3
+ from capytaine.ui.vtk.animation import Animation