lisaanalysistools-1.0.0-cp312-cp312-macosx_10_9_x86_64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of lisaanalysistools might be problematic.

Files changed (37)
  1. lisaanalysistools-1.0.0.dist-info/LICENSE +201 -0
  2. lisaanalysistools-1.0.0.dist-info/METADATA +80 -0
  3. lisaanalysistools-1.0.0.dist-info/RECORD +37 -0
  4. lisaanalysistools-1.0.0.dist-info/WHEEL +5 -0
  5. lisaanalysistools-1.0.0.dist-info/top_level.txt +2 -0
  6. lisatools/__init__.py +0 -0
  7. lisatools/_version.py +4 -0
  8. lisatools/analysiscontainer.py +438 -0
  9. lisatools/cutils/detector.cpython-312-darwin.so +0 -0
  10. lisatools/datacontainer.py +292 -0
  11. lisatools/detector.py +410 -0
  12. lisatools/diagnostic.py +976 -0
  13. lisatools/glitch.py +193 -0
  14. lisatools/sampling/__init__.py +0 -0
  15. lisatools/sampling/likelihood.py +882 -0
  16. lisatools/sampling/moves/__init__.py +0 -0
  17. lisatools/sampling/moves/gbgroupstretch.py +53 -0
  18. lisatools/sampling/moves/gbmultipletryrj.py +1287 -0
  19. lisatools/sampling/moves/gbspecialgroupstretch.py +671 -0
  20. lisatools/sampling/moves/gbspecialstretch.py +1836 -0
  21. lisatools/sampling/moves/mbhspecialmove.py +286 -0
  22. lisatools/sampling/moves/placeholder.py +16 -0
  23. lisatools/sampling/moves/skymodehop.py +110 -0
  24. lisatools/sampling/moves/specialforegroundmove.py +564 -0
  25. lisatools/sampling/prior.py +508 -0
  26. lisatools/sampling/stopping.py +320 -0
  27. lisatools/sampling/utility.py +324 -0
  28. lisatools/sensitivity.py +888 -0
  29. lisatools/sources/__init__.py +0 -0
  30. lisatools/sources/emri/__init__.py +1 -0
  31. lisatools/sources/emri/tdiwaveform.py +72 -0
  32. lisatools/stochastic.py +291 -0
  33. lisatools/utils/__init__.py +0 -0
  34. lisatools/utils/constants.py +40 -0
  35. lisatools/utils/multigpudataholder.py +730 -0
  36. lisatools/utils/pointeradjust.py +106 -0
  37. lisatools/utils/utility.py +240 -0
lisatools/datacontainer.py ADDED
@@ -0,0 +1,292 @@
+ import warnings
+ from abc import ABC
+ from typing import Any, Tuple, Optional, List
+
+ import math
+ import numpy as np
+ from numpy.typing import ArrayLike
+ from scipy import interpolate
+ import matplotlib.pyplot as plt
+
+ try:
+     import cupy as cp
+
+ except (ModuleNotFoundError, ImportError):
+     import numpy as cp
+
+ from . import detector as lisa_models
+ from .utils.utility import AET, get_array_module
+ from .utils.constants import *
+ from .stochastic import (
+     StochasticContribution,
+     FittedHyperbolicTangentGalacticForeground,
+ )
+ from .sensitivity import SensitivityMatrix
+
+
+ class DataResidualArray:
+     # forward declaration so the class name can be used in the type hints below
+     pass
+
+
+ class DataResidualArray:
+     """Container to hold data and residual information.
+
+     Args:
+         data_res_in: Input data/residual channels. Can be a 1D array (single channel),
+             a 2D array or list of 1D arrays (multiple channels), or another
+             :class:`DataResidualArray`, whose attributes are copied.
+         dt: Time step in seconds if the input channels are in the time domain.
+         f_arr: Frequency array if the input channels are in the frequency domain.
+         df: Frequency spacing if the input channels are in the frequency domain.
+             Exactly one of ``dt``, ``f_arr``, or ``df`` must be provided.
+         **kwargs: Keyword arguments (currently unused).
+
+     """
+
+     def __init__(
+         self,
+         data_res_in: List[np.ndarray] | np.ndarray | DataResidualArray,
+         dt: Optional[float] = None,
+         f_arr: Optional[np.ndarray] = None,
+         df: Optional[float] = None,
+         **kwargs: dict,
+     ) -> None:
+         if isinstance(data_res_in, DataResidualArray):
+             # copy constructor: carry over all attributes from the input container
+             for key, item in data_res_in.__dict__.items():
+                 setattr(self, key, item)
+
+         else:
+             self._check_inputs(dt=dt, f_arr=f_arr, df=df)
+             self.data_res_arr = data_res_in
+             self._store_time_and_frequency_information(dt=dt, f_arr=f_arr, df=df)
+
+     @property
+     def init_kwargs(self) -> dict:
+         """Initial dt, df, f_arr"""
+         return self._init_kwargs
+
+     @init_kwargs.setter
+     def init_kwargs(self, init_kwargs: dict) -> None:
+         """Set initial kwargs."""
+         self._init_kwargs = init_kwargs
+
+     def _check_inputs(
+         self,
+         dt: Optional[float] = None,
+         f_arr: Optional[np.ndarray] = None,
+         df: Optional[float] = None,
+     ):
+         number_of_none = 0
+
+         number_of_none += 1 if dt is None else 0
+         number_of_none += 1 if f_arr is None else 0
+         number_of_none += 1 if df is None else 0
+
+         if number_of_none == 3:
+             raise ValueError("Must provide either df, dt, or f_arr.")
+
+         elif number_of_none == 1:
+             raise ValueError(
+                 "Can only provide one of dt, f_arr, or df. Not more than one."
+             )
+         self.init_kwargs = dict(dt=dt, f_arr=f_arr, df=df)
+
+     def _store_time_and_frequency_information(
+         self,
+         dt: Optional[float] = None,
+         f_arr: Optional[np.ndarray] = None,
+         df: Optional[float] = None,
+     ):
+         if dt is not None:
+             self._dt = dt
+             self._Tobs = self.data_length * dt
+             self._df = 1 / self._Tobs
+             self._fmax = 1 / (2 * dt)
+             self._f_arr = np.fft.rfftfreq(self.data_length, dt)
+
+             # transform data
+             tmp = (
+                 get_array_module(self.data_res_arr).fft.rfft(self.data_res_arr, axis=-1)
+                 * self._dt
+             )
+             del self._data_res_arr
+             self._data_res_arr = tmp
+             self.data_length = self._data_res_arr.shape[-1]
+
+         elif df is not None:
+             self._df = df
+             self._Tobs = 1 / self._df
+             self._fmax = (self.data_length - 1) * df
+             self._dt = 1 / (2 * self._fmax)
+             # build the frequency grid so that it includes fmax and matches the data length
+             self._f_arr = np.arange(self.data_length) * self._df
+
+         elif f_arr is not None:
+             self._f_arr = f_arr
+             self._fmax = f_arr.max()
+             # constant spacing
+             if np.all(np.diff(f_arr) == np.diff(f_arr)[0]):
+                 self._df = np.diff(f_arr)[0].item()
+
+                 if f_arr[0] == 0.0:
+                     # could be fft because of constant spacing and f_arr[0] == 0.0
+                     self._Tobs = 1 / self._df
+                     self._dt = 1 / (2 * self._fmax)
+
+                 else:
+                     # cannot be fft basis
+                     self._Tobs = None
+                     self._dt = None
+
+             else:
+                 self._df = None
+                 self._Tobs = None
+                 self._dt = None
+
+         if len(self.f_arr) != self.data_length:
+             raise ValueError(
+                 "Entered or determined f_arr does not have the same length as the data channel inputs."
+             )
+
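For orientation, a minimal construction sketch based on the initialization logic above. The channel data here is synthetic, the import path assumes the installed wheel, and exactly one of dt, df, or f_arr is supplied per call:

import numpy as np
from lisatools.datacontainer import DataResidualArray

# three synthetic time-domain channels, 4096 samples at dt = 10 s;
# the constructor applies an rfft and stores frequency-domain data internally
data_t = DataResidualArray(np.random.randn(3, 4096), dt=10.0)
print(data_t.df, data_t.fmax)  # 1 / (4096 * 10) and 1 / (2 * 10)

# frequency-domain construction from an explicit frequency array
f_arr = np.fft.rfftfreq(4096, 10.0)
res = np.random.randn(3, len(f_arr)) + 1j * np.random.randn(3, len(f_arr))
data_f = DataResidualArray(res, f_arr=f_arr)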
+     @property
+     def fmax(self):
+         return self._fmax
+
+     @property
+     def f_arr(self):
+         return self._f_arr
+
+     @property
+     def dt(self):
+         if self._dt is None:
+             raise ValueError("dt cannot be determined from this f_arr input.")
+
+         return self._dt
+
+     @property
+     def Tobs(self):
+         if self._Tobs is None:
+             raise ValueError("Tobs cannot be determined from this f_arr input.")
+
+         return self._Tobs
+
+     @property
+     def df(self):
+         if self._df is None:
+             raise ValueError("df cannot be determined from this f_arr input.")
+
+         return self._df
+
+     @property
+     def frequency_arr(self) -> np.ndarray:
+         return self._f_arr
+
+     @property
+     def data_res_arr(self) -> np.ndarray:
+         return self._data_res_arr
+
+     @data_res_arr.setter
+     def data_res_arr(self, data_res_arr: List[np.ndarray] | np.ndarray) -> None:
+         self._data_res_arr_input = data_res_arr
+
+         if (
+             isinstance(data_res_arr, np.ndarray) or isinstance(data_res_arr, cp.ndarray)
+         ) and data_res_arr.ndim == 1:
+             data_res_arr = [data_res_arr]
+
+         elif (
+             isinstance(data_res_arr, np.ndarray) or isinstance(data_res_arr, cp.ndarray)
+         ) and data_res_arr.ndim == 2:
+             data_res_arr = list(data_res_arr)
+
+         new_out = np.full(len(data_res_arr), None, dtype=object)
+         self.data_length = None
+         for i in range(len(data_res_arr)):
+             current_data = data_res_arr[i]
+             if isinstance(current_data, np.ndarray) or isinstance(
+                 current_data, cp.ndarray
+             ):
+                 if self.data_length is None:
+                     self.data_length = len(current_data)
+                 else:
+                     # all channels must have the same length
+                     assert len(current_data) == self.data_length
+
+                 new_out[i] = current_data
+             else:
+                 raise ValueError("Each channel must be a numpy or cupy array.")
+
+         self.nchannels = len(new_out)
+         self._data_res_arr = np.asarray(list(new_out), dtype=complex)
+
+     def __getitem__(self, index: tuple) -> np.ndarray:
+         return self.data_res_arr[index]
+
+     @property
+     def ndim(self) -> int:
+         return self.data_res_arr.ndim
+
+     def flatten(self) -> np.ndarray:
+         return self.data_res_arr.flatten()
+
+     @property
+     def shape(self) -> tuple:
+         return self.data_res_arr.shape
+
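Continuing the hypothetical data_t object from the sketch above, the container then behaves like a 2D complex array of shape (nchannels, data_length), as the accessors defined here show:

print(data_t.shape)         # (3, 2049): channels by frequency bins
print(data_t.ndim)          # 2
first_channel = data_t[0]   # __getitem__ indexes the underlying complex array
stacked = data_t.flatten()  # all channels flattened into one 1D array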
+     def loglog(
+         self,
+         ax: Optional[List[plt.Axes] | plt.Axes] = None,
+         fig: Optional[plt.Figure] = None,
+         inds: Optional[List[int] | int] = None,
+         char_strain: Optional[bool] = False,
+         **kwargs: dict,
+     ) -> Tuple[plt.Figure, plt.Axes]:
+         """Produce a log-log plot of the data.
+
+         Args:
+             ax: Matplotlib Axes objects to add plots. Either a list of Axes objects or a single Axes object.
+             fig: Matplotlib figure object.
+             inds: Integer index to select out which data to add to a single Axes object.
+                 A list can be provided if ax is a list. They must be the same length.
+             char_strain: If ``True``, return plot in characteristic strain representation.
+             **kwargs: Keyword arguments to be passed to the ``loglog`` function in matplotlib.
+
+         Returns:
+             Matplotlib figure and axes objects in a 2-tuple.
+
+         """
+         if ax is None and fig is None:
+             nrows = 1
+             ncols = self.shape[0]
+
+             fig, ax = plt.subplots(nrows, ncols, sharex=True, sharey=True)
+             ax = ax.ravel()
+             inds_list = range(len(ax))
+
+         elif ax is not None:
+             if isinstance(ax, list):
+                 assert len(ax) == np.prod(self.shape[:-1])
+                 if inds is None:
+                     inds_list = list(np.arange(np.prod(self.shape[:-1])))
+                 else:
+                     assert isinstance(inds, list) and len(inds) == len(ax)
+                     inds_list = inds
+
+             elif isinstance(ax, plt.Axes):
+                 assert inds is not None and (
+                     isinstance(inds, tuple) or isinstance(inds, int)
+                 )
+                 ax = [ax]
+                 inds_list = [inds]
+
+         elif fig is not None:
+             raise NotImplementedError
+
+         for i, ax_tmp in zip(inds_list, ax):
+             plot_in = np.abs(self.data_res_arr[i])
+             if char_strain:
+                 plot_in *= self.frequency_arr
+             ax_tmp.loglog(self.frequency_arr, plot_in, **kwargs)
+
+         return (fig, ax)
+
+     @property
+     def char_strain(self) -> np.ndarray:
+         """Characteristic strain representation of the data."""
+         return np.sqrt(self.f_arr) * np.abs(self.data_res_arr)
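A short plotting sketch using the methods above, again with the hypothetical data_t object from the earlier construction example; the axis handling follows the branches in loglog:

import matplotlib.pyplot as plt

# default call: one log-log panel per channel showing |data| vs. frequency
fig, ax = data_t.loglog()

# single existing Axes: an integer channel index must be given via inds
fig2, ax2 = plt.subplots()
data_t.loglog(ax=ax2, inds=0, char_strain=True)
plt.show()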
lisatools/detector.py ADDED
@@ -0,0 +1,410 @@
+ import os
+ from abc import ABC, abstractmethod
+ from typing import Any, List, Tuple, Optional
+ from dataclasses import dataclass
+
+ import numpy as np
+ import h5py
+ from scipy import interpolate
+
+ from lisatools.cutils.detector import pycppDetector
+
+ SC = [1, 2, 3]
+ LINKS = [12, 23, 31, 13, 32, 21]
+
+ LINEAR_INTERP_TIMESTEP = 600.00  # sec (10 min)
+
+
+ class Orbits(ABC):
+     """LISA Orbit Base Class
+
+     Args:
+         filename: File name. File should be in the style of LISA Orbits.
+
+     """
+
+     def __init__(self, filename: str) -> None:
+         self.filename = filename
+         self._setup()
+         self.configured = False
+
+     @property
+     def LINKS(self) -> List[int]:
+         """Link order."""
+         return LINKS
+
+     @property
+     def SC(self) -> List[int]:
+         """Spacecraft order."""
+         return SC
+
+     @property
+     def link_space_craft_r(self) -> List[int]:
+         """Receiver (first) spacecraft."""
+         return [int(str(link_i)[0]) for link_i in self.LINKS]
+
+     @property
+     def link_space_craft_e(self) -> List[int]:
+         """Sender (second) spacecraft."""
+         return [int(str(link_i)[1]) for link_i in self.LINKS]
+
+     def _setup(self) -> None:
+         with self.open() as f:
+             for key in f.attrs.keys():
+                 setattr(self, key + "_base", f.attrs[key])
+
+     @property
+     def filename(self) -> str:
+         """Orbit file name."""
+         return self._filename
+
+     @filename.setter
+     def filename(self, filename: str) -> None:
+         """Set file name."""
+         assert isinstance(filename, str)
+         assert os.path.exists(filename)
+         self._filename = filename
+
+     def open(self) -> h5py.File:
+         """Open the HDF5 orbit file in read-only mode.
+
+         Returns:
+             Opened h5py file object.
+
+         """
+         f = h5py.File(self.filename, "r")
+         return f
+
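The class above expects an HDF5 file in the LISA Orbits layout; a hedged inspection sketch follows (the file name is a placeholder, and the attribute names are whatever the file stores, which _setup() mirrors as <name>_base attributes on the class):

import h5py

with h5py.File("equalarmlength-orbits.h5", "r") as f:  # placeholder path
    print(dict(f.attrs))   # scalar metadata, e.g. the dt and size used by t_base
    print(list(f["tcb"]))  # datasets read below: "ltt", "n", "x", "v"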
+     @property
+     def t_base(self) -> np.ndarray:
+         """Time array from file."""
+         with self.open() as f:
+             t_base = np.arange(self.size_base) * self.dt_base
+         return t_base
+
+     @property
+     def ltt_base(self) -> np.ndarray:
+         """Light travel times along links from file."""
+         with self.open() as f:
+             ltt = f["tcb"]["ltt"][:]
+         return ltt
+
+     @property
+     def n_base(self) -> np.ndarray:
+         """Normal unit vectors towards receiver along links from file."""
+         with self.open() as f:
+             n = f["tcb"]["n"][:]
+         return n
+
+     @property
+     def x_base(self) -> np.ndarray:
+         """Spacecraft positions from file."""
+         with self.open() as f:
+             x = f["tcb"]["x"][:]
+         return x
+
+     @property
+     def v_base(self) -> np.ndarray:
+         """Spacecraft velocities from file."""
+         with self.open() as f:
+             v = f["tcb"]["v"][:]
+         return v
+
+     @property
+     def t(self) -> np.ndarray:
+         """Configured time array."""
+         self._check_configured()
+         return self._t
+
+     @t.setter
+     def t(self, t: np.ndarray):
+         """Set configured time array."""
+         assert isinstance(t, np.ndarray) and t.ndim == 1
+         self._t = t
+
+     @property
+     def ltt(self) -> np.ndarray:
+         """Light travel times (configured)."""
+         self._check_configured()
+         return self._ltt
+
+     @ltt.setter
+     def ltt(self, ltt: np.ndarray) -> None:
+         """Set light travel times."""
+         assert ltt.shape[0] == len(self.t)
+         self._ltt = ltt
+
+     @property
+     def n(self) -> np.ndarray:
+         """Normal unit vectors along links (configured)."""
+         self._check_configured()
+         return self._n
+
+     @n.setter
+     def n(self, n: np.ndarray) -> None:
+         """Set normal unit vectors."""
+         self._n = n
+
+     @property
+     def x(self) -> np.ndarray:
+         """Spacecraft positions (configured)."""
+         self._check_configured()
+         return self._x
+
+     @x.setter
+     def x(self, x: np.ndarray) -> None:
+         """Set spacecraft positions."""
+         self._x = x
+
+     @property
+     def v(self) -> np.ndarray:
+         """Spacecraft velocities (configured)."""
+         self._check_configured()
+         return self._v
+
+     @v.setter
+     def v(self, v: np.ndarray) -> None:
+         """Set spacecraft velocities."""
+         self._v = v
+
+     def configure(
+         self,
+         t_arr: Optional[np.ndarray] = None,
+         dt: Optional[float] = None,
+         linear_interp_setup: Optional[bool] = False,
+     ) -> None:
+         """Configure the orbits to match the signal response generator time basis.
+
+         The base orbits will be scaled up or down as needed using cubic spline interpolation.
+         The hierarchy of consideration if multiple keyword arguments are given:
+         ``linear_interp_setup``, ``t_arr``, ``dt``.
+
+         If nothing is provided, the base points are used.
+
+         Args:
+             t_arr: New time array.
+             dt: New time step. Will take the time duration to be that of the input data.
+             linear_interp_setup: If ``True``, create a dense grid designed for linear interpolation with a constant time step.
+
+         """
+
+         x_orig = self.t_base
+
+         if linear_interp_setup:
+             make_cpp = True
+             dt = LINEAR_INTERP_TIMESTEP
+             Tobs = self.t_base[-1]
+             Nobs = int(Tobs / dt)
+             t_arr = np.arange(Nobs) * dt
+             if t_arr[-1] < self.t_base[-1]:
+                 t_arr = np.concatenate([t_arr, self.t_base[-1:]])
+         elif t_arr is not None:
+             # arbitrary time array: stay within the base time range; no C++ backend without a constant dt
+             make_cpp = False
+             assert np.all(t_arr >= self.t_base[0]) and np.all(t_arr <= self.t_base[-1])
+
+         elif dt is not None:
+             make_cpp = True
+             Tobs = self.t_base[-1]
+             Nobs = int(Tobs / dt)
+             t_arr = np.arange(Nobs) * dt
+             if t_arr[-1] < self.t_base[-1]:
+                 t_arr = np.concatenate([t_arr, self.t_base[-1:]])
+
+         else:
+             make_cpp = False
+             t_arr = self.t_base
+
+         x_new = t_arr.copy()
+         self.t = t_arr.copy()
+
+         for which in ["ltt", "x", "n", "v"]:
+             arr = getattr(self, which + "_base")
+             arr_tmp = arr.reshape(self.size_base, -1)
+             arr_out_tmp = np.zeros((len(x_new), arr_tmp.shape[-1]))
+             for i in range(arr_tmp.shape[-1]):
+                 arr_out_tmp[:, i] = interpolate.CubicSpline(x_orig, arr_tmp[:, i])(
+                     x_new
+                 )
+             arr_out = arr_out_tmp.reshape((len(x_new),) + arr.shape[1:])
+             setattr(self, "_" + which, arr_out)
+
+         self.configured = True
+
+         lsr = np.asarray(self.link_space_craft_r).copy().astype(np.int32)
+         lse = np.asarray(self.link_space_craft_e).copy().astype(np.int32)
+         ll = np.asarray(self.LINKS).copy().astype(np.int32)
+
+         if make_cpp:
+             self.pycppdetector_args = (
+                 dt,
+                 len(self.t),
+                 self.n.flatten().copy(),
+                 self.ltt.flatten().copy(),
+                 self.x.flatten().copy(),
+                 ll,
+                 lsr,
+                 lse,
+             )
+             self.dt = dt
+         else:
+             self.pycppdetector_args = None
+             self.dt = dt
+
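A usage sketch of configure(), assuming the module imports cleanly (it needs the compiled lisatools.cutils.detector extension and the orbit file referenced by EqualArmlengthOrbits below):

from lisatools.detector import EqualArmlengthOrbits

orbits = EqualArmlengthOrbits()
orbits.configure(linear_interp_setup=True)  # dense constant-dt grid; enables the C++ backend

print(orbits.size, orbits.dt)
ltt_12 = orbits.get_light_travel_times(1e5, 12)  # light travel time on link 12 at t = 1e5 s
pos_1 = orbits.get_pos(1e5, 1)                   # spacecraft 1 position at the same time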
+     @property
+     def dt(self) -> float:
+         """New time step if it exists."""
+         if self._dt is None:
+             raise ValueError("dt not available for t_arr only.")
+         return self._dt
+
+     @dt.setter
+     def dt(self, dt: float) -> None:
+         self._dt = dt
+
+     @property
+     def pycppdetector(self) -> pycppDetector:
+         """C++ class"""
+         if self._pycppdetector_args is None:
+             raise ValueError(
+                 "Asking for c++ class. Need to set linear_interp_setup = True when configuring."
+             )
+         self._pycppdetector = pycppDetector(*self._pycppdetector_args)
+         return self._pycppdetector
+
+     @property
+     def pycppdetector_args(self) -> tuple:
+         return self._pycppdetector_args
+
+     @pycppdetector_args.setter
+     def pycppdetector_args(self, pycppdetector_args: tuple) -> None:
+         self._pycppdetector_args = pycppdetector_args
+
+     @property
+     def size(self) -> int:
+         """Number of time points."""
+         self._check_configured()
+         return len(self.t)
+
+     def _check_configured(self) -> None:
+         if not self.configured:
+             raise ValueError(
+                 "Cannot request property. Need to use configure() method first."
+             )
+
+     def get_light_travel_times(
+         self, t: float | np.ndarray, link: int
+     ) -> float | np.ndarray:
+         return self.pycppdetector.get_light_travel_time(t, link)
+
+     def get_normal_unit_vec(self, t: float | np.ndarray, link: int) -> np.ndarray:
+         return self.pycppdetector.get_normal_unit_vec(t, link)
+
+     def get_pos(self, t: float | np.ndarray, sc: int) -> np.ndarray:
+         return self.pycppdetector.get_pos(t, sc)
+
+     @property
+     def ptr(self) -> int:
+         """Pointer to the underlying C++ object."""
+         return self.pycppdetector.ptr
+
+
+ class EqualArmlengthOrbits(Orbits):
+     """Equal Armlength Orbits"""
+
+     def __init__(self):
+         # TODO: fix this up
+         super().__init__(
+             "/Users/mlkatz1/Research/LISAanalysistools/examples/equalarmlength-orbits.h5"
+         )
+
+
+ class DefaultOrbits(EqualArmlengthOrbits):
+     """Set default orbit class to Equal Armlength orbits for now."""
+
+     pass
+
+
+ @dataclass
+ class LISAModelSettings:
+     """Required LISA model settings:
+
+     TODO: rename these
+
+     Args:
+         Soms_d: OMS displacement noise.
+         Sa_a: Acceleration noise.
+         orbits: Orbit class for the constellation.
+         name: Name of model.
+
+     """
+
+     Soms_d: float
+     Sa_a: float
+     orbits: Orbits
+     name: str
+
+
+ class LISAModel(LISAModelSettings, ABC):
+     """Model for the LISA Constellation"""
+
+     def __str__(self) -> str:
+         out = "LISA Constellation Configuration Settings:\n"
+         for key, item in self.__dict__.items():
+             out += f"{key}: {item}\n"
+         return out
+
+
+ scirdv1 = LISAModel((15.0e-12) ** 2, (3.0e-15) ** 2, DefaultOrbits(), "scirdv1")
+ proposal = LISAModel((10.0e-12) ** 2, (3.0e-15) ** 2, DefaultOrbits(), "proposal")
+ mrdv1 = LISAModel((10.0e-12) ** 2, (2.4e-15) ** 2, DefaultOrbits(), "mrdv1")
+ sangria = LISAModel((10.0e-12) ** 2, (2.4e-15) ** 2, DefaultOrbits(), "sangria")
+
+ __stock_list_models__ = [scirdv1, proposal, mrdv1, sangria]
+ __stock_list_models_name__ = [tmp.name for tmp in __stock_list_models__]
+
+
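Beyond the stock entries above, a custom model can be built the same way. A hedged sketch follows (the noise values here are illustrative only, not an official budget, and DefaultOrbits() needs its orbit file to exist locally):

from lisatools.detector import LISAModel, DefaultOrbits

# Soms_d and Sa_a are entered as squared amplitudes, matching the stock definitions above
my_model = LISAModel((12.0e-12) ** 2, (2.7e-15) ** 2, DefaultOrbits(), "my_model")
print(my_model)  # __str__ lists the configuration settings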
+ def get_available_default_lisa_models() -> List[LISAModel]:
+     """Get the list of default LISA models.
+
+     Returns:
+         List of LISA models.
+
+     """
+     return __stock_list_models__
+
+
+ def get_default_lisa_model_from_str(model: str) -> LISAModel:
+     """Return a LISA model from a ``str`` input.
+
+     Args:
+         model: Model indicated with a ``str``.
+
+     Returns:
+         LISA model associated to that ``str``.
+
+     """
+     if model not in __stock_list_models_name__:
+         raise ValueError(
+             "Requested string model is not available. See lisatools.detector documentation."
+         )
+     return globals()[model]
+
+
+ def check_lisa_model(model: Any) -> LISAModel:
+     """Check input LISA model.
+
+     Args:
+         model: LISA model to check.
+
+     Returns:
+         LISA model, adjusted from ``str`` if a ``str`` was input.
+
+     """
+     if isinstance(model, str):
+         model = get_default_lisa_model_from_str(model)
+
+     if not isinstance(model, LISAModel):
+         raise ValueError("model argument not given correctly.")
+
+     return model
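Finally, a retrieval sketch for the helper functions above:

from lisatools.detector import (
    check_lisa_model,
    get_available_default_lisa_models,
)

print([m.name for m in get_available_default_lisa_models()])
# ['scirdv1', 'proposal', 'mrdv1', 'sangria']

model = check_lisa_model("sangria")  # str resolved via get_default_lisa_model_from_str
model = check_lisa_model(model)      # LISAModel instances pass through unchanged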