lisaanalysistools 1.1.6__cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl

This diff represents the content of publicly available package versions that have been released to a supported registry. It is provided for informational purposes only and reflects the changes between package versions as they appear in the public registry.

Potentially problematic release.


This version of lisaanalysistools might be problematic.

Files changed (42)
  1. lisaanalysistools/git_version.py +7 -0
  2. lisaanalysistools-1.1.6.dist-info/METADATA +295 -0
  3. lisaanalysistools-1.1.6.dist-info/RECORD +42 -0
  4. lisaanalysistools-1.1.6.dist-info/WHEEL +6 -0
  5. lisaanalysistools-1.1.6.dist-info/licenses/LICENSE +201 -0
  6. lisatools/__init__.py +58 -0
  7. lisatools/_version.py +34 -0
  8. lisatools/analysiscontainer.py +474 -0
  9. lisatools/cutils/__init__.py +126 -0
  10. lisatools/datacontainer.py +312 -0
  11. lisatools/detector.py +704 -0
  12. lisatools/diagnostic.py +990 -0
  13. lisatools/git_version.py.in +7 -0
  14. lisatools/orbit_files/equalarmlength-orbits-best-fit-to-esa.h5 +0 -0
  15. lisatools/orbit_files/equalarmlength-orbits.h5 +0 -0
  16. lisatools/orbit_files/esa-trailing-orbits.h5 +0 -0
  17. lisatools/sampling/__init__.py +0 -0
  18. lisatools/sampling/likelihood.py +882 -0
  19. lisatools/sampling/moves/__init__.py +0 -0
  20. lisatools/sampling/moves/skymodehop.py +110 -0
  21. lisatools/sampling/prior.py +646 -0
  22. lisatools/sampling/stopping.py +320 -0
  23. lisatools/sampling/utility.py +411 -0
  24. lisatools/sensitivity.py +972 -0
  25. lisatools/sources/__init__.py +6 -0
  26. lisatools/sources/bbh/__init__.py +1 -0
  27. lisatools/sources/bbh/waveform.py +106 -0
  28. lisatools/sources/defaultresponse.py +36 -0
  29. lisatools/sources/emri/__init__.py +1 -0
  30. lisatools/sources/emri/waveform.py +79 -0
  31. lisatools/sources/gb/__init__.py +1 -0
  32. lisatools/sources/gb/waveform.py +69 -0
  33. lisatools/sources/utils.py +459 -0
  34. lisatools/sources/waveformbase.py +41 -0
  35. lisatools/stochastic.py +327 -0
  36. lisatools/utils/__init__.py +0 -0
  37. lisatools/utils/constants.py +54 -0
  38. lisatools/utils/exceptions.py +95 -0
  39. lisatools/utils/parallelbase.py +11 -0
  40. lisatools/utils/utility.py +245 -0
  41. lisatools_backend_cpu/git_version.py +7 -0
  42. lisatools_backend_cpu/pycppdetector.cpython-310-x86_64-linux-gnu.so +0 -0
lisatools/datacontainer.py
@@ -0,0 +1,312 @@
+ from __future__ import annotations
+ import warnings
+ from abc import ABC
+ from typing import Any, Tuple, Optional, List
+
+ import math
+ import numpy as np
+ from scipy import interpolate
+ import matplotlib.pyplot as plt
+
+ try:
+     import cupy as cp
+
+ except (ModuleNotFoundError, ImportError):
+     import numpy as cp
+
+ from . import detector as lisa_models
+ from .utils.utility import AET, get_array_module
+ from .utils.constants import *
+ from .stochastic import (
+     StochasticContribution,
+     FittedHyperbolicTangentGalacticForeground,
+ )
+ from .sensitivity import SensitivityMatrix
+
+
+ class DataResidualArray:
+     pass
+
+
+ class DataResidualArray:
+     """Container to hold Data, residual, or template information.
+
+     This class abstracts the connection with the sensitivity matrices to make this analysis
+     as generic as possible for the user frontend, while handling
+     special computations in the backend.
+
+     Args:
+         data_res_in: Data, residual, or template input information. Can be a list, numpy array
+             or another :class:`DataResidualArray`.
+         dt: Timestep in seconds.
+         f_arr: Frequency array.
+         df: Delta f in frequency domain.
+         **kwargs: For future compatibility.
+
+     """
+
+     def __init__(
+         self,
+         data_res_in: List[np.ndarray] | np.ndarray | DataResidualArray,
+         dt: Optional[float] = None,
+         f_arr: Optional[np.ndarray] = None,
+         df: Optional[float] = None,
+         **kwargs: dict,
+     ) -> None:
+         if isinstance(data_res_in, DataResidualArray):
+             for key, item in data_res_in.__dict__.items():
+                 setattr(self, key, item)
+
+         else:
+             self._check_inputs(dt=dt, f_arr=f_arr, df=df)
+             self.data_res_arr = data_res_in
+             self._store_time_and_frequency_information(dt=dt, f_arr=f_arr, df=df)
+
+     @property
+     def init_kwargs(self) -> dict:
+         """Initial dt, df, f_arr"""
+         return self._init_kwargs
+
+     @init_kwargs.setter
+     def init_kwargs(self, init_kwargs: dict) -> None:
+         """Set initial kwargs."""
+         self._init_kwargs = init_kwargs
+
+     def _check_inputs(
+         self,
+         dt: Optional[float] = None,
+         f_arr: Optional[np.ndarray] = None,
+         df: Optional[float] = None,
+     ):
+         number_of_none = 0
+
+         number_of_none += 1 if dt is None else 0
+         number_of_none += 1 if f_arr is None else 0
+         number_of_none += 1 if df is None else 0
+
+         if number_of_none == 3:
+             raise ValueError("Must provide either df, dt, or f_arr.")
+
+         elif number_of_none == 1:
+             raise ValueError(
+                 "Can only provide one of dt, f_arr, or df. Not more than one."
+             )
+         self.init_kwargs = dict(dt=dt, f_arr=f_arr, df=df)
+
+     def _store_time_and_frequency_information(
+         self,
+         dt: Optional[float] = None,
+         f_arr: Optional[np.ndarray] = None,
+         df: Optional[float] = None,
+     ):
+         if dt is not None:
+             self._dt = dt
+             self._Tobs = self.data_length * dt
+             self._df = 1 / self._Tobs
+             self._fmax = 1 / (2 * dt)
+             xp = get_array_module(self.data_res_arr)
+             self._f_arr = xp.asarray(np.fft.rfftfreq(self.data_length, dt))
+
+             # transform data
+             tmp = xp.fft.rfft(self.data_res_arr, axis=-1) * self._dt
+             del self._data_res_arr
+             self._data_res_arr = tmp
+             self.data_length = self._data_res_arr.shape[-1]
+
+         elif df is not None:
+             self._df = df
+             self._Tobs = 1 / self._df
+             self._fmax = (self.data_length - 1) * df
+             self._dt = 1 / (2 * self._fmax)
+             self._f_arr = np.arange(0.0, self._fmax, self._df)
+
+         elif f_arr is not None:
+             self._f_arr = f_arr
+             self._fmax = f_arr.max()
+             # constant spacing
+             if np.all(np.diff(f_arr) == np.diff(f_arr)[0]):
+                 self._df = np.diff(f_arr)[0].item()
+
+                 if f_arr[0] == 0.0:
+                     # could be fft because of constant spacing and f_arr[0] == 0.0
+                     self._Tobs = 1 / self._df
+                     self._dt = 1 / (2 * self._fmax)
+
+                 else:
+                     # cannot be fft basis
+                     self._Tobs = None
+                     self._dt = None
+
+             else:
+                 self._df = None
+                 self._Tobs = None
+                 self._dt = None
+
+         if len(self.f_arr) != self.data_length:
+             raise ValueError(
+                 "Entered or determined f_arr does not have the same length as the data channel inputs."
+             )
+
+     @property
+     def fmax(self):
+         """Maximum frequency."""
+         return self._fmax
+
+     @property
+     def f_arr(self):
+         """Frequency array."""
+         return self._f_arr
+
+     @property
+     def dt(self):
+         """Time step in seconds."""
+         if self._dt is None:
+             raise ValueError("dt cannot be determined from this f_arr input.")
+
+         return self._dt
+
+     @property
+     def Tobs(self):
+         """Observation time in seconds"""
+         if self._Tobs is None:
+             raise ValueError("Tobs cannot be determined from this f_arr input.")
+
+         return self._Tobs
+
+     @property
+     def df(self):
+         """Delta f in the frequency domain."""
+         if self._df is None:
+             raise ValueError("df cannot be determined from this f_arr input.")
+
+         return self._df
+
+     @property
+     def frequency_arr(self) -> np.ndarray:
+         """Frequency array"""
+         return self._f_arr
+
+     @property
+     def data_res_arr(self) -> np.ndarray:
+         """Actual data residual array"""
+         return self._data_res_arr
+
+     @data_res_arr.setter
+     def data_res_arr(self, data_res_arr: List[np.ndarray] | np.ndarray) -> None:
+         """Set ``data_res_arr``."""
+         self._data_res_arr_input = data_res_arr
+
+         if (
+             isinstance(data_res_arr, np.ndarray) or isinstance(data_res_arr, cp.ndarray)
+         ) and data_res_arr.ndim == 1:
+             data_res_arr = [data_res_arr]
+
+         elif (
+             isinstance(data_res_arr, np.ndarray) or isinstance(data_res_arr, cp.ndarray)
+         ) and data_res_arr.ndim == 2:
+             data_res_arr = list(data_res_arr)
+
+         new_out = np.full(len(data_res_arr), None, dtype=object)
+         self.data_length = None
+         for i in range(len(data_res_arr)):
+             current_data = data_res_arr[i]
+             if isinstance(current_data, np.ndarray) or isinstance(
+                 current_data, cp.ndarray
+             ):
+                 if self.data_length is None:
+                     self.data_length = len(current_data)
+                 else:
+                     assert len(current_data) == self.data_length
+
+                 new_out[i] = current_data
+             else:
+                 raise ValueError
+
+         self.nchannels = len(new_out)
+         xp = get_array_module(new_out[0])
+         self._data_res_arr = xp.asarray(list(new_out), dtype=new_out[0].dtype)
+
+     def __getitem__(self, index: tuple) -> np.ndarray:
+         """Index this class directly in ``self.data_res_arr``."""
+         return self.data_res_arr[index]
+
+     def __setitem__(self, index: tuple, value: float | np.ndarray) -> np.ndarray:
+         """Index this class directly in ``self.data_res_arr``."""
+         self.data_res_arr[index] = value
+
+     @property
+     def ndim(self) -> int:
+         """Number of dimensions in the `data_res_arr`."""
+         return self.data_res_arr.ndim
+
+     def flatten(self) -> np.ndarray:
+         """Flatten the ``data_res_arr``."""
+         return self.data_res_arr.flatten()
+
+     @property
+     def shape(self) -> tuple:
+         """Shape of ``data_res_arr``."""
+         return self.data_res_arr.shape
+
+     def loglog(
+         self,
+         ax: Optional[List[plt.Axes] | plt.Axes] = None,
+         fig: Optional[plt.Figure] = None,
+         inds: Optional[List[int] | int] = None,
+         char_strain: Optional[bool] = False,
+         **kwargs: dict,
+     ) -> Tuple[plt.Figure, plt.Axes]:
+         """Produce a log-log plot of the data.
+
+         Args:
+             ax: Matplotlib Axes objects to add plots. Either a list of Axes objects or a single Axes object.
+             fig: Matplotlib figure object.
+             inds: Integer index to select out which data to add to a single access.
+                 A list can be provided if ax is a list. They must be the same length.
+             char_strain: If ``True`` return plot in characteristic strain representation.
+             **kwargs: Keyword arguments to be passed to ``loglog`` function in matplotlib.
+
+         Returns:
+             Matplotlib figure and axes objects in a 2-tuple.
+
+
+         """
+         if ax is None and fig is None:
+             nrows = 1
+             ncols = self.shape[0]
+
+             fig, ax = plt.subplots(nrows, ncols, sharex=True, sharey=True)
+             ax = ax.ravel()
+             inds_list = range(len(ax))
+
+         elif ax is not None:
+             if isinstance(ax, list):
+                 assert len(ax) == np.prod(self.shape[:-1])
+                 if inds is None:
+                     inds_list = list(np.arange(np.prod(self.shape[:-1])))
+                 else:
+                     assert isinstance(inds, list) and len(inds) == len(ax)
+                     inds_list = inds
+
+             elif isinstance(ax, plt.Axes):
+                 assert inds is not None and (
+                     isinstance(inds, tuple) or isinstance(inds, int)
+                 )
+                 ax = [ax]
+                 inds_list = [inds]
+
+         elif fig is not None:
+             raise NotImplementedError
+
+         for i, ax_tmp in zip(inds_list, ax):
+             plot_in = np.abs(self.data_res_arr[i])
+             if char_strain:
+                 plot_in *= self.frequency_arr
+             ax_tmp.loglog(self.frequency_arr, plot_in, **kwargs)
+
+         return (fig, ax)
+
+     @property
+     def char_strain(self) -> np.ndarray:
+         """Characteristic strain representation of the data."""
+         return np.sqrt(self.f_arr) * np.abs(self.data_res_arr)
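
For orientation, here is a minimal usage sketch of the DataResidualArray container added in this release. It assumes the wheel is installed and that the class is imported from lisatools.datacontainer, as suggested by the file list above; the two synthetic channels, the time step, and the signal frequency are illustrative placeholders, not values taken from the package.

import numpy as np
from lisatools.datacontainer import DataResidualArray  # module path per the file list above

dt = 10.0                          # illustrative time step in seconds
t = np.arange(2**14) * dt          # time grid for the synthetic channels
channels = [
    np.sin(2 * np.pi * 1e-3 * t),  # placeholder channel 1
    np.cos(2 * np.pi * 1e-3 * t),  # placeholder channel 2
]

# The class expects exactly one of dt, df, or f_arr. With dt, the time-domain
# input is transformed internally via an rfft scaled by dt (see the diff above).
data = DataResidualArray(channels, dt=dt)

print(data.nchannels, data.shape)  # 2 channels, frequency-domain length N // 2 + 1
print(data.df, data.fmax)          # df = 1 / Tobs, fmax = 1 / (2 * dt)

# Quick look at the spectra; per the diff, char_strain=True scales each curve by frequency.
fig, ax = data.loglog(char_strain=True)

Constructing from f_arr instead leaves dt and Tobs undefined (their properties raise ValueError) unless the frequency grid is evenly spaced and starts at zero, matching the branching in _store_time_and_frequency_information above.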