archeo 0.1.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
archeo-0.1.0/PKG-INFO ADDED
@@ -0,0 +1,55 @@
1
+ Metadata-Version: 2.1
2
+ Name: archeo
3
+ Version: 0.1.0
4
+ Summary:
5
+ License: MIT
6
+ Author: wyhwong
7
+ Author-email: wyhwong@link.cuhk.edu.hk
8
+ Requires-Python: >=3.11,<4.0
9
+ Classifier: License :: OSI Approved :: MIT License
10
+ Classifier: Programming Language :: Python :: 3
11
+ Classifier: Programming Language :: Python :: 3.11
12
+ Classifier: Programming Language :: Python :: 3.12
13
+ Classifier: Programming Language :: Python :: 3.13
14
+ Requires-Dist: corner (>=2.2.2,<3.0.0)
15
+ Requires-Dist: gwsurrogate (==0.5.0)
16
+ Requires-Dist: matplotlib (>=3.8.2,<4.0.0)
17
+ Requires-Dist: mpl-scatter-density (>=0.7,<0.8)
18
+ Requires-Dist: numpy (>=1.26.3,<2.0.0)
19
+ Requires-Dist: pandas (>=2.1.4,<3.0.0)
20
+ Requires-Dist: pyarrow (>=15.0.0,<16.0.0)
21
+ Requires-Dist: pyyaml (>=6.0.1,<7.0.0)
22
+ Requires-Dist: seaborn (>=0.13.1,<0.14.0)
23
+ Requires-Dist: setuptools (>=70.3.0,<71.0.0)
24
+ Requires-Dist: surfinbh (>=1.2.3,<2.0.0)
25
+ Requires-Dist: tqdm (>=4.66.4,<5.0.0)
26
+ Description-Content-Type: text/markdown
27
+
28
+ ## ARCHEO - Inferring the natal kick and parental mass posteriors of black holes in the Pair-instability Supernova (PISN) mass gap.
29
+
30
+ ## Basic Usage
31
+
32
+ The following example demonstrates how to use the package to visualize the prior and posterior distributions of a single event.
33
+
34
+ ```python
35
+ import archeo
36
+
37
+ # Load the mass/spin samples from a file
38
+ # They are expected to be a list of floats
39
+ mass_posterior = [68.0, 71.4, ..., 91.4]
40
+ spin_posterior = [0.31, 0.54, ..., 0.64]
41
+
42
+ # Create a prior (preset priors are "precessing" and "aligned_spin")
43
+ prior = archeo.Prior.from_prior_config("precessing")
44
+ # Create a posterior from the samples and the prior
45
+ posterior = prior.to_posterior(mass_posterior, spin_posterior)
46
+
47
+ # Visualize the prior and the posterior
48
+ archeo.visualize_prior_distribution(prior, output_dir="./")
49
+ archeo.visualize_posterior_estimation({"GW190521": posterior}, output_dir="./")
50
+ ```
51
+
52
+ ## Configure your own prior
53
+
54
+ Check out the preset priors in [precessing.py](./archeo/preset/precessing.py) and [aligned_spin.py](./archeo/preset/aligned_spin.py). Using these as templates, you can define your own prior by following the same structure.
55
+
archeo-0.1.0/README.md ADDED
@@ -0,0 +1,27 @@
1
+ ## ARCHEO - Inferring the natal kick and parental mass posteriors of black holes in the Pair-instability Supernova (PISN) mass gap.
2
+
3
+ ## Basic Usage
4
+
5
+ The following example demonstrates how to use the package to visualize the prior and posterior distributions of a single event.
6
+
7
+ ```python
8
+ import archeo
9
+
10
+ # Load the mass/spin samples from a file
11
+ # They are expected to be a list of floats
12
+ mass_posterior = [68.0, 71.4, ..., 91.4]
13
+ spin_posterior = [0.31, 0.54, ..., 0.64]
14
+
15
+ # Create a prior (preset priors are "precessing" and "aligned_spin")
16
+ prior = archeo.Prior.from_prior_config("precessing")
17
+ # Create a posterior from the samples and the prior
18
+ posterior = prior.to_posterior(mass_posterior, spin_posterior)
19
+
20
+ # Visualize the prior and the posterior
21
+ archeo.visualize_prior_distribution(prior, output_dir="./")
22
+ archeo.visualize_posterior_estimation({"GW190521": posterior}, output_dir="./")
23
+ ```
24
+
25
+ ## Configure your own prior
26
+
27
+ Check out the preset priors in [precessing.py](./archeo/preset/precessing.py) and [aligned_spin.py](./archeo/preset/aligned_spin.py). Using these as templates, you can define your own prior by following the same structure.
@@ -0,0 +1,5 @@
1
+ from archeo.constants import Fits
2
+ from archeo.core.prior import Prior
3
+ from archeo.preset import get_prior_config
4
+ from archeo.schema import PriorConfig
5
+ from archeo.visualization import visualize_posterior_estimation, visualize_prior_distribution
File without changes
@@ -0,0 +1,98 @@
1
+ import enum
2
+
3
+ import archeo.logger
4
+
5
+
6
# Module-level logger for this constants module
local_logger = archeo.logger.get_logger(__name__)

# Speed of light, used to convert dimensionless kick velocities to km/s
SPEED_OF_LIGHT = 299792.458  # km/s
9
+
10
+
11
class Fits(enum.Enum):
    """Surrogate (remnant) models for binary black hole merger simulations.

    Attributes:
        NRSUR3DQ8REMNANT: non precessing BHs with mass ratio<=8, anti-/aligned spin <= 0.8.
        NRSUR7DQ4REMNANT: precessing BHs with mass ratio<=4, generic spin <= 0.8.
        SURFINBH7DQ2: precessing BHs with mass ratio <= 2, generic spin <= 0.8.

    For details, please refer to https://pypi.org/project/surfinBH/.
    """

    NRSUR3DQ8REMNANT = "NRSur3dq8Remnant"
    NRSUR7DQ4REMNANT = "NRSur7dq4Remnant"
    SURFINBH7DQ2 = "surfinBH7dq2"

    def load(self):
        """Load the surfinBH fits corresponding to this member.

        Returns:
            fits (surfinBH.surfinBH.SurFinBH): The loaded fits.
        """

        # Imported lazily: loading surfinBH is expensive and only needed here.
        import surfinBH  # pylint: disable=import-outside-toplevel

        model_name = self.value
        description = surfinBH.fits_collection[model_name].desc
        local_logger.info(
            "Loading surfinBH %s, description: %s.",
            model_name,
            description,
        )
        return surfinBH.LoadFits(model_name)
41
+
42
+
43
class EscapeVelocity(enum.Enum):
    """Escape velocities of typical astrophysical environments (Unit in km s^-1)"""

    GLOBULAR_CLUSTER = 50.0
    MILKY_WAY = 600.0
    NUCLEAR_STAR_CLUSTER = 1500.0
    ELLIPTICAL_GALAXY = 2500.0

    def label(self):
        """Return the LaTeX label of this escape velocity."""

        labels = {
            "GLOBULAR_CLUSTER": "$v_{esc, GC}$",
            "MILKY_WAY": "$v_{esc, MW}$",
            "NUCLEAR_STAR_CLUSTER": "$v_{esc, NSC}$",
            "ELLIPTICAL_GALAXY": "$v_{esc, EG}$",
        }
        label = labels.get(self.name)
        if label is None:
            raise ValueError(f"Unknown escape velocity {self}")
        return label

    @classmethod
    def to_vlines(cls) -> dict[str, float]:
        """Build a label-to-velocity mapping for vlines plotting.

        Returns:
            vlines (Dict[str, float]): The escape velocity vlines.
        """

        return {member.label(): member.value for member in cls}
77
+
78
+
79
class Columns(str, enum.Enum):
    """Columns in the prior dataframe.

    Inherits from str so members can be used directly as pandas column
    labels. Member order is preserved by enum iteration; do not reorder.
    """

    # Parent (pre-merger) black holes: heavier and lighter component
    HEAVIER_BH_MASS = "m_1"
    HEAVIER_BH_SPIN = "a_1"
    HEAVIER_BH_CHI = "chi_1"
    LIGHTER_BH_MASS = "m_2"
    LIGHTER_BH_SPIN = "a_2"
    LIGHTER_BH_CHI = "chi_2"
    # Mass ratio m_1 / m_2
    MASS_RATIO = "q"
    # Fraction of the binary's total mass retained by the remnant
    RETAINED_MASS = "m_ret"
    # Bookkeeping for posterior estimation
    LIKELIHOOD = "l"
    RECOVERY_RATE = "r_rec"
    # Remnant (post-merger) black hole
    BH_MASS = "m_f"
    BH_KICK = "k_f"
    BH_VEL = "v_f"
    BH_CHI = "chi_f"
    BH_SPIN = "a_f"
    BH_EFF_SPIN = "a_eff"
    BH_PREC_SPIN = "a_prec"
File without changes
@@ -0,0 +1,51 @@
1
+ from typing import Callable
2
+
3
+ import numpy as np
4
+ import pandas as pd
5
+
6
+ import archeo.logger
7
+ from archeo.schema import Domain
8
+
9
+
10
local_logger = archeo.logger.get_logger(__name__)  # module-level logger
11
+
12
+
13
def get_mahapatra_mass_fn(mass: "Domain", alpha: float = 2.3, dm: float = 4.83, n_samples: int = 500000) -> Callable:
    """Get a mass function from Mahapatra's mass distribution.

    The distribution is a power law of index -alpha, smoothly tapered over a
    window of width dm above the lower mass bound.
    NOTE: For details, see https://arxiv.org/abs/2209.05766.

    Args:
        mass (Domain): Mass domain (only mass.low and mass.high are used).
        alpha (float): Power law index.
        dm (float): Tapering parameter.
        n_samples (int): Number of samples used to discretize the distribution.

    Returns:
        mass_fn (Callable): Zero-argument callable drawing one mass per call.
    """

    def _f(ds: pd.Series) -> pd.Series:
        """Calculate the function f in Mahapatra's paper (taper exponent)."""

        mp = ds - mass.low
        return np.exp(dm / mp + dm / (mp - dm))

    def smoothing_func(ds: pd.Series) -> pd.Series:
        """Return the unnormalized probability weight of each mass sample."""

        # Start from a smoothing factor of 1 everywhere. The previous
        # implementation copied the mass values and only overwrote the
        # strict < and > branches, so a sample exactly at mass.low + dm
        # kept its raw mass value as its weight — a boundary bug.
        probis = pd.Series(np.ones(len(ds)), index=ds.index)
        in_taper = ds < mass.low + dm
        probis[in_taper] = 1 / (_f(ds[in_taper]) + 1)
        # Apply the power-law factor on top of the taper
        probis *= ds ** (-alpha)
        return probis

    # Discretize the distribution: uniform support, weighted resampling
    masses = pd.Series(np.random.uniform(mass.low, mass.high, size=n_samples))
    probis = smoothing_func(masses)
    probis /= probis.sum()

    def mass_from_mahapatra() -> float:
        """Generate a mass from Mahapatra's mass distribution."""

        return np.random.choice(masses, p=probis)

    return mass_from_mahapatra
@@ -0,0 +1,257 @@
1
+ from typing import Union
2
+
3
+ import numpy as np
4
+ import pandas as pd
5
+
6
+ import archeo.logger
7
+ from archeo.constants import SPEED_OF_LIGHT
8
+ from archeo.constants import Columns as C
9
+ from archeo.core.simulator import Simulator
10
+ from archeo.preset import get_prior_config
11
+ from archeo.schema import PriorConfig
12
+ from archeo.utils.executor import MultiThreadExecutor
13
+
14
+
15
local_logger = archeo.logger.get_logger(__name__)  # module-level logger
16
+
17
+
18
class Prior(pd.DataFrame):
    """A class to represent the prior distribution.

    A Prior is a pandas DataFrame of simulated binary-black-hole remnants
    (columns defined in archeo.constants.Columns), extended with settings
    that control how posterior samples are retrieved from it.
    """

    def __init__(
        self,
        *args,
        ignore_simulated_mass: bool = False,
        sample_ratio: int = 1,
        spin_tolerance: float = 0.05,  # unit: dimensionless
        mass_tolerance: float = 1.0,  # unit: solar mass
        **kwargs,
    ) -> None:
        """Construct a prior dataframe.

        Args:
            *args: Positional arguments forwarded to pd.DataFrame.
            ignore_simulated_mass: Whether to ignore the simulated mass.
                If True, retrieve_samples matches on spin only and injects
                the measured mass into the returned samples.
            sample_ratio (int): The number of samples to be sampled each time
            spin_tolerance (float): The tolerance of the spin
            mass_tolerance (float): The tolerance of the mass
            **kwargs: Keyword arguments forwarded to pd.DataFrame.
        """

        super().__init__(*args, **kwargs)

        # Underscore-prefixed attributes are plain instance attributes, not
        # dataframe columns.
        self._ignore_simulated_mass = ignore_simulated_mass
        self._sample_ratio = sample_ratio
        self._spin_tolerance = spin_tolerance
        self._mass_tolerance = mass_tolerance

    def _sample_from_possible_samples(self, df: pd.DataFrame) -> pd.DataFrame:
        """Sample from a dataframe.

        Args:
            df (pd.DataFrame): The dataframe to sample from.

        Returns:
            df (pd.DataFrame): The sampled dataframe (self._sample_ratio rows
                drawn with replacement; returned unchanged, i.e. empty, if
                no compatible samples exist).
        """

        if df.empty:
            local_logger.warning("No similar samples in the prior.")
        else:
            df = df.sample(self._sample_ratio, replace=True)
        return df

    def retrieve_samples(self, spin_measure: float, mass_measure: float) -> pd.DataFrame:
        """Retrieve the samples from prior compatible with one measurement.

        Args:
            spin_measure (float): The measured spin
            mass_measure (float): The measured mass

        Returns:
            pd.DataFrame: The sampled dataframe
        """

        if not self._ignore_simulated_mass:
            # Find the possible samples in the prior
            # Based on:
            # 1. mass_prior - tol < mass_measure < mass_prior + tol
            # 2. spin_prior - tol < spin_measure < spin_prior + tol
            possible_samples = self.loc[
                ((self[C.BH_MASS] - mass_measure).abs() < self._mass_tolerance)
                & ((self[C.BH_SPIN] - spin_measure).abs() < self._spin_tolerance)
            ]
            # Likelihood = fraction of the prior compatible with the measurement
            likelihood = len(possible_samples) / len(self)

            # Sample n_sample samples from the possible samples
            samples = self._sample_from_possible_samples(possible_samples)
            samples[C.LIKELIHOOD] = likelihood
            return samples

        # Find the possible samples in the prior
        # Based on:
        # 1. spin_prior - tol < spin_measure < spin_prior + tol
        possible_samples = self.loc[(self[C.BH_SPIN] - spin_measure).abs() < self._spin_tolerance]
        likelihood = len(possible_samples) / len(self)

        # Sample n_sample samples from the possible samples
        samples = self._sample_from_possible_samples(possible_samples)

        # Calculate the mass parameters (for mass not injected case):
        # split the measured (remnant) mass back into component masses using
        # the sampled retained-mass fraction and mass ratio
        samples[C.HEAVIER_BH_MASS] = (
            mass_measure / samples[C.RETAINED_MASS] * samples[C.MASS_RATIO] / (1 + samples[C.MASS_RATIO])
        )
        samples[C.LIGHTER_BH_MASS] = mass_measure / samples[C.RETAINED_MASS] / (1 + samples[C.MASS_RATIO])
        samples[C.BH_MASS] = mass_measure
        samples[C.LIKELIHOOD] = likelihood
        return samples

    @property
    def _constructor(self):
        """Return the constructor of the class.

        Returning pd.DataFrame (not Prior) means pandas operations on a
        Prior deliberately yield plain DataFrames, not new Prior instances.
        """

        return pd.DataFrame

    @classmethod
    def from_feather(cls, path: str, **kwargs) -> "Prior":
        """Read the feather file.

        Args:
            path (str): The path to the feather file.
            **kwargs: The keyword arguments for parental class (pd.DataFrame).

        Returns:
            Prior: The prior distribution.
        """

        return cls(pd.read_feather(path), **kwargs)

    @classmethod
    def from_csv(cls, path: str, **kwargs) -> "Prior":
        """Read the csv file.

        Args:
            path (str): The path to the csv file.
            **kwargs: The keyword arguments for parental class (pd.DataFrame).

        Returns:
            Prior: The prior distribution
        """

        return cls(pd.read_csv(path), **kwargs)

    @classmethod
    def from_parquet(cls, path: str, **kwargs) -> "Prior":
        """Read the parquet file.

        Args:
            path (str): The path to the parquet file.
            **kwargs: The keyword arguments for parental class (pd.DataFrame).

        Returns:
            Prior: The prior distribution.
        """

        return cls(pd.read_parquet(path), **kwargs)

    @classmethod
    def from_prior_config(cls, prior_config: Union[PriorConfig, str], use_threads=True, **kwargs) -> "Prior":
        """Generate the prior from the prior config.

        Args:
            prior_config (Union[PriorConfig, str]): The prior configuration,
                or the name of a preset configuration.
            use_threads (bool): Whether to use threads.
            **kwargs: The keyword arguments for the class.

        Returns:
            Prior: The prior distribution.
        """

        if isinstance(prior_config, str):
            prior_config = get_prior_config(prior_config)

        simulator = Simulator(prior_config)
        return cls.from_simulator(simulator, use_threads=use_threads, **kwargs)

    @classmethod
    def from_simulator(cls, simulator: Simulator, use_threads=True, **kwargs) -> "Prior":
        """Generate the prior from the simulator.

        Args:
            simulator (Simulator): The simulator.
            use_threads (bool): Whether to use threads.
            **kwargs: The keyword arguments for the class.

        Returns:
            Prior: The prior distribution.
        """

        df = cls(simulator.simulate(use_threads=use_threads), **kwargs)

        # Extract more information from the samples

        # Define nan recovery rate (only meaningful after to_posterior)
        df[C.RECOVERY_RATE] = float("nan")

        # Calculate the mass ratio (q = m_1 / m_2 >= 1)
        m1, m2 = df[C.HEAVIER_BH_MASS], df[C.LIGHTER_BH_MASS]
        df[C.MASS_RATIO] = q = m1 / m2

        # Calculate the remnant mass from the retained-mass fraction
        df[C.BH_MASS] = df[C.RETAINED_MASS] * (m1 + m2)

        # Calculate the BH kick velocity: |v_f| scaled by SPEED_OF_LIGHT
        # (km/s) — v_f is presumably stored in units of c; confirm against
        # the surfinBH convention.
        df[C.BH_KICK] = df[C.BH_VEL].apply(lambda vf: np.sqrt(np.dot(vf, vf)) * SPEED_OF_LIGHT)

        # Calculate the BH spin (magnitude of the remnant spin vector)
        df[C.BH_SPIN] = df[C.BH_CHI].apply(lambda vf: np.sqrt(np.dot(vf, vf)))

        # Calculate the parental spins (magnitudes of the component spins)
        df[C.HEAVIER_BH_SPIN] = df[C.HEAVIER_BH_CHI].apply(lambda chi: np.sqrt(np.dot(chi, chi)))
        df[C.LIGHTER_BH_SPIN] = df[C.LIGHTER_BH_CHI].apply(lambda chi: np.sqrt(np.dot(chi, chi)))

        # Calculate the effective spin: mass-weighted mean of the
        # z-components of the two spin vectors
        a1z = df[C.HEAVIER_BH_CHI].apply(lambda chi: chi[-1])
        a2z = df[C.LIGHTER_BH_CHI].apply(lambda chi: chi[-1])
        df[C.BH_EFF_SPIN] = (m1 * a1z + m2 * a2z) / (m1 + m2)

        # Calculate the precession spin from the in-plane (x, y) spin
        # components of both components
        a1h = df[C.HEAVIER_BH_CHI].apply(lambda chi: np.sqrt(chi[0] ** 2 + chi[1] ** 2))
        a2h = df[C.LIGHTER_BH_CHI].apply(lambda chi: np.sqrt(chi[0] ** 2 + chi[1] ** 2))
        df[C.BH_PREC_SPIN] = np.maximum(a1h, (4 / q + 3) / (3 / q + 4) / q * a2h)

        return df

    def to_posterior(
        self,
        mass_posterior: list[float],
        spin_posterior: list[float],
        use_threads=True,
    ) -> pd.DataFrame:
        """Convert the prior to the posterior.

        Args:
            mass_posterior (list[float]): The posterior mass.
            spin_posterior (list[float]): The posterior spin.
            use_threads (bool): Whether to use threads.

        Returns:
            pd.DataFrame: The posterior distribution.
        """

        if use_threads:
            exc = MultiThreadExecutor()
            input_kwargs = [
                dict(spin_measure=spin_measure, mass_measure=mass_measure)
                for spin_measure, mass_measure in zip(spin_posterior, mass_posterior)
            ]
            samples = exc.run(func=self.retrieve_samples, input_kwargs=input_kwargs)

        else:
            samples = [
                self.retrieve_samples(spin_measure=spin_measure, mass_measure=mass_measure)
                for spin_measure, mass_measure in zip(spin_posterior, mass_posterior)
            ]

        df_posterior = pd.concat(samples)
        # Recovery rate = retrieved rows / maximum possible rows
        # (each measurement can contribute at most sample_ratio rows)
        df_posterior[C.RECOVERY_RATE] = len(df_posterior) / (len(mass_posterior) * self._sample_ratio)

        return df_posterior
@@ -0,0 +1,127 @@
1
+ from dataclasses import asdict
2
+
3
+ import numpy as np
4
+ import pandas as pd
5
+ from tqdm import tqdm
6
+
7
+ from archeo.core.mahapatra import get_mahapatra_mass_fn
8
+ from archeo.schema import Binary, Event, PriorConfig
9
+ from archeo.utils.executor import MultiThreadExecutor
10
+ from archeo.utils.math import sph2cart
11
+
12
+
13
+ class Simulator:
14
+ """Simulator class to simulate binary black hole mergers"""
15
+
16
+ def __init__(self, prior_config: PriorConfig):
17
+ """Initializes the simulator with the given prior configuration
18
+
19
+ Args:
20
+ prior_config (PriorConfig): The prior configuration to use
21
+ """
22
+
23
+ self._prior_config = prior_config
24
+
25
+ self._fits = self._prior_config.fits.load()
26
+ self._n_samples = self._prior_config.n_samples
27
+
28
+ if self._prior_config.is_mahapatra:
29
+ self._mass_fn = get_mahapatra_mass_fn(mass=self._prior_config.mass)
30
+ else:
31
+ self._mass_fn = self._prior_config.mass.draw
32
+
33
+ self._theta_fn = self._prior_config.theta.draw
34
+ self._phi_fn = self._prior_config.phi.draw
35
+ self._spin_fn = self._prior_config.spin.draw
36
+
37
+ self._q_bounds = self._prior_config.mass_ratio
38
+
39
+ self._is_spin_aligned = self._prior_config.is_spin_aligned
40
+ self._only_up_aligned_spin = self._prior_config.is_only_up_aligned_spin
41
+
42
+ def __call__(self) -> Event:
43
+ """Simulates a binary black hole merger event
44
+
45
+ Returns:
46
+ Event: The simulated event
47
+ """
48
+
49
+ b = self._get_binary()
50
+
51
+ q = b.m_1 / b.m_2
52
+ v_f, _ = self._fits.vf(q, b.chi_1, b.chi_2)
53
+ chi_f, _ = self._fits.chif(q, b.chi_1, b.chi_2)
54
+ m_ret, _ = self._fits.mf(q, b.chi_1, b.chi_2)
55
+
56
+ return Event(
57
+ m_1=b.m_1,
58
+ m_2=b.m_2,
59
+ m_ret=m_ret,
60
+ v_f=v_f,
61
+ chi_1=b.chi_1,
62
+ chi_2=b.chi_2,
63
+ chi_f=chi_f,
64
+ )
65
+
66
+ def simulate(self, use_threads=True) -> pd.DataFrame:
67
+ """Simulates multiple binary black hole merger events"""
68
+
69
+ if use_threads:
70
+ exc = MultiThreadExecutor()
71
+ events = exc.run(self, [{} for _ in range(self._n_samples)])
72
+ else:
73
+ events = [self() for _ in tqdm(range(self._n_samples))]
74
+
75
+ df = pd.DataFrame([asdict(event) for event in events])
76
+ return df
77
+
78
+ def _get_binary(self) -> Binary:
79
+ """Draws a binary from the prior distribution
80
+
81
+ Returns:
82
+ Binary: The drawn binary
83
+ """
84
+
85
+ m_1, m_2 = self._get_masses()
86
+ chi_1, chi_2 = self._get_spin(), self._get_spin()
87
+
88
+ return Binary(m_1=m_1, m_2=m_2, chi_1=chi_1, chi_2=chi_2)
89
+
90
+ def _get_spin(self) -> tuple[float, float, float]:
91
+ """Draws the spin of the binary
92
+
93
+ Returns:
94
+ tuple[float, float, float]: The drawn spin
95
+ """
96
+
97
+ spin = self._spin_fn()
98
+
99
+ if self._is_spin_aligned:
100
+ if self._only_up_aligned_spin:
101
+ return (0, 0, spin)
102
+
103
+ direction = np.random.choice([-1, 1])
104
+ return (0, 0, direction * spin)
105
+
106
+ theta = np.arccos(-1 + 2 * self._theta_fn())
107
+ phi = self._phi_fn() * np.pi
108
+ univ = sph2cart(theta=theta, phi=phi)
109
+ return tuple(spin * univ)
110
+
111
+ def _get_masses(self) -> tuple[float, float]:
112
+ """Draws the masses of the binary
113
+
114
+ Returns:
115
+ tuple[float, float]: The drawn masses
116
+ """
117
+
118
+ masses = (self._mass_fn(), self._mass_fn())
119
+ m_1, m_2 = sorted(masses, reverse=True)
120
+
121
+ # Check whether the mass ratio is in the domain
122
+ # If not, resample the masses (recursion)
123
+ q = m_1 / m_2
124
+ if not self._q_bounds.contain(q):
125
+ return self._get_masses()
126
+
127
+ return (m_1, m_2)
@@ -0,0 +1,14 @@
1
import os
import zoneinfo


# For logging
# Levels use the stdlib logging numeric scale (10=DEBUG, 30=WARNING)
STREAMING_LOG_LEVEL = int(os.getenv("STREAMING_LOG_LEVEL", "30"))
FILE_LOG_LEVEL = int(os.getenv("FILE_LOG_LEVEL", "10"))
LOG_FILEPATH = os.getenv("LOG_FILEPATH", "./runtime.log")
LOG_FMT = os.getenv("LOG_FMT", "%(asctime)s [%(name)s | %(levelname)s]: %(message)s")
LOG_DATEFMT = os.getenv("LOG_DATEFMT", "%Y-%m-%dT%H:%M:%SZ")
# "or" (rather than a getenv default) so an empty TZ env var also falls back to UTC
TZ = zoneinfo.ZoneInfo(os.getenv("TZ") or "UTC")

# For simulation
MAX_MULTITHREAD_WORKER = int(os.getenv("MAX_MULTITHREAD_WORKER", "20"))