arvi 0.2.9__py3-none-any.whl → 0.2.12__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- arvi/dace_wrapper.py +5 -2
- arvi/instrument_specific.py +23 -9
- arvi/kepmodel_wrapper.py +296 -0
- arvi/nasaexo_wrapper.py +7 -3
- arvi/reports.py +108 -1
- arvi/stats.py +30 -5
- arvi/timeseries.py +176 -43
- arvi/utils.py +46 -0
- {arvi-0.2.9.dist-info → arvi-0.2.12.dist-info}/METADATA +1 -1
- {arvi-0.2.9.dist-info → arvi-0.2.12.dist-info}/RECORD +13 -12
- {arvi-0.2.9.dist-info → arvi-0.2.12.dist-info}/WHEEL +0 -0
- {arvi-0.2.9.dist-info → arvi-0.2.12.dist-info}/licenses/LICENSE +0 -0
- {arvi-0.2.9.dist-info → arvi-0.2.12.dist-info}/top_level.txt +0 -0
arvi/dace_wrapper.py
CHANGED

```diff
@@ -193,12 +193,13 @@ def get_observations_from_instrument(star, instrument, user=None, main_id=None,
     except TypeError:
         msg = f'no {instrument} observations for {star}'
         raise ValueError(msg) from None
+
     if (isinstance(instrument, str)):
         filters = {
             "ins_name": {"contains": [instrument]},
             "obj_id_daceid": {"contains": [dace_id]}
         }
-    elif (isinstance(instrument, list)):
+    elif (isinstance(instrument, (list, tuple, np.ndarray))):
         filters = {
             "ins_name": {"contains": instrument},
             "obj_id_daceid": {"contains": [dace_id]}
@@ -240,7 +241,9 @@ def get_observations_from_instrument(star, instrument, user=None, main_id=None,
         'rv': result['spectro_ccf_rv'][mask3],
         'rv_err': result['spectro_ccf_rv_err'][mask3],
         'berv': result['spectro_cal_berv'][mask3],
-        'ccf_noise':
+        'ccf_noise': np.sqrt(
+            np.square(result['spectro_ccf_rv_err'][mask3]) - np.square(result['spectro_cal_drift_noise'][mask3])
+        ),
         'rhk': result['spectro_analysis_rhk'][mask3],
         'rhk_err': result['spectro_analysis_rhk_err'][mask3],
         'contrast': result['spectro_ccf_contrast'][mask3],
```
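The new `ccf_noise` entry subtracts the calibration drift noise from the total CCF RV uncertainty in quadrature. A minimal numpy sketch of that formula with made-up numbers (the arrays below are hypothetical, not DACE output):

```python
import numpy as np

# hypothetical per-observation uncertainties, in m/s
rv_err = np.array([1.20, 0.95, 1.10])       # total CCF RV error (spectro_ccf_rv_err)
drift_noise = np.array([0.30, 0.25, 0.40])  # calibration drift term (spectro_cal_drift_noise)

# quadrature subtraction, as in the new 'ccf_noise' column
ccf_noise = np.sqrt(np.square(rv_err) - np.square(drift_noise))
print(ccf_noise)  # ~ [1.162, 0.917, 1.025]
```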
arvi/instrument_specific.py
CHANGED

```diff
@@ -123,7 +123,10 @@ def HARPS_commissioning(self, mask=True, plot=True):
     if check(self, 'HARPS') is None:
         return
 
-    affected =
+    affected = np.logical_and(
+        self.instrument_array == 'HARPS03',
+        self.time < HARPS_start
+    )
     total_affected = affected.sum()
 
     if self.verbose:
@@ -133,7 +136,7 @@ def HARPS_commissioning(self, mask=True, plot=True):
 
     if mask:
         self.mask[affected] = False
-        self._propagate_mask_changes()
+        self._propagate_mask_changes(_remove_instrument=False)
 
     if plot:
         self.plot(show_masked=True)
@@ -155,7 +158,14 @@ def HARPS_fiber_commissioning(self, mask=True, plot=True):
     if check(self, 'HARPS') is None:
         return
 
-    affected =
+    affected = np.logical_and(
+        self.time >= HARPS_technical_intervention_range[0],
+        self.time <= HARPS_technical_intervention_range[1]
+    )
+    affected = np.logical_and(
+        affected,
+        np.char.find(self.instrument_array, 'HARPS') == 0
+    )
     total_affected = affected.sum()
 
     if self.verbose:
@@ -165,7 +175,7 @@ def HARPS_fiber_commissioning(self, mask=True, plot=True):
 
     if mask:
         self.mask[affected] = False
-        self._propagate_mask_changes()
+        self._propagate_mask_changes(_remove_instrument=False)
 
     if plot:
         self.plot(show_masked=True)
@@ -187,7 +197,10 @@ def ESPRESSO_commissioning(self, mask=True, plot=True):
     if check(self, 'ESPRESSO') is None:
         return
 
-    affected =
+    affected = np.logical_and(
+        self.instrument_array == 'ESPRESSO18',
+        self.time < ESPRESSO_start
+    )
     total_affected = affected.sum()
 
     if self.verbose:
@@ -197,7 +210,7 @@ def ESPRESSO_commissioning(self, mask=True, plot=True):
 
     if mask:
         self.mask[affected] = False
-        self._propagate_mask_changes()
+        self._propagate_mask_changes(_remove_instrument=False)
 
     if plot and total_affected > 0:
         self.plot(show_masked=True)
@@ -246,7 +259,7 @@ def ADC_issues(self, mask=True, plot=True, check_headers=False):
 
     if mask:
         self.mask[intersect] = False
-        self._propagate_mask_changes()
+        self._propagate_mask_changes(_remove_instrument=False)
 
     if plot:
         self.plot(show_masked=True)
@@ -282,7 +295,7 @@ def blue_cryostat_issues(self, mask=True, plot=True):
 
     if mask:
         self.mask[intersect] = False
-        self._propagate_mask_changes()
+        self._propagate_mask_changes(_remove_instrument=False)
 
     if plot:
         self.plot(show_masked=True)
@@ -330,7 +343,7 @@ def qc_scired_issues(self, plot=False, **kwargs):
         return
 
     self.mask[affected] = False
-    self._propagate_mask_changes()
+    self._propagate_mask_changes(_remove_instrument=False)
 
     if plot:
         self.plot(show_masked=True)
@@ -364,6 +377,7 @@ class ISSUES:
             logger.error('are the data binned? cannot proceed to mask these points...')
 
         results = list(filter(lambda x: x is not None, results))
+        self._propagate_mask_changes()
 
         try:
             return np.logical_or.reduce(results)
```
arvi/kepmodel_wrapper.py
ADDED

New file (296 lines):

```python
import io
from contextlib import redirect_stdout, contextmanager
from string import ascii_lowercase
from matplotlib import pyplot as plt
import numpy as np

from kepmodel.rv import RvModel
from spleaf.cov import merge_series
from spleaf.term import Error, InstrumentJitter

from .setup_logger import setup_logger
from .utils import timer, adjust_lightness


class model:
    logger = setup_logger()
    # periodogram settings
    Pmin, Pmax, nfreq = 1.5, 10_000, 100_000

    @contextmanager
    def ew(self):
        for name in self.model.keplerian:
            self.model.set_keplerian_param(name, param=['P', 'la0', 'K', 'e', 'w'])
        try:
            yield
        finally:
            for name in self.model.keplerian:
                self.model.set_keplerian_param(name, param=['P', 'la0', 'K', 'esinw', 'ecosw'])

    @property
    def nu0(self):
        return 2 * np.pi / self.Pmax

    @property
    def dnu(self):
        return (2 * np.pi / self.Pmin - self.nu0) / (self.nfreq - 1)

    def __init__(self, s):
        self.s = s
        self.instruments = s.instruments
        self.Pmax = 2 * np.ptp(s.time)
        ts = self.ts = self.s._mtime_sorter

        # t, y_ ye, series_index = merge_series(
        #     [_s.mtime for _s in s],
        #     [_s.mvrad for _s in s],
        #     [_s.msvrad for _s in s],
        # )

        inst_jit = self._get_jitters()

        self.model = RvModel(self.s.mtime[ts], self.s.mvrad[ts],
                             err=Error(self.s.msvrad[ts]), **inst_jit)
        self.np = 0
        self._add_means()

    def _add_means(self):
        for inst in self.s.instruments:
            # if inst not in self.s.instrument_array[self.s.mask]:
            #     continue
            mask = self.s.instrument_array[self.s.mask][self.ts] == inst
            self.model.add_lin(
                derivative=1.0 * mask,
                name=f"offset_{inst}",
                value=getattr(self.s, inst).mvrad.mean(),
            )
        self.model.fit_lin()

    def _get_jitters(self):
        inst_jit = {}
        for inst in self.s.instruments:
            inst_jit[f'jit_{inst}'] = InstrumentJitter(
                indices=self.s.instrument_array[self.s.mask] == inst,
                sig=self.s.svrad[self.s.mask].min()
            )
        return inst_jit

    def _set_jitters(self, value=0.0):
        for par in self.model.cov.param:
            if 'jit' in par:
                self.model.set_param(value, f'cov.{par}')
        # self.model.fit()

    def _equal_coralie_offsets(self):
        if self.s._check_instrument('CORALIE') is None:
            return
        mask = np.char.find(self.s.instrument_array, 'CORALIE') == 0
        mean = self.s.vrad[mask].mean()
        for inst in self.instruments:
            if 'CORALIE' in inst:
                self.model.set_param(mean, f'lin.offset_{inst}')


    def __repr__(self):
        with self.ew():
            with io.StringIO() as buf, redirect_stdout(buf):
                self.model.show_param()
                output = buf.getvalue()
        return output

    def to_table(self, **kwargs):
        from .utils import pretty_print_table
        lines = repr(self)
        lines = lines.replace(' [deg]', '_[deg]')
        lines = lines.encode().replace(b'\xc2\xb1', b'').decode()
        lines = lines.split('\n')
        lines = [line.split() for line in lines]
        lines = [[col.replace('_[deg]', ' [deg]') for col in line] for line in lines]
        pretty_print_table(lines[:-2], **kwargs)

    @property
    def fit_param(self):
        return self.model.fit_param

    def fit(self):
        # fit offsets
        self.model.fit_param = [f'lin.offset_{inst}' for inst in self.instruments]
        # fit jitters
        self.model.fit_param += [f'cov.{par}' for par in self.model.cov.param]
        # fit keplerian(s)
        self.model.fit_param += [
            f'kep.{k}.{p}'
            for k, v in self.model.keplerian.items()
            for p in v._param
        ]
        # if self.np == 0:
        #     self._set_jitters(0.1 * np.std(self.model.y - self.model.model()))
        try:
            self.model.fit()
        except Exception as e:
            print(e)


    def plot(self, **kwargs):
        fig, ax = self.s.plot(**kwargs)
        tt = self.s._tt()
        time_offset = 50000 if 'remove_50000' in kwargs else 0

        for i, inst in enumerate(self.s):
            inst_name = inst.instruments[0].replace('-', '_')
            val = self.model.get_param(f'lin.offset_{inst_name}')
            x = np.array([inst.mtime.min(), inst.mtime.max()]) - time_offset
            y = [val, val]
            ax.plot(x, y, ls='--', color=f'C{i}')
            mask = (tt > inst.mtime.min()) & (tt < inst.mtime.max())
            color = adjust_lightness(f'C{i}', 1.2)
            ax.plot(tt[mask] - time_offset,
                    val + self.model.keplerian_model(tt)[mask],
                    color=color)

        return fig, ax

    def plot_phasefolding(self, planets=None, ax=None):
        t = self.model.t
        res = self.model.residuals()
        sig = np.sqrt(self.model.cov.A)

        Msmooth = np.linspace(0, 360, 1000)

        if planets is None:
            planets = list(self.model.keplerian.keys())

        if ax is None:
            fig, axs = plt.subplots(
                1, len(planets), sharex=True, sharey=True, constrained_layout=True,
                squeeze=False
            )
        else:
            axs = np.atleast_1d(ax)
            fig = axs[0].figure

        for p, ax in zip(planets, axs.flat):
            self.model.set_keplerian_param(p, param=['P', 'la0', 'K', 'e', 'w'])
            kep = self.model.keplerian[p]
            P = self.model.get_param(f'kep.{p}.P')
            M0 = (180 / np.pi * (self.model.get_param(f'kep.{p}.la0') - self.model.get_param(f'kep.{p}.w')))
            M = (M0 + 360 / P * t) % 360
            reskep = res + kep.rv(t)
            tsmooth = (Msmooth - M0) * P / 360
            mod = kep.rv(tsmooth)

            ax.plot([0, 360], [0, 0], '--', c='gray', lw=1)
            ax.plot(Msmooth, mod, 'k-', lw=3, rasterized=True)
            for inst in self.instruments:
                sel = self.s.instrument_array[self.s.mask] == inst
                ax.errorbar(M[sel], reskep[sel], sig[sel], fmt='.',
                            rasterized=True, alpha=0.7)

            ax.set(ylabel='RV [m/s]', xlabel='Mean anomaly [deg]',
                   xticks=np.arange(0, 361, 90))
            ax.minorticks_on()
            self.model.set_keplerian_param(p, param=['P', 'la0', 'K', 'esinw', 'ecosw'])

        # handles, labels = ax.get_legend_handles_labels()
        # fig.legend(handles, labels, loc='center left', bbox_to_anchor=(0.9, 0.5))
        return fig, axs

    def add_planet_from_period(self, period):
        self.model.add_keplerian_from_period(period, fit=True)
        self.model.fit()
        self.np += 1

    def _plot_periodogram(self, P=None, power=None, kmax=None, faplvl=None,
                          **kwargs):
        if P is None and power is None:
            with timer('periodogram'):
                nu, power = self.model.periodogram(self.nu0, self.dnu, self.nfreq)
            P = 2 * np.pi / nu

        if 'ax' in kwargs:
            ax = kwargs.pop('ax')
            fig = ax.figure
        else:
            fig, ax = plt.subplots(1, 1, constrained_layout=True)
        ax.semilogx(P, power, 'k', lw=1, rasterized=True)
        ax.set_ylim(0, 1.2 * power.max())
        ax.set(xlabel='Period [days]', ylabel='Normalized power')

        if kmax is None:
            kmax = np.argmax(power)
        ax.plot(P[kmax], power[kmax], 'or', ms=4)
        ax.text(P[kmax], power[kmax] * 1.1, f'{P[kmax]:.3f} d',
                ha='right', va='center', color='r')

        if faplvl is None:
            faplvl = self.model.fap(power[kmax], nu.max())
        ax.text(0.99, 0.95, f'FAP = {faplvl:.2g}', transform=ax.transAxes,
                ha='right', va='top')

        return fig, ax

    def add_keplerian_from_periodogram(self, fap_max=0.001, plot=False,
                                       fit_first=True):
        if fit_first and self.np == 0:
            self.fit()

        self._equal_coralie_offsets()

        with timer('periodogram'):
            nu, power = self.model.periodogram(self.nu0, self.dnu, self.nfreq)

        P = 2 * np.pi / nu
        # Compute FAP
        kmax = np.argmax(power)
        faplvl = self.model.fap(power[kmax], nu.max())
        self.logger.info('highest periodogram peak:')
        self.logger.info(f'P={P[kmax]:.4f} d, power={power[kmax]:.3f}, FAP={faplvl:.2e}')
        if plot:
            self._plot_periodogram(P, power, kmax, faplvl)

        if faplvl > fap_max:
            print('non-significant peak')
            self.fit()
            return False

        # add new planet
        letter = ascii_lowercase[1:][self.np]
        self.model.add_keplerian_from_period(P[kmax], name=letter,
                                             guess_kwargs={'emax': 0.8})
        # self.model.set_keplerian_param(letter, param=['P', 'la0', 'K', 'e', 'w'])
        self.model.set_keplerian_param(letter, param=['P', 'la0', 'K', 'esinw', 'ecosw'])
        self.np += 1
        self.fit()

        if plot:
            self.plot()

        return True

    @property
    def offsets(self):
        names = [f'lin.offset_{inst}' for inst in self.instruments]
        return {
            name.replace('lin.', ''): self.model.get_param(name)
            for name in names
        }

    @property
    def jitters(self):
        names = [f'cov.{par}' for par in self.model.cov.param]
        return {
            name.replace('cov.', '').replace('.sig', ''): self.model.get_param(name)
            for name in names
        }

    @property
    def keplerians(self):
        keps = {name: {} for name in self.model.keplerian.keys()}
        for name in keps:
            params = self.model.keplerian[name]._param
            pars = [f'kep.{name}.{p}' for p in params]
            keps[name] = {
                par.replace(f'kep.{name}.', ''): self.model.get_param(par)
                for par in pars
            }
        return keps
```
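A minimal usage sketch of the new wrapper (the target name is hypothetical, and the assumption that `RV` can be built directly from a star name is not shown in this diff; the methods and properties used below are those defined in the file above):

```python
from arvi.timeseries import RV
from arvi.kepmodel_wrapper import model

s = RV('HD10180')        # hypothetical target
m = model(s)             # sets up per-instrument offsets and jitters from the masked data

# add Keplerians from successive periodogram peaks until the highest peak is not significant
while m.add_keplerian_from_periodogram(fap_max=0.001, plot=True):
    pass

m.fit()
print(m.offsets)         # {'offset_<instrument>': value, ...}
print(m.jitters)         # {'jit_<instrument>': value, ...}
print(m.keplerians)      # {'b': {'P': ..., 'la0': ..., 'K': ..., ...}, ...}
fig, ax = m.plot()
fig2, axs = m.plot_phasefolding()
```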
arvi/nasaexo_wrapper.py
CHANGED

```diff
@@ -6,7 +6,7 @@ from io import StringIO
 import numpy as np
 from astropy.timeseries import LombScargle
 
-from .setup_logger import
+from .setup_logger import setup_logger
 from kepmodel.rv import RvModel
 from spleaf.term import Error
 
@@ -32,6 +32,7 @@ def run_query(query):
 
 class Planets:
     def __init__(self, system):
+        logger = setup_logger()
         self.s = system
         self.verbose = system.verbose
 
@@ -163,6 +164,7 @@ class Planets:
         self.model.show_param()
 
     def fit_all(self, adjust_data=False):
+        logger = setup_logger()
         self.model.fit()
 
         newP = np.array([self.model.get_param(f'kep.{i}.P') for i in range(self.np)])
@@ -187,5 +189,7 @@ class Planets:
         self.s._build_arrays()
 
     def __repr__(self):
-
-
+        P = list(map(float, self.P))
+        K = list(map(float, self.K))
+        e = list(map(float, self.e))
+        return f'{self.star}({self.np} planets, {P=}, {K=}, {e=})'
```
arvi/reports.py
CHANGED

```diff
@@ -40,6 +40,7 @@ class REPORTS:
         rows.append([self.star] + [''] * len(self.instruments) + [''])
         rows.append([''] + self.instruments + ['full'])
         rows.append(['N'] + list(self.NN.values()) + [self.N])
+        rows.append(['T span'] + [np.ptp(s.mtime).round(1) for s in self] + [np.ptp(self.mtime).round(1)])
         rows.append(['RV span'] + [np.ptp(s.mvrad).round(3) for s in self] + [np.ptp(self.mvrad).round(3)])
         rows.append(['RV std'] + [s.mvrad.std().round(3) for s in self] + [self.mvrad.std().round(3)])
         rows.append(['eRV mean'] + [s.msvrad.mean().round(3) for s in self] + [self.msvrad.mean().round(3)])
@@ -201,4 +202,110 @@ class REPORTS:
             pdf.savefig(fig)
         # os.system(f'evince {save} &')
 
-        return fig
+        return fig
+
+
+    def kepmodel_report(self, fit_keplerians=3, save=None, nasaexo_title=False):
+        import matplotlib.pyplot as plt
+        import matplotlib.gridspec as gridspec
+        from matplotlib.backends.backend_pdf import PdfPages
+        logger = setup_logger()
+
+        def set_align_for_column(table, col, align="left"):
+            cells = [key for key in table._cells if key[1] == col]
+            for cell in cells:
+                table._cells[cell]._loc = align
+                table._cells[cell]._text.set_horizontalalignment(align)
+
+        from .kepmodel_wrapper import model
+        m = model(self)
+
+        while fit_keplerians > 0:
+            if m.add_keplerian_from_periodogram():
+                fit_keplerians -= 1
+            else:
+                break
+
+        m.fit()
+
+
+        # size = A4
+        size = 8.27, 11.69
+        fig = plt.figure(figsize=size, constrained_layout=True)
+        gs = gridspec.GridSpec(5, 3, figure=fig, height_ratios=[2, 1, 1, 1, 1])
+
+        # first row, all columns
+        ax1 = plt.subplot(gs[0, :])
+
+        if nasaexo_title:
+            title = str(self.planets).replace('(', '\n').replace(')', '')
+            star, planets = title.split('\n')
+            planets = planets.replace('planets,', 'known planets\n')
+            ax1.set_title(star, loc='left', fontsize=14)
+            ax1.set_title(planets, loc='right', fontsize=10)
+        else:
+            title = f'{self.star}'
+            ax1.set_title(title, loc='left', fontsize=14)
+        # ax1.set_title(r"\href{http://www.google.com}{link}", color='blue',
+        #               loc='center')
+
+        m.plot(ax=ax1, N_in_label=True, tooltips=False, remove_50000=True)
+
+        ax1.legend().remove()
+        legend_ax = plt.subplot(gs[1, -1])
+        legend_ax.axis('off')
+        leg = plt.legend(*ax1.get_legend_handles_labels(),
+                         prop={'family': 'monospace'})
+        legend_ax.add_artist(leg)
+
+        ax2 = plt.subplot(gs[1, :-1])
+        m._plot_periodogram(ax=ax2)
+
+        ax3 = plt.subplot(gs[2, 0])
+        ax3.axis('off')
+        items = list(m.offsets.items())
+        items = [[item[0].replace('offset_', 'offet '), item[1].round(3)] for item in items]
+        table = ax3.table(items, loc='center', edges='open')
+        table.auto_set_font_size(False)
+        table.set_fontsize(9)
+        set_align_for_column(table, 1, align="left")
+
+        ax4 = plt.subplot(gs[2, 1])
+        ax4.axis('off')
+        items = list(m.jitters.items())
+        items = [[item[0].replace('jit_', 'jitter '), item[1].round(3)] for item in items]
+        table = ax4.table(items, loc='center', edges='open')
+        table.auto_set_font_size(False)
+        table.set_fontsize(9)
+        set_align_for_column(table, 1, align="left")
+
+        ax5 = plt.subplot(gs[2, 2])
+        ax5.axis('off')
+        items = [
+            ['N', m.model.n],
+            [r'N$_{\rm free}$', len(m.model.fit_param)],
+            [r'$\chi^2$', round(m.model.chi2(), 2)],
+            [r'$\chi^2_r$', round(m.model.chi2() / (m.model.n - len(m.model.fit_param)), 2)],
+            [r'$\log L$', round(m.model.loglike(), 2)],
+        ]
+        table = ax5.table(items, loc='center', edges='open')
+        table.auto_set_font_size(False)
+        table.set_fontsize(9)
+        set_align_for_column(table, 1, align="left")
+
+        for i, name in enumerate(m.keplerians):
+            ax = plt.subplot(gs[3, i])
+            m.plot_phasefolding(planets=name, ax=ax)
+
+            ax = plt.subplot(gs[4, i])
+            ax.axis('off')
+            with m.ew():
+                items = list(m.keplerians[name].items())
+            items = [[item[0], item[1].round(3)] for item in items]
+            table = ax.table(items, loc='center', edges='open')
+            table.auto_set_font_size(False)
+            table.set_fontsize(9)
+            set_align_for_column(table, 1, align="left")
+
+
+        return fig, m
```
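A sketch of how the new one-page report might be produced (the target name and output path are hypothetical; `kepmodel_report` is a method of `REPORTS`, which the `RV` class inherits, and returns the figure together with the fitted kepmodel wrapper):

```python
from arvi.timeseries import RV

s = RV('HD10180')   # hypothetical target
# fit up to two significant Keplerians, then lay out the A4-sized summary page
fig, m = s.kepmodel_report(fit_keplerians=2)
fig.savefig('HD10180_report.pdf')
print(m.keplerians)  # parameters of the Keplerians shown in the report
```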
arvi/stats.py
CHANGED

```diff
@@ -1,4 +1,6 @@
+from functools import partial
 import numpy as np
+from scipy.stats import norm
 
 def wmean(a, e):
     """Weighted mean of array `a`, with uncertainties given by `e`.
@@ -50,16 +52,39 @@ def wrms(a, e, ignore_nans=False):
     w = 1 / e**2
     return np.sqrt(np.sum(w * (a - np.average(a, weights=w))**2) / sum(w))
 
+# from https://stackoverflow.com/questions/20601872/numpy-or-scipy-to-calculate-weighted-median
+def weighted_quantiles_interpolate(values, weights, quantiles):
+    i = np.argsort(values)
+    c = np.cumsum(weights[i])
+    q = np.searchsorted(c, quantiles * c[-1])
+    # Ensure right-end isn't out of bounds. Thanks @Jeromino!
+    q_plus1 = np.clip(q + 1, a_min=None, a_max=values.shape[0] - 1)
+    return np.where(
+        c[q] / c[-1] == quantiles,
+        0.5 * (values[i[q]] + values[i[q_plus1]]),
+        values[i[q]],
+    )
 
-def sigmaclip_median(a, low=4.0, high=4.0):
+weighted_median = partial(weighted_quantiles_interpolate, quantiles=0.5)
+
+
+
+def sigmaclip_median(a, low=4.0, high=4.0, k=1/norm.ppf(3/4)):
     """
     Same as scipy.stats.sigmaclip but using the median and median absolute
     deviation instead of the mean and standard deviation.
 
     Args:
-        a (array):
-
-
+        a (array):
+            Array containing data
+        low (float):
+            Number of MAD to use for the lower clipping limit
+        high (float):
+            Number of MAD to use for the upper clipping limit
+        k (float):
+            Scale factor for the MAD to be an estimator of the standard
+            deviation. Depends on the (assumed) distribution of the data.
+            Default value is for the normal distribution (=1/norm.ppf(3/4)).
     Returns:
         SigmaclipResult: Object with the following attributes:
             - `clipped`: Masked array of data
@@ -71,7 +96,7 @@ def sigmaclip_median(a, low=4.0, high=4.0):
     c = np.asarray(a).ravel()
     delta = 1
     while delta:
-        c_mad = median_abs_deviation(c)
+        c_mad = median_abs_deviation(c) * k
         c_median = np.median(c)
         size = c.size
         critlower = c_median - c_mad * low
```
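A small, self-contained sketch of the two additions, with made-up numbers. The factor `k = 1/norm.ppf(3/4) ≈ 1.4826` rescales the MAD so it estimates the standard deviation of normally distributed data:

```python
import numpy as np
from arvi.stats import weighted_median, sigmaclip_median

values = np.array([1.0, 2.0, 3.0, 100.0])   # one obvious outlier
weights = np.array([1.0, 1.0, 1.0, 0.1])    # e.g. inverse uncertainties

print(weighted_median(values, weights))     # 2.0 -- barely affected by the outlier

result = sigmaclip_median(values, low=4, high=4)
print(result.clipped)                       # the point at 100.0 is clipped away
print(result.lower, result.upper)           # limits, median +/- 4 * k * MAD
```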
arvi/timeseries.py
CHANGED

```diff
@@ -4,10 +4,9 @@ from typing import Union
 from functools import partial, partialmethod
 from glob import glob
 import warnings
-from copy import deepcopy
+from copy import copy, deepcopy
 from datetime import datetime, timezone
 
-# import lazy_loader as lazy
 import numpy as np
 
 from .setup_logger import setup_logger
@@ -26,7 +25,8 @@ from .HZ import getHZ_period
 from .instrument_specific import ISSUES
 from .reports import REPORTS
 from .utils import sanitize_path, strtobool, there_is_internet, timer, chdir
-
+from .setup_logger import setup_logger
+logger = setup_logger()
 
 # units = lazy_import('astropy.units')
 # units = lazy.load('astropy.units')
@@ -411,35 +411,71 @@ class RV(ISSUES, REPORTS):
         self._did_correct_berv = False
         self.__post_init__()
 
-    def snapshot(self, directory=None, delete_others=False):
-
+    def snapshot(self, directory=None, delete_others=False, compress=False):
+        if compress:
+            try:
+                import compress_pickle as pickle
+            except ImportError:
+                logger.warning('compress_pickle not installed, not compressing')
+                import pickle
+                compress = False
+        else:
+            import pickle
+        import re
         from datetime import datetime
+
         ts = datetime.now().timestamp()
         star_name = self.star.replace(' ', '')
         file = f'{star_name}_{ts}.pkl'
 
+        server = None
         if directory is None:
             directory = '.'
         else:
-
-
-
-
-
-            import re
-            other_pkls = [
-                f for f in os.listdir(directory)
-                if re.search(fr'{star_name}_\d+.\d+.pkl', f)
-            ]
-            for pkl in other_pkls:
-                os.remove(os.path.join(directory, pkl))
+            if ':' in directory:
+                server, directory = directory.split(':')
+                delete_others = False
+            else:
+                os.makedirs(directory, exist_ok=True)
 
         metadata = {
             'star': self.star,
             'timestamp': ts,
             'description': 'arvi snapshot'
        }
-
+
+
+        if server:
+            import posixpath
+            from .utils import server_sftp, server_file
+            with server_sftp(server=server) as sftp:
+                try:
+                    sftp.chdir(directory)
+                except FileNotFoundError:
+                    sftp.mkdir(directory)
+                finally:
+                    sftp.chdir(directory)
+                with sftp.open(file, 'wb') as f:
+                    print('saving snapshot to server...', end='', flush=True)
+                    pickle.dump((self, metadata), f, protocol=0)
+                    print('done')
+            file = posixpath.join(directory, file)
+        else:
+            if delete_others:
+                other_pkls = [
+                    f for f in os.listdir(directory)
+                    if re.search(fr'{star_name}_\d+.\d+.pkl', f)
+                ]
+                for pkl in other_pkls:
+                    os.remove(os.path.join(directory, pkl))
+
+            file = os.path.join(directory, file)
+
+            if compress:
+                file += '.gz'
+
+            with open(file, 'wb') as f:
+                pickle.dump((self, metadata), f)
 
         if self.verbose:
             logger.info(f'saved snapshot to {file}')
@@ -514,6 +550,15 @@ class RV(ISSUES, REPORTS):
     def instrument_array(self):
         return np.concatenate([[i] * n for i, n in self.NN.items()])
 
+    def _instrument_mask(self, instrument):
+        if isinstance(instrument, str):
+            return np.char.find(self.instrument_array, instrument) == 0
+        elif isinstance(instrument, (list, tuple, np.ndarray)):
+            m = np.full_like(self.time, False, dtype=bool)
+            for i in instrument:
+                m |= np.char.find(self.instrument_array, i) == 0
+            return m
+
     @property
     def rms(self) -> float:
         """ Weighted rms of the (masked) radial velocities """
@@ -540,6 +585,11 @@ class RV(ISSUES, REPORTS):
     def _mtime_sorter(self):
         return np.argsort(self.mtime)
 
+    @property
+    def timespan(self):
+        """ Total time span of the (masked) observations """
+        return np.ptp(self.mtime)
+
     def _index_from_instrument_index(self, index, instrument):
         ind = np.where(self.instrument_array == instrument)[0]
         return ind[getattr(self, instrument).mask][index]
@@ -633,22 +683,28 @@ class RV(ISSUES, REPORTS):
         import pickle
         from datetime import datetime
         if star is None:
-            assert file.endswith('.pkl'), 'expected a .pkl file'
-
+            assert file.endswith(('.pkl', '.pkl.gz')), 'expected a .pkl file'
+            basefile = os.path.basename(file)
+            star, timestamp = basefile.replace('.pkl.gz', '').replace('.pkl', '').split('_')
         else:
             try:
-                file = sorted(glob(f'{star}_*.*.pkl'))[-1]
+                file = sorted(glob(f'{star}_*.*.pkl*'))[-1]
             except IndexError:
                 raise ValueError(f'cannot find any file matching {star}_*.pkl')
-            star, timestamp = file.replace('.pkl', '').split('_')
+            star, timestamp = file.replace('.pkl.gz', '').replace('.pkl', '').split('_')
 
         dt = datetime.fromtimestamp(float(timestamp))
         if verbose:
             logger.info(f'reading snapshot of {star} from {dt}')
 
-
+        with open(file, 'rb') as f:
+            if file.endswith('.gz'):
+                import compress_pickle as pickle
+            s = pickle.load(f)
+
         if isinstance(s, tuple) and len(s) == 2:
             s, _metadata = s
+
         s._snapshot = file
         return s
 
@@ -1508,7 +1564,7 @@ class RV(ISSUES, REPORTS):
         """ Remove all observations that satisfy a condition
 
         Args:
-            condition (
+            condition (ndarray):
                 Boolean array of the same length as the observations
         """
         if self.verbose:
@@ -1601,6 +1657,16 @@ class RV(ISSUES, REPORTS):
         for inst in singles:
             self.remove_instrument(inst, strict=True)
 
+    def remove_more_than_n_per_night(self, n=2):
+        """ Remove whenever there are more than `n` observations per night """
+        ind = np.array([], dtype=int)
+        for s in self:
+            n_night = (np.abs(s.time[:, None] - s.time[None, :]) < 0.5).sum(axis=0)
+            ind_s = np.where(n_night >= n)[0]
+            ind = np.r_[ind, self._index_from_instrument_index(ind_s, s.instruments[0])]
+        if len(ind) > 0:
+            self.remove_point(ind)
+
     def remove_prog_id(self, prog_id):
         """ Remove observations from a given program ID """
         from glob import has_magic
@@ -1668,16 +1734,18 @@ class RV(ISSUES, REPORTS):
         self._propagate_mask_changes()
 
 
-    def _propagate_mask_changes(self):
+    def _propagate_mask_changes(self, _remove_instrument=True):
         """ link self.mask with each self.`instrument`.mask """
         masked = np.where(~self.mask)[0]
         for m in masked:
             inst = self.instruments[self.obs[m] - 1]
             n_before = (self.obs < self.obs[m]).sum()
             getattr(self, inst).mask[m - n_before] = False
-
-
-
+        if _remove_instrument:
+            instruments = copy(self.instruments)
+            for inst in instruments:
+                if getattr(self, inst).mtime.size == 0:
+                    self.remove_instrument(inst, strict=True)
 
     def secular_acceleration(self, epoch=None, just_compute=False, force_simbad=False):
         """
@@ -1695,9 +1763,12 @@ class RV(ISSUES, REPORTS):
             force_simbad (bool, optional):
                 Use Simbad proper motions even if Gaia is available
         """
-
+        # don't do it twice
+        if self._did_secular_acceleration and not just_compute:
             return
 
+        from astropy import units
+
         #as_yr = units.arcsec / units.year
         mas_yr = units.milliarcsecond / units.year
         mas = units.milliarcsecond
@@ -1829,15 +1900,21 @@ class RV(ISSUES, REPORTS):
 
         self._did_secular_acceleration = False
 
-    def sigmaclip(self, sigma=5, instrument=None,
+    def sigmaclip(self, sigma=5, quantity='vrad', instrument=None,
+                  strict=True):
         """
-        Sigma-clip RVs (per instrument!), by MAD away from
+        Sigma-clip RVs or other quantities (per instrument!), by MAD away from
+        the median.
 
         Args:
             sigma (float):
-                Number of MADs
+                Number of MADs away from the median
+            quantity (str):
+                Quantity to sigma-clip (by default the RVs)
             instrument (str, list):
                 Instrument(s) to sigma-clip
+            strict (bool):
+                Passed directly to self._check_instrument
         """
         #from scipy.stats import sigmaclip as dosigmaclip
         from .stats import sigmaclip_median as dosigmaclip
@@ -1846,20 +1923,26 @@ class RV(ISSUES, REPORTS):
             return
 
         instruments = self._check_instrument(instrument, strict)
+        if instruments is None:
+            return
         changed_instruments = []
 
         for inst in instruments:
             m = self.instrument_array == inst
-
+            d = getattr(self, quantity)
+
+            if np.isnan(d[m]).all():
+                continue
+
+            result = dosigmaclip(d[m & self.mask], low=sigma, high=sigma)
             # n = self.vrad[m].size - result.clipped.size
 
-            ind = m & self.mask &
-                  ((self.vrad < result.lower) | (self.vrad > result.upper))
+            ind = m & self.mask & ((d < result.lower) | (d > result.upper))
             n = ind.sum()
 
             if self.verbose and n > 0:
                 s = 's' if (n == 0 or n > 1) else ''
-                logger.warning(f'sigma-clip
+                logger.warning(f'sigma-clip {quantity} will remove {n} point{s} for {inst}')
 
             if n > 0:
                 self.mask[ind] = False
@@ -1884,21 +1967,32 @@ class RV(ISSUES, REPORTS):
         if config.return_self:
             return self
 
-    def clip_maxerror(self, maxerror:float):
-        """
+    def clip_maxerror(self, maxerror:float, instrument=None):
+        """
+        Mask out points with RV error larger than a given value. If `instrument`
+        is given, mask only observations from that instrument.
 
         Args:
             maxerror (float): Maximum error to keep.
+            instrument (str, list, tuple, ndarray): Instrument(s) to clip
         """
         if self._child:
             return
 
         self.maxerror = maxerror
+
+        if instrument is None:
+            inst_mask = np.ones_like(self.svrad, dtype=bool)
+        else:
+            inst_mask = self._instrument_mask(instrument)
+
         above = self.svrad > maxerror
-
-
+        old_mask = self.mask.copy()
+
+        self.mask[inst_mask & above] = False
 
         if self.verbose and above.sum() > 0:
+            n = (above[inst_mask] & old_mask[inst_mask]).sum()
             s = 's' if (n == 0 or n > 1) else ''
             logger.warning(f'clip_maxerror ({maxerror} {self.units}) removed {n} point' + s)
 
@@ -1906,6 +2000,36 @@ class RV(ISSUES, REPORTS):
         if config.return_self:
             return self
 
+    def sigmaclip_ew(self, sigma=5):
+        """ Sigma-clip EW (FWHM x contrast), by MAD away from the median """
+        from .stats import sigmaclip_median as dosigmaclip, weighted_median
+
+        S = deepcopy(self)
+        for _s in S:
+            m = _s.mask
+            _s.fwhm -= weighted_median(_s.fwhm[m], 1 / _s.fwhm_err[m])
+            _s.contrast -= weighted_median(_s.contrast[m], 1 / _s.contrast_err[m])
+        S._build_arrays()
+        ew = S.fwhm * S.contrast
+        ew_err = np.hypot(S.fwhm_err * S.contrast, S.fwhm * S.contrast_err)
+
+        wmed = weighted_median(ew[S.mask], 1 / ew_err[S.mask])
+        data = (ew - wmed) / ew_err
+        result = dosigmaclip(data, low=sigma, high=sigma)
+        ind = (data < result.lower) | (data > result.upper)
+        self.mask[ind] = False
+
+        if self.verbose and ind.sum() > 0:
+            n = ind.sum()
+            s = 's' if (n == 0 or n > 1) else ''
+            logger.warning(f'sigmaclip_ew removed {n} point' + s)
+
+        self._propagate_mask_changes()
+        if config.return_self:
+            return self
+
+
+
     def bin(self):
         """
         Nightly bin the observations.
@@ -1949,7 +2073,8 @@ class RV(ISSUES, REPORTS):
 
             # treat ccf_mask specially, doing a 'unique' bin
             if q == 'ccf_mask':
-
+                ccf_mask = getattr(s, q)[s.mask]
+                setattr(s, q, bin_ccf_mask(s.mtime, ccf_mask))
                 continue
 
             if Q.dtype != np.float64:
@@ -2317,7 +2442,7 @@ class RV(ISSUES, REPORTS):
         self.units = new_units
 
 
-    def put_at_systemic_velocity(self, factor=1.0):
+    def put_at_systemic_velocity(self, factor=1.0, ignore=None):
         """
         For instruments in which mean(RV) < `factor` * ptp(RV), "move" RVs to
         the systemic velocity from simbad. This is useful if some instruments
@@ -2327,6 +2452,9 @@ class RV(ISSUES, REPORTS):
         """
         changed = False
         for inst in self.instruments:
+            if ignore is not None:
+                if inst in ignore or any([i in inst for i in ignore]):
+                    continue
             changed_inst = False
             s = getattr(self, inst)
             if s.mask.any():
@@ -2360,6 +2488,11 @@ class RV(ISSUES, REPORTS):
         self.instruments = sorted(self.instruments, key=lambda i: getattr(self, i).time.max())
         self._build_arrays()
 
+    def put_instrument_last(self, instrument):
+        if not self._check_instrument(instrument, strict=True, log=True):
+            return
+        self.instruments = [i for i in self.instruments if i != instrument] + [instrument]
+        self._build_arrays()
 
     def save(self, directory=None, instrument=None, format='rdb',
              indicators=False, join_instruments=False, postfix=None,
@@ -2383,7 +2516,7 @@ class RV(ISSUES, REPORTS):
             Postfix to add to the filenames ([star]_[instrument]_[postfix].rdb).
         save_masked (bool, optional)
             If True, also save masked observations (those for which
-            self.mask ==
+            self.mask == False)
         save_nans (bool, optional)
             Whether to save NaN values in the indicators, if they exist. If
             False, the full observation which contains NaN values is not saved.
```
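Putting the timeseries changes together, a usage sketch: the target name, directory, server alias and thresholds are all made up, and only the method signatures come from the diff above.

```python
from arvi.timeseries import RV

s = RV('HD10180')                          # hypothetical target
print(s.timespan)                          # time span of the masked observations

s.clip_maxerror(5.0, instrument='HARPS')   # mask eRV > 5 (in self.units), HARPS* only
s.sigmaclip(sigma=5)                       # per-instrument MAD clipping of the RVs
s.sigmaclip(sigma=4, quantity='fwhm')      # ...or of any other stored quantity
s.sigmaclip_ew()                           # clip on the CCF EW (FWHM x contrast)
s.remove_more_than_n_per_night(3)          # drop points from nights with 3+ observations

s.snapshot('snapshots', compress=True)     # snapshots/HD10180_<timestamp>.pkl.gz
s.snapshot('myhost:snapshots')             # or upload over SFTP to a 'myhost' server
```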
arvi/utils.py
CHANGED

```diff
@@ -2,6 +2,8 @@ import os
 import sys
 import time
 from contextlib import contextmanager
+from functools import partial
+from collections import defaultdict
 
 try:
     from unittest.mock import patch
@@ -68,6 +70,50 @@ def all_logging_disabled():
     logging.disable(previous_level)
 
 
+class record_removals:
+    def __init__(self, s, storage=None):
+        """
+        A simple context manager to record removed files
+
+        Args:
+            s (RV):
+                An `RV` object
+            storage (dict):
+                A dictionary to store the removed files, with keys 'raw_file'
+                and 'reason' as lists.
+
+        Examples:
+            >>> with record_removals(s) as rec:
+            :     s.remove_instrument('HARPS')
+            :     rec.store('removed HARPS')
+            >>> rec.storage
+        """
+        self.s = s
+        if storage is None:
+            self.storage = defaultdict(list)
+        else:
+            if 'raw_file' not in storage:
+                storage['raw_file'] = []
+            if 'reason' not in storage:
+                storage['reason'] = []
+            self.storage = storage
+        self.raw_file_start = self.s.raw_file.copy()
+
+    def store(self, reason):
+        missing = ~ np.isin(self.raw_file_start, self.s.raw_file[self.s.mask])
+        if missing.any():
+            lost = self.raw_file_start[missing]
+            self.storage['raw_file'].extend(lost)
+            self.storage['reason'].extend(len(lost) * [reason])
+        self.raw_file_start = self.s.raw_file[self.s.mask].copy()
+
+    def __enter__(self):
+        return self
+
+    def __exit__(self, exc_type, exc_value, traceback):
+        pass
+
+
 @contextmanager
 def timer(name=None):
     """ A simple context manager to time a block of code """
```
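Following the pattern in the docstring above, a sketch of the new bookkeeping helper (the target, the clipping steps and the reason strings are hypothetical):

```python
from arvi.timeseries import RV
from arvi.utils import record_removals

s = RV('HD10180')   # hypothetical target
with record_removals(s) as rec:
    s.clip_maxerror(5.0)
    rec.store('eRV > 5')
    s.sigmaclip(sigma=5)
    rec.store('5-MAD sigma clip of the RVs')

# rec.storage pairs each removed raw_file with the reason it was dropped
for f, why in zip(rec.storage['raw_file'], rec.storage['reason']):
    print(f, '->', why)
```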
{arvi-0.2.9.dist-info → arvi-0.2.12.dist-info}/RECORD
CHANGED

```diff
@@ -4,35 +4,36 @@ arvi/ariadne_wrapper.py,sha256=YvilopJa9T4NwPcj3Nah_U8smSeSAU5-HYZMb_GJ-BQ,2232
 arvi/berv.py,sha256=eKnpuPC1w45UrUEyFRbs9F9j3bXz3kxYzNXbnRgvFQM,17596
 arvi/binning.py,sha256=NK9y9bUrdyWCbh79LkcRABHG-n5MtlETMHMvLj1z-OM,15437
 arvi/config.py,sha256=JkHSwF-EEqwwbcc8thGgbFc9udDZPjQH-9XFjqDepBY,2337
-arvi/dace_wrapper.py,sha256=
+arvi/dace_wrapper.py,sha256=i3aIRKSc2MylEzM5WbbU-BnuLbz8JlG1ez6sK_vvQH0,26125
 arvi/exofop_wrapper.py,sha256=8S7UEcrBAgANIweMV0-CvaWaVTPgGVo8vQQk_KRa0nU,2414
 arvi/extra_data.py,sha256=Xi65pI5kkzqlMmHGl9xFoumtH699611pJJ5PV-a_IfU,3397
 arvi/gaia_wrapper.py,sha256=HTuigIduin3raWfSC7QYuQxDk2dEXYH_4egRkzzg7Xw,4379
 arvi/headers.py,sha256=uvdJebw1M5YkGjE3vJJwYBOnLikib75uuZE9FXB5JJM,1673
-arvi/instrument_specific.py,sha256=
+arvi/instrument_specific.py,sha256=94oMb6UeH6tp7H8YXnXHpxEhIz2evz0iYsT_HrNOCTo,12105
+arvi/kepmodel_wrapper.py,sha256=mmHudetAZ4cBxKDwzQzgUydzkjhomCWw5VVuyiKfXq8,10288
 arvi/kima_wrapper.py,sha256=GrAZWkDCg8ukhW41M1VTadSbab0GBa6BIzjtAtvjk58,3891
 arvi/lbl_wrapper.py,sha256=_ViGVkpakvuBR_xhu9XJRV5EKHpj5Go6jBZGJZMIS2Y,11850
-arvi/nasaexo_wrapper.py,sha256=
+arvi/nasaexo_wrapper.py,sha256=ZKY3IUClqsJuysxDv0Gu51EnzMX7101zQb7UQy_urhI,7431
 arvi/plots.py,sha256=U4VUNyIx4h_rEFd7ZWgBcawUcIGcURES0A4VXIBKp3U,35240
 arvi/programs.py,sha256=M8o8hXr6W22dMiIX3Nxz4pgb8lsJXApDlq7HStyTfqs,9047
-arvi/reports.py,sha256=
+arvi/reports.py,sha256=a38EZNhyGoSSzJh63wBQCAt3_xhqbpVGcDOXaZWTLXs,11127
 arvi/setup_logger.py,sha256=dHzO2gPjw6CaKWpYZd2f83z09tmxgi--qpp7k1jROjI,615
 arvi/simbad_wrapper.py,sha256=uZc8mcfNijXsQi29LReRTabZb2hRPhYdLsDLMgq1OEI,9927
 arvi/sophie_wrapper.py,sha256=KUeWccXud5_Lrx72S1HSemHIZRdjd2oLvqyofwsL0QQ,3440
 arvi/spectra.py,sha256=ebF1ocodTastLx0CyqLSpE8EZNDXBF8riyfxMr3L6H0,7491
-arvi/stats.py,sha256=
+arvi/stats.py,sha256=gvMkKzP83AV8_Oi71JHmA8QH8Y1z1viYykV9ELVDqZI,3547
 arvi/stellar.py,sha256=GQ7yweuBRnfkJ0M5eWjvLd8uvGq_by81PbXfidBvWis,4918
-arvi/timeseries.py,sha256=
+arvi/timeseries.py,sha256=K33MmNG_xyAfr5Jw5lR_Yq0JJjCwEGPreaeMMoQoE9M,105145
 arvi/translations.py,sha256=PUSrn4zvYO2MqGzUxlFGwev_tBkgJaJrIYs6NKHzbWo,951
-arvi/utils.py,sha256=
+arvi/utils.py,sha256=MuAgjyXr297Sm_T6QmB1riVUktyT9ud1qngGMgKlXMc,10863
 arvi/data/info.svg,sha256=0IMI6W-eFoTD8acnury79WJJakpBwLa4qKS4JWpsXiI,489
 arvi/data/obs_affected_ADC_issues.dat,sha256=tn93uOL0eCTYhireqp1wG-_c3CbxPA7C-Rf-pejVY8M,10853
 arvi/data/obs_affected_blue_cryostat_issues.dat,sha256=z4AK17xfz8tGTDv1FjRvQFnio4XA6PNNfDXuicewHk4,1771
 arvi/data/extra/HD86226_PFS1.rdb,sha256=vfAozbrKHM_j8dYkCBJsuHyD01KEM1asghe2KInwVao,3475
 arvi/data/extra/HD86226_PFS2.rdb,sha256=F2P7dB6gVyzCglUjNheB0hIHVClC5RmARrGwbrY1cfo,4114
 arvi/data/extra/metadata.json,sha256=C69hIw6CohyES6BI9vDWjxwSz7N4VOYX0PCgjXtYFmU,178
-arvi-0.2.
-arvi-0.2.
-arvi-0.2.
-arvi-0.2.
-arvi-0.2.
+arvi-0.2.12.dist-info/licenses/LICENSE,sha256=6JfQgl7SpM55t0EHMFNMnNh-AdkpGW25MwMiTnhdWQg,1068
+arvi-0.2.12.dist-info/METADATA,sha256=w1LdRdx-VwxhRgcIik-9at_k49xkJQQasIrGu1qtwc4,1933
+arvi-0.2.12.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+arvi-0.2.12.dist-info/top_level.txt,sha256=4EeiKDVLD45ztuflTGfQ3TU8GVjJg5Y95xS5XjI-utU,5
+arvi-0.2.12.dist-info/RECORD,,
```

{arvi-0.2.9.dist-info → arvi-0.2.12.dist-info}/WHEEL
File without changes

{arvi-0.2.9.dist-info → arvi-0.2.12.dist-info}/licenses/LICENSE
File without changes

{arvi-0.2.9.dist-info → arvi-0.2.12.dist-info}/top_level.txt
File without changes