arvi 0.2.9__py3-none-any.whl → 0.2.11__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
arvi/dace_wrapper.py CHANGED
@@ -193,12 +193,13 @@ def get_observations_from_instrument(star, instrument, user=None, main_id=None,
  except TypeError:
  msg = f'no {instrument} observations for {star}'
  raise ValueError(msg) from None
+
  if (isinstance(instrument, str)):
  filters = {
  "ins_name": {"contains": [instrument]},
  "obj_id_daceid": {"contains": [dace_id]}
  }
- elif (isinstance(instrument, list)):
+ elif (isinstance(instrument, (list, tuple, np.ndarray))):
  filters = {
  "ins_name": {"contains": instrument},
  "obj_id_daceid": {"contains": [dace_id]}
@@ -240,7 +241,9 @@ def get_observations_from_instrument(star, instrument, user=None, main_id=None,
  'rv': result['spectro_ccf_rv'][mask3],
  'rv_err': result['spectro_ccf_rv_err'][mask3],
  'berv': result['spectro_cal_berv'][mask3],
- 'ccf_noise': _nan,
+ 'ccf_noise': np.sqrt(
+ np.square(result['spectro_ccf_rv_err'][mask3]) - np.square(result['spectro_cal_drift_noise'][mask3])
+ ),
  'rhk': result['spectro_analysis_rhk'][mask3],
  'rhk_err': result['spectro_analysis_rhk_err'][mask3],
  'contrast': result['spectro_ccf_contrast'][mask3],
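
Note: the new `ccf_noise` entry removes the calibration drift contribution from the CCF RV uncertainty by quadrature subtraction. A minimal standalone sketch of the same arithmetic, with made-up arrays standing in for `result['spectro_ccf_rv_err']` and `result['spectro_cal_drift_noise']`:

    import numpy as np

    # made-up stand-ins for the DACE result arrays, in m/s
    rv_err = np.array([1.20, 0.95, 1.40])        # spectro_ccf_rv_err
    drift_noise = np.array([0.30, 0.25, 0.35])   # spectro_cal_drift_noise

    # assumes rv_err**2 = ccf_noise**2 + drift_noise**2
    ccf_noise = np.sqrt(np.square(rv_err) - np.square(drift_noise))
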
arvi/instrument_specific.py CHANGED
@@ -123,7 +123,10 @@ def HARPS_commissioning(self, mask=True, plot=True):
  if check(self, 'HARPS') is None:
  return

- affected = self.time < HARPS_start
+ affected = np.logical_and(
+ self.instrument_array == 'HARPS03',
+ self.time < HARPS_start
+ )
  total_affected = affected.sum()

  if self.verbose:
@@ -133,7 +136,7 @@ def HARPS_commissioning(self, mask=True, plot=True):

  if mask:
  self.mask[affected] = False
- self._propagate_mask_changes()
+ self._propagate_mask_changes(_remove_instrument=False)

  if plot:
  self.plot(show_masked=True)
@@ -155,7 +158,14 @@ def HARPS_fiber_commissioning(self, mask=True, plot=True):
  if check(self, 'HARPS') is None:
  return

- affected = (self.time >= HARPS_technical_intervention_range[0]) & (self.time <= HARPS_technical_intervention_range[1])
+ affected = np.logical_and(
+ self.time >= HARPS_technical_intervention_range[0],
+ self.time <= HARPS_technical_intervention_range[1]
+ )
+ affected = np.logical_and(
+ affected,
+ np.char.find(self.instrument_array, 'HARPS') == 0
+ )
  total_affected = affected.sum()

  if self.verbose:
@@ -165,7 +175,7 @@ def HARPS_fiber_commissioning(self, mask=True, plot=True):

  if mask:
  self.mask[affected] = False
- self._propagate_mask_changes()
+ self._propagate_mask_changes(_remove_instrument=False)

  if plot:
  self.plot(show_masked=True)
@@ -187,7 +197,10 @@ def ESPRESSO_commissioning(self, mask=True, plot=True):
  if check(self, 'ESPRESSO') is None:
  return

- affected = self.time < ESPRESSO_start
+ affected = np.logical_and(
+ self.instrument_array == 'ESPRESSO18',
+ self.time < ESPRESSO_start
+ )
  total_affected = affected.sum()

  if self.verbose:
@@ -197,7 +210,7 @@ def ESPRESSO_commissioning(self, mask=True, plot=True):

  if mask:
  self.mask[affected] = False
- self._propagate_mask_changes()
+ self._propagate_mask_changes(_remove_instrument=False)

  if plot and total_affected > 0:
  self.plot(show_masked=True)
@@ -246,7 +259,7 @@ def ADC_issues(self, mask=True, plot=True, check_headers=False):

  if mask:
  self.mask[intersect] = False
- self._propagate_mask_changes()
+ self._propagate_mask_changes(_remove_instrument=False)

  if plot:
  self.plot(show_masked=True)
@@ -282,7 +295,7 @@ def blue_cryostat_issues(self, mask=True, plot=True):

  if mask:
  self.mask[intersect] = False
- self._propagate_mask_changes()
+ self._propagate_mask_changes(_remove_instrument=False)

  if plot:
  self.plot(show_masked=True)
@@ -330,7 +343,7 @@ def qc_scired_issues(self, plot=False, **kwargs):
  return

  self.mask[affected] = False
- self._propagate_mask_changes()
+ self._propagate_mask_changes(_remove_instrument=False)

  if plot:
  self.plot(show_masked=True)
@@ -364,6 +377,7 @@ class ISSUES:
  logger.error('are the data binned? cannot proceed to mask these points...')

  results = list(filter(lambda x: x is not None, results))
+ self._propagate_mask_changes()

  try:
  return np.logical_or.reduce(results)
arvi/kepmodel_wrapper.py ADDED
@@ -0,0 +1,296 @@
+ import io
+ from contextlib import redirect_stdout, contextmanager
+ from string import ascii_lowercase
+ from matplotlib import pyplot as plt
+ import numpy as np
+
+ from kepmodel.rv import RvModel
+ from spleaf.cov import merge_series
+ from spleaf.term import Error, InstrumentJitter
+
+ from .setup_logger import setup_logger
+ from .utils import timer, adjust_lightness
+
+
+ class model:
+ logger = setup_logger()
+ # periodogram settings
+ Pmin, Pmax, nfreq = 1.5, 10_000, 100_000
+
+ @contextmanager
+ def ew(self):
+ for name in self.model.keplerian:
+ self.model.set_keplerian_param(name, param=['P', 'la0', 'K', 'e', 'w'])
+ try:
+ yield
+ finally:
+ for name in self.model.keplerian:
+ self.model.set_keplerian_param(name, param=['P', 'la0', 'K', 'esinw', 'ecosw'])
+
+ @property
+ def nu0(self):
+ return 2 * np.pi / self.Pmax
+
+ @property
+ def dnu(self):
+ return (2 * np.pi / self.Pmin - self.nu0) / (self.nfreq - 1)
+
+ def __init__(self, s):
+ self.s = s
+ self.instruments = s.instruments
+ self.Pmax = 2 * np.ptp(s.time)
+ ts = self.ts = self.s._mtime_sorter
+
+ # t, y_ ye, series_index = merge_series(
+ # [_s.mtime for _s in s],
+ # [_s.mvrad for _s in s],
+ # [_s.msvrad for _s in s],
+ # )
+
+ inst_jit = self._get_jitters()
+
+ self.model = RvModel(self.s.mtime[ts], self.s.mvrad[ts],
+ err=Error(self.s.msvrad[ts]), **inst_jit)
+ self.np = 0
+ self._add_means()
+
+ def _add_means(self):
+ for inst in self.s.instruments:
+ # if inst not in self.s.instrument_array[self.s.mask]:
+ # continue
+ mask = self.s.instrument_array[self.s.mask][self.ts] == inst
+ self.model.add_lin(
+ derivative=1.0 * mask,
+ name=f"offset_{inst}",
+ value=getattr(self.s, inst).mvrad.mean(),
+ )
+ self.model.fit_lin()
+
+ def _get_jitters(self):
+ inst_jit = {}
+ for inst in self.s.instruments:
+ inst_jit[f'jit_{inst}'] = InstrumentJitter(
+ indices=self.s.instrument_array[self.s.mask] == inst,
+ sig=self.s.svrad[self.s.mask].min()
+ )
+ return inst_jit
+
+ def _set_jitters(self, value=0.0):
+ for par in self.model.cov.param:
+ if 'jit' in par:
+ self.model.set_param(value, f'cov.{par}')
+ # self.model.fit()
+
+ def _equal_coralie_offsets(self):
+ if self.s._check_instrument('CORALIE') is None:
+ return
+ mask = np.char.find(self.s.instrument_array, 'CORALIE') == 0
+ mean = self.s.vrad[mask].mean()
+ for inst in self.instruments:
+ if 'CORALIE' in inst:
+ self.model.set_param(mean, f'lin.offset_{inst}')
+
+
+ def __repr__(self):
+ with self.ew():
+ with io.StringIO() as buf, redirect_stdout(buf):
+ self.model.show_param()
+ output = buf.getvalue()
+ return output
+
+ def to_table(self, **kwargs):
+ from .utils import pretty_print_table
+ lines = repr(self)
+ lines = lines.replace(' [deg]', '_[deg]')
+ lines = lines.encode().replace(b'\xc2\xb1', b'').decode()
+ lines = lines.split('\n')
+ lines = [line.split() for line in lines]
+ lines = [[col.replace('_[deg]', ' [deg]') for col in line] for line in lines]
+ pretty_print_table(lines[:-2], **kwargs)
+
+ @property
+ def fit_param(self):
+ return self.model.fit_param
+
+ def fit(self):
+ # fit offsets
+ self.model.fit_param = [f'lin.offset_{inst}' for inst in self.instruments]
+ # fit jitters
+ self.model.fit_param += [f'cov.{par}' for par in self.model.cov.param]
+ # fit keplerian(s)
+ self.model.fit_param += [
+ f'kep.{k}.{p}'
+ for k, v in self.model.keplerian.items()
+ for p in v._param
+ ]
+ # if self.np == 0:
+ # self._set_jitters(0.1 * np.std(self.model.y - self.model.model()))
+ try:
+ self.model.fit()
+ except Exception as e:
+ print(e)
+
+
+ def plot(self, **kwargs):
+ fig, ax = self.s.plot(**kwargs)
+ tt = self.s._tt()
+ time_offset = 50000 if 'remove_50000' in kwargs else 0
+
+ for i, inst in enumerate(self.s):
+ inst_name = inst.instruments[0].replace('-', '_')
+ val = self.model.get_param(f'lin.offset_{inst_name}')
+ x = np.array([inst.mtime.min(), inst.mtime.max()]) - time_offset
+ y = [val, val]
+ ax.plot(x, y, ls='--', color=f'C{i}')
+ mask = (tt > inst.mtime.min()) & (tt < inst.mtime.max())
+ color = adjust_lightness(f'C{i}', 1.2)
+ ax.plot(tt[mask] - time_offset,
+ val + self.model.keplerian_model(tt)[mask],
+ color=color)
+
+ return fig, ax
+
+ def plot_phasefolding(self, planets=None, ax=None):
+ t = self.model.t
+ res = self.model.residuals()
+ sig = np.sqrt(self.model.cov.A)
+
+ Msmooth = np.linspace(0, 360, 1000)
+
+ if planets is None:
+ planets = list(self.model.keplerian.keys())
+
+ if ax is None:
+ fig, axs = plt.subplots(
+ 1, len(planets), sharex=True, sharey=True, constrained_layout=True,
+ squeeze=False
+ )
+ else:
+ axs = np.atleast_1d(ax)
+ fig = axs[0].figure
+
+ for p, ax in zip(planets, axs.flat):
+ self.model.set_keplerian_param(p, param=['P', 'la0', 'K', 'e', 'w'])
+ kep = self.model.keplerian[p]
+ P = self.model.get_param(f'kep.{p}.P')
+ M0 = (180 / np.pi * (self.model.get_param(f'kep.{p}.la0') - self.model.get_param(f'kep.{p}.w')))
+ M = (M0 + 360 / P * t) % 360
+ reskep = res + kep.rv(t)
+ tsmooth = (Msmooth - M0) * P / 360
+ mod = kep.rv(tsmooth)
+
+ ax.plot([0, 360], [0, 0], '--', c='gray', lw=1)
+ ax.plot(Msmooth, mod, 'k-', lw=3, rasterized=True)
+ for inst in self.instruments:
+ sel = self.s.instrument_array[self.s.mask] == inst
+ ax.errorbar(M[sel], reskep[sel], sig[sel], fmt='.',
+ rasterized=True, alpha=0.7)
+
+ ax.set(ylabel='RV [m/s]', xlabel='Mean anomaly [deg]',
+ xticks=np.arange(0, 361, 90))
+ ax.minorticks_on()
+ self.model.set_keplerian_param(p, param=['P', 'la0', 'K', 'esinw', 'ecosw'])
+
+ # handles, labels = ax.get_legend_handles_labels()
+ # fig.legend(handles, labels, loc='center left', bbox_to_anchor=(0.9, 0.5))
+ return fig, axs
+
+ def add_planet_from_period(self, period):
+ self.model.add_keplerian_from_period(period, fit=True)
+ self.model.fit()
+ self.np += 1
+
+ def _plot_periodogram(self, P=None, power=None, kmax=None, faplvl=None,
+ **kwargs):
+ if P is None and power is None:
+ with timer('periodogram'):
+ nu, power = self.model.periodogram(self.nu0, self.dnu, self.nfreq)
+ P = 2 * np.pi / nu
+
+ if 'ax' in kwargs:
+ ax = kwargs.pop('ax')
+ fig = ax.figure
+ else:
+ fig, ax = plt.subplots(1, 1, constrained_layout=True)
+ ax.semilogx(P, power, 'k', lw=1, rasterized=True)
+ ax.set_ylim(0, 1.2 * power.max())
+ ax.set(xlabel='Period [days]', ylabel='Normalized power')
+
+ if kmax is None:
+ kmax = np.argmax(power)
+ ax.plot(P[kmax], power[kmax], 'or', ms=4)
+ ax.text(P[kmax], power[kmax] * 1.1, f'{P[kmax]:.3f} d',
+ ha='right', va='center', color='r')
+
+ if faplvl is None:
+ faplvl = self.model.fap(power[kmax], nu.max())
+ ax.text(0.99, 0.95, f'FAP = {faplvl:.2g}', transform=ax.transAxes,
+ ha='right', va='top')
+
+ return fig, ax
+
+ def add_keplerian_from_periodogram(self, fap_max=0.001, plot=False,
+ fit_first=True):
+ if fit_first and self.np == 0:
+ self.fit()
+
+ self._equal_coralie_offsets()
+
+ with timer('periodogram'):
+ nu, power = self.model.periodogram(self.nu0, self.dnu, self.nfreq)
+
+ P = 2 * np.pi / nu
+ # Compute FAP
+ kmax = np.argmax(power)
+ faplvl = self.model.fap(power[kmax], nu.max())
+ self.logger.info('highest periodogram peak:')
+ self.logger.info(f'P={P[kmax]:.4f} d, power={power[kmax]:.3f}, FAP={faplvl:.2e}')
+ if plot:
+ self._plot_periodogram(P, power, kmax, faplvl)
+
+ if faplvl > fap_max:
+ print('non-significant peak')
+ self.fit()
+ return False
+
+ # add new planet
+ letter = ascii_lowercase[1:][self.np]
+ self.model.add_keplerian_from_period(P[kmax], name=letter,
+ guess_kwargs={'emax': 0.8})
+ # self.model.set_keplerian_param(letter, param=['P', 'la0', 'K', 'e', 'w'])
+ self.model.set_keplerian_param(letter, param=['P', 'la0', 'K', 'esinw', 'ecosw'])
+ self.np += 1
+ self.fit()
+
+ if plot:
+ self.plot()
+
+ return True
+
+ @property
+ def offsets(self):
+ names = [f'lin.offset_{inst}' for inst in self.instruments]
+ return {
+ name.replace('lin.', ''): self.model.get_param(name)
+ for name in names
+ }
+
+ @property
+ def jitters(self):
+ names = [f'cov.{par}' for par in self.model.cov.param]
+ return {
+ name.replace('cov.', '').replace('.sig', ''): self.model.get_param(name)
+ for name in names
+ }
+
+ @property
+ def keplerians(self):
+ keps = {name: {} for name in self.model.keplerian.keys()}
+ for name in keps:
+ params = self.model.keplerian[name]._param
+ pars = [f'kep.{name}.{p}' for p in params]
+ keps[name] = {
+ par.replace(f'kep.{name}.', ''): self.model.get_param(par)
+ for par in pars
+ }
+ return keps
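
The new `arvi.kepmodel_wrapper.model` class wraps a kepmodel `RvModel` built from an arvi `RV` object, with one offset and one jitter term per instrument. A hedged usage sketch based only on the methods defined above; the star name is a placeholder and `RV` is assumed to be importable from the package top level:

    from arvi import RV                      # assumed top-level import
    from arvi.kepmodel_wrapper import model

    s = RV('HD10180')                        # placeholder target
    m = model(s)                             # offsets and jitters, no planets yet

    # add keplerians from successive periodogram peaks, mirroring kepmodel_report
    fit_keplerians = 2
    while fit_keplerians > 0:
        if m.add_keplerian_from_periodogram(fap_max=0.001):
            fit_keplerians -= 1
        else:
            break
    m.fit()

    print(m.offsets, m.jitters, m.keplerians)
    fig, ax = m.plot()
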
arvi/nasaexo_wrapper.py CHANGED
@@ -6,7 +6,7 @@ from io import StringIO
  import numpy as np
  from astropy.timeseries import LombScargle

- from .setup_logger import logger
+ from .setup_logger import setup_logger
  from kepmodel.rv import RvModel
  from spleaf.term import Error

@@ -32,6 +32,7 @@ def run_query(query):

  class Planets:
  def __init__(self, system):
+ logger = setup_logger()
  self.s = system
  self.verbose = system.verbose

@@ -163,6 +164,7 @@ class Planets:
  self.model.show_param()

  def fit_all(self, adjust_data=False):
+ logger = setup_logger()
  self.model.fit()

  newP = np.array([self.model.get_param(f'kep.{i}.P') for i in range(self.np)])
@@ -187,5 +189,7 @@ class Planets:
  self.s._build_arrays()

  def __repr__(self):
- return f'{self.star}({self.np} planets, '\
- f'P={list(self.P)}, K={list(self.K)}, e={list(self.e)})'
+ P = list(map(float, self.P))
+ K = list(map(float, self.K))
+ e = list(map(float, self.e))
+ return f'{self.star}({self.np} planets, {P=}, {K=}, {e=})'
arvi/reports.py CHANGED
@@ -40,6 +40,7 @@ class REPORTS:
  rows.append([self.star] + [''] * len(self.instruments) + [''])
  rows.append([''] + self.instruments + ['full'])
  rows.append(['N'] + list(self.NN.values()) + [self.N])
+ rows.append(['T span'] + [np.ptp(s.mtime).round(1) for s in self] + [np.ptp(self.mtime).round(1)])
  rows.append(['RV span'] + [np.ptp(s.mvrad).round(3) for s in self] + [np.ptp(self.mvrad).round(3)])
  rows.append(['RV std'] + [s.mvrad.std().round(3) for s in self] + [self.mvrad.std().round(3)])
  rows.append(['eRV mean'] + [s.msvrad.mean().round(3) for s in self] + [self.msvrad.mean().round(3)])
@@ -201,4 +202,110 @@ class REPORTS:
  pdf.savefig(fig)
  # os.system(f'evince {save} &')

- return fig
+ return fig
+
+
+ def kepmodel_report(self, fit_keplerians=3, save=None, nasaexo_title=False):
+ import matplotlib.pyplot as plt
+ import matplotlib.gridspec as gridspec
+ from matplotlib.backends.backend_pdf import PdfPages
+ logger = setup_logger()
+
+ def set_align_for_column(table, col, align="left"):
+ cells = [key for key in table._cells if key[1] == col]
+ for cell in cells:
+ table._cells[cell]._loc = align
+ table._cells[cell]._text.set_horizontalalignment(align)
+
+ from .kepmodel_wrapper import model
+ m = model(self)
+
+ while fit_keplerians > 0:
+ if m.add_keplerian_from_periodogram():
+ fit_keplerians -= 1
+ else:
+ break
+
+ m.fit()
+
+
+ # size = A4
+ size = 8.27, 11.69
+ fig = plt.figure(figsize=size, constrained_layout=True)
+ gs = gridspec.GridSpec(5, 3, figure=fig, height_ratios=[2, 1, 1, 1, 1])
+
+ # first row, all columns
+ ax1 = plt.subplot(gs[0, :])
+
+ if nasaexo_title:
+ title = str(self.planets).replace('(', '\n').replace(')', '')
+ star, planets = title.split('\n')
+ planets = planets.replace('planets,', 'known planets\n')
+ ax1.set_title(star, loc='left', fontsize=14)
+ ax1.set_title(planets, loc='right', fontsize=10)
+ else:
+ title = f'{self.star}'
+ ax1.set_title(title, loc='left', fontsize=14)
+ # ax1.set_title(r"\href{http://www.google.com}{link}", color='blue',
+ # loc='center')
+
+ m.plot(ax=ax1, N_in_label=True, tooltips=False, remove_50000=True)
+
+ ax1.legend().remove()
+ legend_ax = plt.subplot(gs[1, -1])
+ legend_ax.axis('off')
+ leg = plt.legend(*ax1.get_legend_handles_labels(),
+ prop={'family': 'monospace'})
+ legend_ax.add_artist(leg)
+
+ ax2 = plt.subplot(gs[1, :-1])
+ m._plot_periodogram(ax=ax2)
+
+ ax3 = plt.subplot(gs[2, 0])
+ ax3.axis('off')
+ items = list(m.offsets.items())
+ items = [[item[0].replace('offset_', 'offet '), item[1].round(3)] for item in items]
+ table = ax3.table(items, loc='center', edges='open')
+ table.auto_set_font_size(False)
+ table.set_fontsize(9)
+ set_align_for_column(table, 1, align="left")
+
+ ax4 = plt.subplot(gs[2, 1])
+ ax4.axis('off')
+ items = list(m.jitters.items())
+ items = [[item[0].replace('jit_', 'jitter '), item[1].round(3)] for item in items]
+ table = ax4.table(items, loc='center', edges='open')
+ table.auto_set_font_size(False)
+ table.set_fontsize(9)
+ set_align_for_column(table, 1, align="left")
+
+ ax5 = plt.subplot(gs[2, 2])
+ ax5.axis('off')
+ items = [
+ ['N', m.model.n],
+ [r'N$_{\rm free}$', len(m.model.fit_param)],
+ [r'$\chi^2$', round(m.model.chi2(), 2)],
+ [r'$\chi^2_r$', round(m.model.chi2() / (m.model.n - len(m.model.fit_param)), 2)],
+ [r'$\log L$', round(m.model.loglike(), 2)],
+ ]
+ table = ax5.table(items, loc='center', edges='open')
+ table.auto_set_font_size(False)
+ table.set_fontsize(9)
+ set_align_for_column(table, 1, align="left")
+
+ for i, name in enumerate(m.keplerians):
+ ax = plt.subplot(gs[3, i])
+ m.plot_phasefolding(planets=name, ax=ax)
+
+ ax = plt.subplot(gs[4, i])
+ ax.axis('off')
+ with m.ew():
+ items = list(m.keplerians[name].items())
+ items = [[item[0], item[1].round(3)] for item in items]
+ table = ax.table(items, loc='center', edges='open')
+ table.auto_set_font_size(False)
+ table.set_fontsize(9)
+ set_align_for_column(table, 1, align="left")
+
+
+ return fig, m
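
The `kepmodel_report` method above assembles the fit, periodogram, parameter tables, and phase-folded panels on a single A4 page. A hedged example of calling it; the target name and output filename are placeholders, and `RV` is assumed importable from the package top level:

    from arvi import RV                      # assumed top-level import

    s = RV('HD10180')                        # placeholder target
    fig, m = s.kepmodel_report(fit_keplerians=2, nasaexo_title=False)
    fig.savefig('HD10180_kepmodel_report.pdf')
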
arvi/stats.py CHANGED
@@ -1,4 +1,6 @@
+ from functools import partial
  import numpy as np
+ from scipy.stats import norm

  def wmean(a, e):
  """Weighted mean of array `a`, with uncertainties given by `e`.
@@ -50,16 +52,39 @@ def wrms(a, e, ignore_nans=False):
  w = 1 / e**2
  return np.sqrt(np.sum(w * (a - np.average(a, weights=w))**2) / sum(w))

+ # from https://stackoverflow.com/questions/20601872/numpy-or-scipy-to-calculate-weighted-median
+ def weighted_quantiles_interpolate(values, weights, quantiles):
+ i = np.argsort(values)
+ c = np.cumsum(weights[i])
+ q = np.searchsorted(c, quantiles * c[-1])
+ # Ensure right-end isn't out of bounds. Thanks @Jeromino!
+ q_plus1 = np.clip(q + 1, a_min=None, a_max=values.shape[0] - 1)
+ return np.where(
+ c[q] / c[-1] == quantiles,
+ 0.5 * (values[i[q]] + values[i[q_plus1]]),
+ values[i[q]],
+ )

- def sigmaclip_median(a, low=4.0, high=4.0):
+ weighted_median = partial(weighted_quantiles_interpolate, quantiles=0.5)
+
+
+
+ def sigmaclip_median(a, low=4.0, high=4.0, k=1/norm.ppf(3/4)):
  """
  Same as scipy.stats.sigmaclip but using the median and median absolute
  deviation instead of the mean and standard deviation.

  Args:
- a (array): Array containing data
- low (float): Number of MAD to use for the lower clipping limit
- high (float): Number of MAD to use for the upper clipping limit
+ a (array):
+ Array containing data
+ low (float):
+ Number of MAD to use for the lower clipping limit
+ high (float):
+ Number of MAD to use for the upper clipping limit
+ k (float):
+ Scale factor for the MAD to be an estimator of the standard
+ deviation. Depends on the (assumed) distribution of the data.
+ Default value is for the normal distribution (=1/norm.ppf(3/4)).
  Returns:
  SigmaclipResult: Object with the following attributes:
  - `clipped`: Masked array of data
@@ -71,7 +96,7 @@ def sigmaclip_median(a, low=4.0, high=4.0):
  c = np.asarray(a).ravel()
  delta = 1
  while delta:
- c_mad = median_abs_deviation(c)
+ c_mad = median_abs_deviation(c) * k
  c_median = np.median(c)
  size = c.size
  critlower = c_median - c_mad * low
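
For reference, the new helpers in arvi.stats can be exercised on their own; a minimal sketch with made-up data (1/norm.ppf(3/4) ≈ 1.4826 rescales the MAD into a standard-deviation estimate for normally distributed data):

    import numpy as np
    from scipy.stats import norm
    from arvi.stats import weighted_median, sigmaclip_median

    values = np.array([1.0, 2.0, 3.0, 50.0])
    weights = np.array([1.0, 1.0, 1.0, 0.1])   # the outlier gets little weight
    print(weighted_median(values, weights))    # -> 2.0

    print(1 / norm.ppf(3 / 4))                 # ~ 1.4826, the default k

    rng = np.random.default_rng(0)
    res = sigmaclip_median(rng.normal(size=100), low=4, high=4)
    print(res.lower, res.upper)
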
arvi/timeseries.py CHANGED
@@ -4,10 +4,9 @@ from typing import Union
  from functools import partial, partialmethod
  from glob import glob
  import warnings
- from copy import deepcopy
+ from copy import copy, deepcopy
  from datetime import datetime, timezone

- # import lazy_loader as lazy
  import numpy as np

  from .setup_logger import setup_logger
@@ -26,7 +25,8 @@ from .HZ import getHZ_period
  from .instrument_specific import ISSUES
  from .reports import REPORTS
  from .utils import sanitize_path, strtobool, there_is_internet, timer, chdir
- # from .utils import lazy_import
+ from .setup_logger import setup_logger
+ logger = setup_logger()

  # units = lazy_import('astropy.units')
  # units = lazy.load('astropy.units')
@@ -411,35 +411,71 @@ class RV(ISSUES, REPORTS):
  self._did_correct_berv = False
  self.__post_init__()

- def snapshot(self, directory=None, delete_others=False):
- import pickle
+ def snapshot(self, directory=None, delete_others=False, compress=False):
+ if compress:
+ try:
+ import compress_pickle as pickle
+ except ImportError:
+ logger.warning('compress_pickle not installed, not compressing')
+ import pickle
+ compress = False
+ else:
+ import pickle
+ import re
  from datetime import datetime
+
  ts = datetime.now().timestamp()
  star_name = self.star.replace(' ', '')
  file = f'{star_name}_{ts}.pkl'

+ server = None
  if directory is None:
  directory = '.'
  else:
- os.makedirs(directory, exist_ok=True)
-
- file = os.path.join(directory, file)
-
- if delete_others:
- import re
- other_pkls = [
- f for f in os.listdir(directory)
- if re.search(fr'{star_name}_\d+.\d+.pkl', f)
- ]
- for pkl in other_pkls:
- os.remove(os.path.join(directory, pkl))
+ if ':' in directory:
+ server, directory = directory.split(':')
+ delete_others = False
+ else:
+ os.makedirs(directory, exist_ok=True)

  metadata = {
  'star': self.star,
  'timestamp': ts,
  'description': 'arvi snapshot'
  }
- pickle.dump((self, metadata), open(file, 'wb'), protocol=0)
+
+
+ if server:
+ import posixpath
+ from .utils import server_sftp, server_file
+ with server_sftp(server=server) as sftp:
+ try:
+ sftp.chdir(directory)
+ except FileNotFoundError:
+ sftp.mkdir(directory)
+ finally:
+ sftp.chdir(directory)
+ with sftp.open(file, 'wb') as f:
+ print('saving snapshot to server...', end='', flush=True)
+ pickle.dump((self, metadata), f, protocol=0)
+ print('done')
+ file = posixpath.join(directory, file)
+ else:
+ if delete_others:
+ other_pkls = [
+ f for f in os.listdir(directory)
+ if re.search(fr'{star_name}_\d+.\d+.pkl', f)
+ ]
+ for pkl in other_pkls:
+ os.remove(os.path.join(directory, pkl))
+
+ file = os.path.join(directory, file)
+
+ if compress:
+ file += '.gz'
+
+ with open(file, 'wb') as f:
+ pickle.dump((self, metadata), f)

  if self.verbose:
  logger.info(f'saved snapshot to {file}')
@@ -514,6 +550,15 @@ class RV(ISSUES, REPORTS):
  def instrument_array(self):
  return np.concatenate([[i] * n for i, n in self.NN.items()])

+ def _instrument_mask(self, instrument):
+ if isinstance(instrument, str):
+ return np.char.find(self.instrument_array, instrument) == 0
+ elif isinstance(instrument, (list, tuple, np.ndarray)):
+ m = np.full_like(self.time, False, dtype=bool)
+ for i in instrument:
+ m |= np.char.find(self.instrument_array, i) == 0
+ return m
+
  @property
  def rms(self) -> float:
  """ Weighted rms of the (masked) radial velocities """
@@ -540,6 +585,11 @@ class RV(ISSUES, REPORTS):
  def _mtime_sorter(self):
  return np.argsort(self.mtime)

+ @property
+ def timespan(self):
+ """ Total time span of the (masked) observations """
+ return np.ptp(self.mtime)
+
  def _index_from_instrument_index(self, index, instrument):
  ind = np.where(self.instrument_array == instrument)[0]
  return ind[getattr(self, instrument).mask][index]
@@ -633,22 +683,28 @@ class RV(ISSUES, REPORTS):
  import pickle
  from datetime import datetime
  if star is None:
- assert file.endswith('.pkl'), 'expected a .pkl file'
- star, timestamp = file.replace('.pkl', '').split('_')
+ assert file.endswith(('.pkl', '.pkl.gz')), 'expected a .pkl file'
+ basefile = os.path.basename(file)
+ star, timestamp = basefile.replace('.pkl.gz', '').replace('.pkl', '').split('_')
  else:
  try:
- file = sorted(glob(f'{star}_*.*.pkl'))[-1]
+ file = sorted(glob(f'{star}_*.*.pkl*'))[-1]
  except IndexError:
  raise ValueError(f'cannot find any file matching {star}_*.pkl')
- star, timestamp = file.replace('.pkl', '').split('_')
+ star, timestamp = file.replace('.pkl.gz', '').replace('.pkl', '').split('_')

  dt = datetime.fromtimestamp(float(timestamp))
  if verbose:
  logger.info(f'reading snapshot of {star} from {dt}')

- s = pickle.load(open(file, 'rb'))
+ with open(file, 'rb') as f:
+ if file.endswith('.gz'):
+ import compress_pickle as pickle
+ s = pickle.load(f)
+
  if isinstance(s, tuple) and len(s) == 2:
  s, _metadata = s
+
  s._snapshot = file
  return s

@@ -1508,7 +1564,7 @@ class RV(ISSUES, REPORTS):
  """ Remove all observations that satisfy a condition

  Args:
- condition (np.ndarray):
+ condition (ndarray):
  Boolean array of the same length as the observations
  """
  if self.verbose:
@@ -1668,16 +1724,18 @@ class RV(ISSUES, REPORTS):
  self._propagate_mask_changes()


- def _propagate_mask_changes(self):
+ def _propagate_mask_changes(self, _remove_instrument=True):
  """ link self.mask with each self.`instrument`.mask """
  masked = np.where(~self.mask)[0]
  for m in masked:
  inst = self.instruments[self.obs[m] - 1]
  n_before = (self.obs < self.obs[m]).sum()
  getattr(self, inst).mask[m - n_before] = False
- for inst in self.instruments:
- if getattr(self, inst).mtime.size == 0:
- self.remove_instrument(inst, strict=True)
+ if _remove_instrument:
+ instruments = copy(self.instruments)
+ for inst in instruments:
+ if getattr(self, inst).mtime.size == 0:
+ self.remove_instrument(inst, strict=True)

  def secular_acceleration(self, epoch=None, just_compute=False, force_simbad=False):
  """
@@ -1695,9 +1753,12 @@ class RV(ISSUES, REPORTS):
  force_simbad (bool, optional):
  Use Simbad proper motions even if Gaia is available
  """
- if self._did_secular_acceleration and not just_compute: # don't do it twice
+ # don't do it twice
+ if self._did_secular_acceleration and not just_compute:
  return

+ from astropy import units
+
  #as_yr = units.arcsec / units.year
  mas_yr = units.milliarcsecond / units.year
  mas = units.milliarcsecond
@@ -1829,15 +1890,21 @@ class RV(ISSUES, REPORTS):

  self._did_secular_acceleration = False

- def sigmaclip(self, sigma=5, instrument=None, strict=True):
+ def sigmaclip(self, sigma=5, quantity='vrad', instrument=None,
+ strict=True):
  """
- Sigma-clip RVs (per instrument!), by MAD away from the median.
+ Sigma-clip RVs or other quantities (per instrument!), by MAD away from
+ the median.

  Args:
  sigma (float):
- Number of MADs to clip
+ Number of MADs away from the median
+ quantity (str):
+ Quantity to sigma-clip (by default the RVs)
  instrument (str, list):
  Instrument(s) to sigma-clip
+ strict (bool):
+ Passed directly to self._check_instrument
  """
  #from scipy.stats import sigmaclip as dosigmaclip
  from .stats import sigmaclip_median as dosigmaclip
@@ -1846,20 +1913,26 @@ class RV(ISSUES, REPORTS):
  return

  instruments = self._check_instrument(instrument, strict)
+ if instruments is None:
+ return
  changed_instruments = []

  for inst in instruments:
  m = self.instrument_array == inst
- result = dosigmaclip(self.vrad[m], low=sigma, high=sigma)
+ d = getattr(self, quantity)
+
+ if np.isnan(d[m]).all():
+ continue
+
+ result = dosigmaclip(d[m], low=sigma, high=sigma)
  # n = self.vrad[m].size - result.clipped.size

- ind = m & self.mask & \
- ((self.vrad < result.lower) | (self.vrad > result.upper))
+ ind = m & self.mask & ((d < result.lower) | (d > result.upper))
  n = ind.sum()

  if self.verbose and n > 0:
  s = 's' if (n == 0 or n > 1) else ''
- logger.warning(f'sigma-clip RVs will remove {n} point{s} for {inst}')
+ logger.warning(f'sigma-clip {quantity} will remove {n} point{s} for {inst}')

  if n > 0:
  self.mask[ind] = False
@@ -1884,21 +1957,32 @@ class RV(ISSUES, REPORTS):
  if config.return_self:
  return self

- def clip_maxerror(self, maxerror:float):
- """ Mask out points with RV error larger than a given value
+ def clip_maxerror(self, maxerror:float, instrument=None):
+ """
+ Mask out points with RV error larger than a given value. If `instrument`
+ is given, mask only observations from that instrument.

  Args:
  maxerror (float): Maximum error to keep.
+ instrument (str, list, tuple, ndarray): Instrument(s) to clip
  """
  if self._child:
  return

  self.maxerror = maxerror
+
+ if instrument is None:
+ inst_mask = np.ones_like(self.svrad, dtype=bool)
+ else:
+ inst_mask = self._instrument_mask(instrument)
+
  above = self.svrad > maxerror
- n = above.sum()
- self.mask[above] = False
+ old_mask = self.mask.copy()
+
+ self.mask[inst_mask & above] = False

  if self.verbose and above.sum() > 0:
+ n = (above[inst_mask] & old_mask[inst_mask]).sum()
  s = 's' if (n == 0 or n > 1) else ''
  logger.warning(f'clip_maxerror ({maxerror} {self.units}) removed {n} point' + s)

@@ -1906,6 +1990,36 @@ class RV(ISSUES, REPORTS):
  if config.return_self:
  return self

+ def sigmaclip_ew(self, sigma=5):
+ """ Sigma-clip EW (FWHM x contrast), by MAD away from the median """
+ from .stats import sigmaclip_median as dosigmaclip, weighted_median
+
+ S = deepcopy(self)
+ for _s in S:
+ m = _s.mask
+ _s.fwhm -= weighted_median(_s.fwhm[m], 1 / _s.fwhm_err[m])
+ _s.contrast -= weighted_median(_s.contrast[m], 1 / _s.contrast_err[m])
+ S._build_arrays()
+ ew = S.fwhm * S.contrast
+ ew_err = np.hypot(S.fwhm_err * S.contrast, S.fwhm * S.contrast_err)
+
+ wmed = weighted_median(ew[S.mask], 1 / ew_err[S.mask])
+ data = (ew - wmed) / ew_err
+ result = dosigmaclip(data, low=sigma, high=sigma)
+ ind = (data < result.lower) | (data > result.upper)
+ self.mask[ind] = False
+
+ if self.verbose and ind.sum() > 0:
+ n = ind.sum()
+ s = 's' if (n == 0 or n > 1) else ''
+ logger.warning(f'sigmaclip_ew removed {n} point' + s)
+
+ self._propagate_mask_changes()
+ if config.return_self:
+ return self
+
+
+
  def bin(self):
  """
  Nightly bin the observations.
@@ -1949,7 +2063,8 @@ class RV(ISSUES, REPORTS):

  # treat ccf_mask specially, doing a 'unique' bin
  if q == 'ccf_mask':
- setattr(s, q, bin_ccf_mask(s.mtime, getattr(s, q)))
+ ccf_mask = getattr(s, q)[s.mask]
+ setattr(s, q, bin_ccf_mask(s.mtime, ccf_mask))
  continue

  if Q.dtype != np.float64:
@@ -2317,7 +2432,7 @@ class RV(ISSUES, REPORTS):
  self.units = new_units


- def put_at_systemic_velocity(self, factor=1.0):
+ def put_at_systemic_velocity(self, factor=1.0, ignore=None):
  """
  For instruments in which mean(RV) < `factor` * ptp(RV), "move" RVs to
  the systemic velocity from simbad. This is useful if some instruments
@@ -2327,6 +2442,9 @@ class RV(ISSUES, REPORTS):
  """
  changed = False
  for inst in self.instruments:
+ if ignore is not None:
+ if inst in ignore or any([i in inst for i in ignore]):
+ continue
  changed_inst = False
  s = getattr(self, inst)
  if s.mask.any():
@@ -2360,6 +2478,11 @@ class RV(ISSUES, REPORTS):
  self.instruments = sorted(self.instruments, key=lambda i: getattr(self, i).time.max())
  self._build_arrays()

+ def put_instrument_last(self, instrument):
+ if not self._check_instrument(instrument, strict=True, log=True):
+ return
+ self.instruments = [i for i in self.instruments if i != instrument] + [instrument]
+ self._build_arrays()

  def save(self, directory=None, instrument=None, format='rdb',
  indicators=False, join_instruments=False, postfix=None,
@@ -2383,7 +2506,7 @@ class RV(ISSUES, REPORTS):
  Postfix to add to the filenames ([star]_[instrument]_[postfix].rdb).
  save_masked (bool, optional)
  If True, also save masked observations (those for which
- self.mask == True)
+ self.mask == False)
  save_nans (bool, optional)
  Whether to save NaN values in the indicators, if they exist. If
  False, the full observation which contains NaN values is not saved.
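
A hedged sketch of how the new per-instrument helpers in this file fit together; the target and instrument names are placeholders, and `RV` is assumed importable from the package top level:

    from arvi import RV                      # assumed top-level import

    s = RV('HD10180')                        # placeholder target
    print(s.timespan)                        # time span of the masked points

    # boolean masks selecting instruments by name prefix
    harps = s._instrument_mask('HARPS')
    both = s._instrument_mask(['HARPS', 'ESPRESSO'])

    # clip large RV errors for one instrument only, then sigma-clip
    # a quantity other than the RVs
    s.clip_maxerror(5.0, instrument='HARPS')
    s.sigmaclip(sigma=5, quantity='fwhm')
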
arvi/utils.py CHANGED
@@ -2,6 +2,8 @@ import os
  import sys
  import time
  from contextlib import contextmanager
+ from functools import partial
+ from collections import defaultdict

  try:
  from unittest.mock import patch
@@ -68,6 +70,50 @@ def all_logging_disabled():
  logging.disable(previous_level)


+ class record_removals:
+ def __init__(self, s, storage=None):
+ """
+ A simple context manager to record removed files
+
+ Args:
+ s (RV):
+ An `RV` object
+ storage (dict):
+ A dictionary to store the removed files, with keys 'raw_file'
+ and 'reason' as lists.
+
+ Examples:
+ >>> with record_removals(s) as rec:
+ : s.remove_instrument('HARPS')
+ : rec.store('removed HARPS')
+ >>> rec.storage
+ """
+ self.s = s
+ if storage is None:
+ self.storage = defaultdict(list)
+ else:
+ if 'raw_file' not in storage:
+ storage['raw_file'] = []
+ if 'reason' not in storage:
+ storage['reason'] = []
+ self.storage = storage
+ self.raw_file_start = self.s.raw_file.copy()
+
+ def store(self, reason):
+ missing = ~ np.isin(self.raw_file_start, self.s.raw_file[self.s.mask])
+ if missing.any():
+ lost = self.raw_file_start[missing]
+ self.storage['raw_file'].extend(lost)
+ self.storage['reason'].extend(len(lost) * [reason])
+ self.raw_file_start = self.s.raw_file[self.s.mask].copy()
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, exc_type, exc_value, traceback):
+ pass
+
+
  @contextmanager
  def timer(name=None):
  """ A simple context manager to time a block of code """
arvi-0.2.9.dist-info/METADATA → arvi-0.2.11.dist-info/METADATA RENAMED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: arvi
- Version: 0.2.9
+ Version: 0.2.11
  Summary: The Automated RV Inspector
  Author-email: João Faria <joao.faria@unige.ch>
  License: MIT
arvi-0.2.9.dist-info/RECORD → arvi-0.2.11.dist-info/RECORD RENAMED
@@ -4,35 +4,36 @@ arvi/ariadne_wrapper.py,sha256=YvilopJa9T4NwPcj3Nah_U8smSeSAU5-HYZMb_GJ-BQ,2232
  arvi/berv.py,sha256=eKnpuPC1w45UrUEyFRbs9F9j3bXz3kxYzNXbnRgvFQM,17596
  arvi/binning.py,sha256=NK9y9bUrdyWCbh79LkcRABHG-n5MtlETMHMvLj1z-OM,15437
  arvi/config.py,sha256=JkHSwF-EEqwwbcc8thGgbFc9udDZPjQH-9XFjqDepBY,2337
- arvi/dace_wrapper.py,sha256=08hNZMCt2Kd9x8BvJHZHml4cDCcbBsY1lBheuXk9yGk,25952
+ arvi/dace_wrapper.py,sha256=i3aIRKSc2MylEzM5WbbU-BnuLbz8JlG1ez6sK_vvQH0,26125
  arvi/exofop_wrapper.py,sha256=8S7UEcrBAgANIweMV0-CvaWaVTPgGVo8vQQk_KRa0nU,2414
  arvi/extra_data.py,sha256=Xi65pI5kkzqlMmHGl9xFoumtH699611pJJ5PV-a_IfU,3397
  arvi/gaia_wrapper.py,sha256=HTuigIduin3raWfSC7QYuQxDk2dEXYH_4egRkzzg7Xw,4379
  arvi/headers.py,sha256=uvdJebw1M5YkGjE3vJJwYBOnLikib75uuZE9FXB5JJM,1673
- arvi/instrument_specific.py,sha256=ycLhtT3oeNtSREm9bmWICaT8uureYcl3NFzbdDYRMVY,11624
+ arvi/instrument_specific.py,sha256=94oMb6UeH6tp7H8YXnXHpxEhIz2evz0iYsT_HrNOCTo,12105
+ arvi/kepmodel_wrapper.py,sha256=mmHudetAZ4cBxKDwzQzgUydzkjhomCWw5VVuyiKfXq8,10288
  arvi/kima_wrapper.py,sha256=GrAZWkDCg8ukhW41M1VTadSbab0GBa6BIzjtAtvjk58,3891
  arvi/lbl_wrapper.py,sha256=_ViGVkpakvuBR_xhu9XJRV5EKHpj5Go6jBZGJZMIS2Y,11850
- arvi/nasaexo_wrapper.py,sha256=mWt7eHgSZe4MBKCmUvMPTyUPGuiwGTqKugNBvmjOg9s,7306
+ arvi/nasaexo_wrapper.py,sha256=ZKY3IUClqsJuysxDv0Gu51EnzMX7101zQb7UQy_urhI,7431
  arvi/plots.py,sha256=U4VUNyIx4h_rEFd7ZWgBcawUcIGcURES0A4VXIBKp3U,35240
  arvi/programs.py,sha256=M8o8hXr6W22dMiIX3Nxz4pgb8lsJXApDlq7HStyTfqs,9047
- arvi/reports.py,sha256=CKmtg5rewMyT26gbWeoZDYrL0z5Sbb6cTJry0HWk_rs,7445
+ arvi/reports.py,sha256=a38EZNhyGoSSzJh63wBQCAt3_xhqbpVGcDOXaZWTLXs,11127
  arvi/setup_logger.py,sha256=dHzO2gPjw6CaKWpYZd2f83z09tmxgi--qpp7k1jROjI,615
  arvi/simbad_wrapper.py,sha256=uZc8mcfNijXsQi29LReRTabZb2hRPhYdLsDLMgq1OEI,9927
  arvi/sophie_wrapper.py,sha256=KUeWccXud5_Lrx72S1HSemHIZRdjd2oLvqyofwsL0QQ,3440
  arvi/spectra.py,sha256=ebF1ocodTastLx0CyqLSpE8EZNDXBF8riyfxMr3L6H0,7491
- arvi/stats.py,sha256=ilzzGL9ew-SyVa9eEdrYCpD3DliOAwhoNUg9LIlHjzU,2583
+ arvi/stats.py,sha256=gvMkKzP83AV8_Oi71JHmA8QH8Y1z1viYykV9ELVDqZI,3547
  arvi/stellar.py,sha256=GQ7yweuBRnfkJ0M5eWjvLd8uvGq_by81PbXfidBvWis,4918
- arvi/timeseries.py,sha256=9-EjmhMFMo8IJ_Erqf5SnEeKi4J2x0Pt518V30jQ8bw,100162
+ arvi/timeseries.py,sha256=suXPLmFlNbnFbfixXufNMwqwZWz-FwKdckJ1x2PtrXQ,104664
  arvi/translations.py,sha256=PUSrn4zvYO2MqGzUxlFGwev_tBkgJaJrIYs6NKHzbWo,951
- arvi/utils.py,sha256=x_zDTW1vp672CZe-m9-KXo5IVk-JKOs2wme_ta4t8MU,9402
+ arvi/utils.py,sha256=MuAgjyXr297Sm_T6QmB1riVUktyT9ud1qngGMgKlXMc,10863
  arvi/data/info.svg,sha256=0IMI6W-eFoTD8acnury79WJJakpBwLa4qKS4JWpsXiI,489
  arvi/data/obs_affected_ADC_issues.dat,sha256=tn93uOL0eCTYhireqp1wG-_c3CbxPA7C-Rf-pejVY8M,10853
  arvi/data/obs_affected_blue_cryostat_issues.dat,sha256=z4AK17xfz8tGTDv1FjRvQFnio4XA6PNNfDXuicewHk4,1771
  arvi/data/extra/HD86226_PFS1.rdb,sha256=vfAozbrKHM_j8dYkCBJsuHyD01KEM1asghe2KInwVao,3475
  arvi/data/extra/HD86226_PFS2.rdb,sha256=F2P7dB6gVyzCglUjNheB0hIHVClC5RmARrGwbrY1cfo,4114
  arvi/data/extra/metadata.json,sha256=C69hIw6CohyES6BI9vDWjxwSz7N4VOYX0PCgjXtYFmU,178
- arvi-0.2.9.dist-info/licenses/LICENSE,sha256=6JfQgl7SpM55t0EHMFNMnNh-AdkpGW25MwMiTnhdWQg,1068
- arvi-0.2.9.dist-info/METADATA,sha256=KLtdToTvrCF-TT0P04dctgqKRHH4eTOL47hspjj8qaU,1932
- arvi-0.2.9.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
- arvi-0.2.9.dist-info/top_level.txt,sha256=4EeiKDVLD45ztuflTGfQ3TU8GVjJg5Y95xS5XjI-utU,5
- arvi-0.2.9.dist-info/RECORD,,
+ arvi-0.2.11.dist-info/licenses/LICENSE,sha256=6JfQgl7SpM55t0EHMFNMnNh-AdkpGW25MwMiTnhdWQg,1068
+ arvi-0.2.11.dist-info/METADATA,sha256=G2yojj2vhNVuEPtoY81O3aWPvgMMxNw5ax4_40zVhb8,1933
+ arvi-0.2.11.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ arvi-0.2.11.dist-info/top_level.txt,sha256=4EeiKDVLD45ztuflTGfQ3TU8GVjJg5Y95xS5XjI-utU,5
+ arvi-0.2.11.dist-info/RECORD,,