arvi 0.2.9__tar.gz → 0.2.10__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of arvi might be problematic. Click here for more details.

Files changed (65)
  1. {arvi-0.2.9 → arvi-0.2.10}/.github/workflows/docs-gh-pages.yml +1 -1
  2. {arvi-0.2.9 → arvi-0.2.10}/.github/workflows/install.yml +1 -1
  3. {arvi-0.2.9 → arvi-0.2.10}/.github/workflows/python-publish.yml +2 -2
  4. {arvi-0.2.9/arvi.egg-info → arvi-0.2.10}/PKG-INFO +1 -1
  5. {arvi-0.2.9 → arvi-0.2.10}/arvi/dace_wrapper.py +2 -1
  6. {arvi-0.2.9 → arvi-0.2.10}/arvi/instrument_specific.py +23 -9
  7. arvi-0.2.10/arvi/kepmodel_wrapper.py +296 -0
  8. {arvi-0.2.9 → arvi-0.2.10}/arvi/nasaexo_wrapper.py +7 -3
  9. {arvi-0.2.9 → arvi-0.2.10}/arvi/reports.py +108 -1
  10. {arvi-0.2.9 → arvi-0.2.10}/arvi/stats.py +30 -5
  11. {arvi-0.2.9 → arvi-0.2.10}/arvi/timeseries.py +162 -42
  12. {arvi-0.2.9 → arvi-0.2.10/arvi.egg-info}/PKG-INFO +1 -1
  13. {arvi-0.2.9 → arvi-0.2.10}/arvi.egg-info/SOURCES.txt +1 -0
  14. {arvi-0.2.9 → arvi-0.2.10}/.github/dependabot.yml +0 -0
  15. {arvi-0.2.9 → arvi-0.2.10}/.gitignore +0 -0
  16. {arvi-0.2.9 → arvi-0.2.10}/LICENSE +0 -0
  17. {arvi-0.2.9 → arvi-0.2.10}/README.md +0 -0
  18. {arvi-0.2.9 → arvi-0.2.10}/arvi/HZ.py +0 -0
  19. {arvi-0.2.9 → arvi-0.2.10}/arvi/__init__.py +0 -0
  20. {arvi-0.2.9 → arvi-0.2.10}/arvi/ariadne_wrapper.py +0 -0
  21. {arvi-0.2.9 → arvi-0.2.10}/arvi/berv.py +0 -0
  22. {arvi-0.2.9 → arvi-0.2.10}/arvi/binning.py +0 -0
  23. {arvi-0.2.9 → arvi-0.2.10}/arvi/config.py +0 -0
  24. {arvi-0.2.9 → arvi-0.2.10}/arvi/data/extra/HD86226_PFS1.rdb +0 -0
  25. {arvi-0.2.9 → arvi-0.2.10}/arvi/data/extra/HD86226_PFS2.rdb +0 -0
  26. {arvi-0.2.9 → arvi-0.2.10}/arvi/data/extra/metadata.json +0 -0
  27. {arvi-0.2.9 → arvi-0.2.10}/arvi/data/info.svg +0 -0
  28. {arvi-0.2.9 → arvi-0.2.10}/arvi/data/obs_affected_ADC_issues.dat +0 -0
  29. {arvi-0.2.9 → arvi-0.2.10}/arvi/data/obs_affected_blue_cryostat_issues.dat +0 -0
  30. {arvi-0.2.9 → arvi-0.2.10}/arvi/exofop_wrapper.py +0 -0
  31. {arvi-0.2.9 → arvi-0.2.10}/arvi/extra_data.py +0 -0
  32. {arvi-0.2.9 → arvi-0.2.10}/arvi/gaia_wrapper.py +0 -0
  33. {arvi-0.2.9 → arvi-0.2.10}/arvi/headers.py +0 -0
  34. {arvi-0.2.9 → arvi-0.2.10}/arvi/kima_wrapper.py +0 -0
  35. {arvi-0.2.9 → arvi-0.2.10}/arvi/lbl_wrapper.py +0 -0
  36. {arvi-0.2.9 → arvi-0.2.10}/arvi/plots.py +0 -0
  37. {arvi-0.2.9 → arvi-0.2.10}/arvi/programs.py +0 -0
  38. {arvi-0.2.9 → arvi-0.2.10}/arvi/setup_logger.py +0 -0
  39. {arvi-0.2.9 → arvi-0.2.10}/arvi/simbad_wrapper.py +0 -0
  40. {arvi-0.2.9 → arvi-0.2.10}/arvi/sophie_wrapper.py +0 -0
  41. {arvi-0.2.9 → arvi-0.2.10}/arvi/spectra.py +0 -0
  42. {arvi-0.2.9 → arvi-0.2.10}/arvi/stellar.py +0 -0
  43. {arvi-0.2.9 → arvi-0.2.10}/arvi/translations.py +0 -0
  44. {arvi-0.2.9 → arvi-0.2.10}/arvi/utils.py +0 -0
  45. {arvi-0.2.9 → arvi-0.2.10}/arvi.egg-info/dependency_links.txt +0 -0
  46. {arvi-0.2.9 → arvi-0.2.10}/arvi.egg-info/requires.txt +0 -0
  47. {arvi-0.2.9 → arvi-0.2.10}/arvi.egg-info/top_level.txt +0 -0
  48. {arvi-0.2.9 → arvi-0.2.10}/docs/API.md +0 -0
  49. {arvi-0.2.9 → arvi-0.2.10}/docs/detailed.ipynb +0 -0
  50. {arvi-0.2.9 → arvi-0.2.10}/docs/downloading_data.md +0 -0
  51. {arvi-0.2.9 → arvi-0.2.10}/docs/index.md +0 -0
  52. {arvi-0.2.9 → arvi-0.2.10}/docs/logo/detective.png +0 -0
  53. {arvi-0.2.9 → arvi-0.2.10}/docs/logo/logo.png +0 -0
  54. {arvi-0.2.9 → arvi-0.2.10}/docs/stylesheets/extra.css +0 -0
  55. {arvi-0.2.9 → arvi-0.2.10}/mkdocs.yml +0 -0
  56. {arvi-0.2.9 → arvi-0.2.10}/pyproject.toml +0 -0
  57. {arvi-0.2.9 → arvi-0.2.10}/setup.cfg +0 -0
  58. {arvi-0.2.9 → arvi-0.2.10}/setup.py +0 -0
  59. {arvi-0.2.9 → arvi-0.2.10}/tests/HD10700-Bcor_ESPRESSO18.rdb +0 -0
  60. {arvi-0.2.9 → arvi-0.2.10}/tests/test_binning.py +0 -0
  61. {arvi-0.2.9 → arvi-0.2.10}/tests/test_config.py +0 -0
  62. {arvi-0.2.9 → arvi-0.2.10}/tests/test_create_RV.py +0 -0
  63. {arvi-0.2.9 → arvi-0.2.10}/tests/test_import_object.py +0 -0
  64. {arvi-0.2.9 → arvi-0.2.10}/tests/test_simbad.py +0 -0
  65. {arvi-0.2.9 → arvi-0.2.10}/tests/test_stats.py +0 -0
@@ -32,7 +32,7 @@ jobs:
32
32
  uses: actions/configure-pages@v5
33
33
 
34
34
  - name: Set up Python
35
- uses: actions/setup-python@v5
35
+ uses: actions/setup-python@v6
36
36
  with:
37
37
  python-version: "3.10"
38
38
 
@@ -18,7 +18,7 @@ jobs:
18
18
  steps:
19
19
  - uses: actions/checkout@v4
20
20
  - name: Set up Python ${{ matrix.python-version }}
21
- uses: actions/setup-python@v5
21
+ uses: actions/setup-python@v6
22
22
  with:
23
23
  python-version: ${{ matrix.python-version }}
24
24
 
@@ -24,7 +24,7 @@ jobs:
24
24
  steps:
25
25
  - uses: actions/checkout@v4
26
26
  - name: Set up Python
27
- uses: actions/setup-python@v5
27
+ uses: actions/setup-python@v6
28
28
  with:
29
29
  python-version: '3.x'
30
30
  - name: Install dependencies
@@ -34,7 +34,7 @@ jobs:
34
34
  - name: Build package
35
35
  run: python -m build
36
36
  - name: Publish package
37
- uses: pypa/gh-action-pypi-publish@27b31702a0e7fc50959f5ad993c78deac1bdfc29
37
+ uses: pypa/gh-action-pypi-publish@ed0c53931b1dc9bd32cbe73a98c7f6766f8a527e
38
38
  with:
39
39
  user: __token__
40
40
  password: ${{ secrets.PYPI_API_TOKEN }}
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: arvi
3
- Version: 0.2.9
3
+ Version: 0.2.10
4
4
  Summary: The Automated RV Inspector
5
5
  Author-email: João Faria <joao.faria@unige.ch>
6
6
  License: MIT
@@ -193,12 +193,13 @@ def get_observations_from_instrument(star, instrument, user=None, main_id=None,
193
193
  except TypeError:
194
194
  msg = f'no {instrument} observations for {star}'
195
195
  raise ValueError(msg) from None
196
+
196
197
  if (isinstance(instrument, str)):
197
198
  filters = {
198
199
  "ins_name": {"contains": [instrument]},
199
200
  "obj_id_daceid": {"contains": [dace_id]}
200
201
  }
201
- elif (isinstance(instrument, list)):
202
+ elif (isinstance(instrument, (list, tuple, np.ndarray))):
202
203
  filters = {
203
204
  "ins_name": {"contains": instrument},
204
205
  "obj_id_daceid": {"contains": [dace_id]}
@@ -123,7 +123,10 @@ def HARPS_commissioning(self, mask=True, plot=True):
123
123
  if check(self, 'HARPS') is None:
124
124
  return
125
125
 
126
- affected = self.time < HARPS_start
126
+ affected = np.logical_and(
127
+ self.instrument_array == 'HARPS03',
128
+ self.time < HARPS_start
129
+ )
127
130
  total_affected = affected.sum()
128
131
 
129
132
  if self.verbose:
@@ -133,7 +136,7 @@ def HARPS_commissioning(self, mask=True, plot=True):
133
136
 
134
137
  if mask:
135
138
  self.mask[affected] = False
136
- self._propagate_mask_changes()
139
+ self._propagate_mask_changes(_remove_instrument=False)
137
140
 
138
141
  if plot:
139
142
  self.plot(show_masked=True)
@@ -155,7 +158,14 @@ def HARPS_fiber_commissioning(self, mask=True, plot=True):
155
158
  if check(self, 'HARPS') is None:
156
159
  return
157
160
 
158
- affected = (self.time >= HARPS_technical_intervention_range[0]) & (self.time <= HARPS_technical_intervention_range[1])
161
+ affected = np.logical_and(
162
+ self.time >= HARPS_technical_intervention_range[0],
163
+ self.time <= HARPS_technical_intervention_range[1]
164
+ )
165
+ affected = np.logical_and(
166
+ affected,
167
+ np.char.find(self.instrument_array, 'HARPS') == 0
168
+ )
159
169
  total_affected = affected.sum()
160
170
 
161
171
  if self.verbose:
@@ -165,7 +175,7 @@ def HARPS_fiber_commissioning(self, mask=True, plot=True):
165
175
 
166
176
  if mask:
167
177
  self.mask[affected] = False
168
- self._propagate_mask_changes()
178
+ self._propagate_mask_changes(_remove_instrument=False)
169
179
 
170
180
  if plot:
171
181
  self.plot(show_masked=True)
@@ -187,7 +197,10 @@ def ESPRESSO_commissioning(self, mask=True, plot=True):
187
197
  if check(self, 'ESPRESSO') is None:
188
198
  return
189
199
 
190
- affected = self.time < ESPRESSO_start
200
+ affected = np.logical_and(
201
+ self.instrument_array == 'ESPRESSO18',
202
+ self.time < ESPRESSO_start
203
+ )
191
204
  total_affected = affected.sum()
192
205
 
193
206
  if self.verbose:
@@ -197,7 +210,7 @@ def ESPRESSO_commissioning(self, mask=True, plot=True):
197
210
 
198
211
  if mask:
199
212
  self.mask[affected] = False
200
- self._propagate_mask_changes()
213
+ self._propagate_mask_changes(_remove_instrument=False)
201
214
 
202
215
  if plot and total_affected > 0:
203
216
  self.plot(show_masked=True)
@@ -246,7 +259,7 @@ def ADC_issues(self, mask=True, plot=True, check_headers=False):
246
259
 
247
260
  if mask:
248
261
  self.mask[intersect] = False
249
- self._propagate_mask_changes()
262
+ self._propagate_mask_changes(_remove_instrument=False)
250
263
 
251
264
  if plot:
252
265
  self.plot(show_masked=True)
@@ -282,7 +295,7 @@ def blue_cryostat_issues(self, mask=True, plot=True):
282
295
 
283
296
  if mask:
284
297
  self.mask[intersect] = False
285
- self._propagate_mask_changes()
298
+ self._propagate_mask_changes(_remove_instrument=False)
286
299
 
287
300
  if plot:
288
301
  self.plot(show_masked=True)
@@ -330,7 +343,7 @@ def qc_scired_issues(self, plot=False, **kwargs):
330
343
  return
331
344
 
332
345
  self.mask[affected] = False
333
- self._propagate_mask_changes()
346
+ self._propagate_mask_changes(_remove_instrument=False)
334
347
 
335
348
  if plot:
336
349
  self.plot(show_masked=True)
@@ -364,6 +377,7 @@ class ISSUES:
364
377
  logger.error('are the data binned? cannot proceed to mask these points...')
365
378
 
366
379
  results = list(filter(lambda x: x is not None, results))
380
+ self._propagate_mask_changes()
367
381
 
368
382
  try:
369
383
  return np.logical_or.reduce(results)
@@ -0,0 +1,296 @@
1
+ import io
2
+ from contextlib import redirect_stdout, contextmanager
3
+ from string import ascii_lowercase
4
+ from matplotlib import pyplot as plt
5
+ import numpy as np
6
+
7
+ from kepmodel.rv import RvModel
8
+ from spleaf.cov import merge_series
9
+ from spleaf.term import Error, InstrumentJitter
10
+
11
+ from .setup_logger import setup_logger
12
+ from .utils import timer, adjust_lightness
13
+
14
+
15
+ class model:
16
+ logger = setup_logger()
17
+ # periodogram settings
18
+ Pmin, Pmax, nfreq = 1.5, 10_000, 100_000
19
+
20
+ @contextmanager
21
+ def ew(self):
22
+ for name in self.model.keplerian:
23
+ self.model.set_keplerian_param(name, param=['P', 'la0', 'K', 'e', 'w'])
24
+ try:
25
+ yield
26
+ finally:
27
+ for name in self.model.keplerian:
28
+ self.model.set_keplerian_param(name, param=['P', 'la0', 'K', 'esinw', 'ecosw'])
29
+
30
+ @property
31
+ def nu0(self):
32
+ return 2 * np.pi / self.Pmax
33
+
34
+ @property
35
+ def dnu(self):
36
+ return (2 * np.pi / self.Pmin - self.nu0) / (self.nfreq - 1)
37
+
38
+ def __init__(self, s):
39
+ self.s = s
40
+ self.instruments = s.instruments
41
+ self.Pmax = 2 * np.ptp(s.time)
42
+ ts = self.ts = self.s._mtime_sorter
43
+
44
+ # t, y_ ye, series_index = merge_series(
45
+ # [_s.mtime for _s in s],
46
+ # [_s.mvrad for _s in s],
47
+ # [_s.msvrad for _s in s],
48
+ # )
49
+
50
+ inst_jit = self._get_jitters()
51
+
52
+ self.model = RvModel(self.s.mtime[ts], self.s.mvrad[ts],
53
+ err=Error(self.s.msvrad[ts]), **inst_jit)
54
+ self.np = 0
55
+ self._add_means()
56
+
57
+ def _add_means(self):
58
+ for inst in self.s.instruments:
59
+ # if inst not in self.s.instrument_array[self.s.mask]:
60
+ # continue
61
+ mask = self.s.instrument_array[self.s.mask][self.ts] == inst
62
+ self.model.add_lin(
63
+ derivative=1.0 * mask,
64
+ name=f"offset_{inst}",
65
+ value=getattr(self.s, inst).mvrad.mean(),
66
+ )
67
+ self.model.fit_lin()
68
+
69
+ def _get_jitters(self):
70
+ inst_jit = {}
71
+ for inst in self.s.instruments:
72
+ inst_jit[f'jit_{inst}'] = InstrumentJitter(
73
+ indices=self.s.instrument_array[self.s.mask] == inst,
74
+ sig=self.s.svrad[self.s.mask].min()
75
+ )
76
+ return inst_jit
77
+
78
+ def _set_jitters(self, value=0.0):
79
+ for par in self.model.cov.param:
80
+ if 'jit' in par:
81
+ self.model.set_param(value, f'cov.{par}')
82
+ # self.model.fit()
83
+
84
+ def _equal_coralie_offsets(self):
85
+ if self.s._check_instrument('CORALIE') is None:
86
+ return
87
+ mask = np.char.find(self.s.instrument_array, 'CORALIE') == 0
88
+ mean = self.s.vrad[mask].mean()
89
+ for inst in self.instruments:
90
+ if 'CORALIE' in inst:
91
+ self.model.set_param(mean, f'lin.offset_{inst}')
92
+
93
+
94
+ def __repr__(self):
95
+ with self.ew():
96
+ with io.StringIO() as buf, redirect_stdout(buf):
97
+ self.model.show_param()
98
+ output = buf.getvalue()
99
+ return output
100
+
101
+ def to_table(self, **kwargs):
102
+ from .utils import pretty_print_table
103
+ lines = repr(self)
104
+ lines = lines.replace(' [deg]', '_[deg]')
105
+ lines = lines.encode().replace(b'\xc2\xb1', b'').decode()
106
+ lines = lines.split('\n')
107
+ lines = [line.split() for line in lines]
108
+ lines = [[col.replace('_[deg]', ' [deg]') for col in line] for line in lines]
109
+ pretty_print_table(lines[:-2], **kwargs)
110
+
111
+ @property
112
+ def fit_param(self):
113
+ return self.model.fit_param
114
+
115
+ def fit(self):
116
+ # fit offsets
117
+ self.model.fit_param = [f'lin.offset_{inst}' for inst in self.instruments]
118
+ # fit jitters
119
+ self.model.fit_param += [f'cov.{par}' for par in self.model.cov.param]
120
+ # fit keplerian(s)
121
+ self.model.fit_param += [
122
+ f'kep.{k}.{p}'
123
+ for k, v in self.model.keplerian.items()
124
+ for p in v._param
125
+ ]
126
+ # if self.np == 0:
127
+ # self._set_jitters(0.1 * np.std(self.model.y - self.model.model()))
128
+ try:
129
+ self.model.fit()
130
+ except Exception as e:
131
+ print(e)
132
+
133
+
134
+ def plot(self, **kwargs):
135
+ fig, ax = self.s.plot(**kwargs)
136
+ tt = self.s._tt()
137
+ time_offset = 50000 if 'remove_50000' in kwargs else 0
138
+
139
+ for i, inst in enumerate(self.s):
140
+ inst_name = inst.instruments[0].replace('-', '_')
141
+ val = self.model.get_param(f'lin.offset_{inst_name}')
142
+ x = np.array([inst.mtime.min(), inst.mtime.max()]) - time_offset
143
+ y = [val, val]
144
+ ax.plot(x, y, ls='--', color=f'C{i}')
145
+ mask = (tt > inst.mtime.min()) & (tt < inst.mtime.max())
146
+ color = adjust_lightness(f'C{i}', 1.2)
147
+ ax.plot(tt[mask] - time_offset,
148
+ val + self.model.keplerian_model(tt)[mask],
149
+ color=color)
150
+
151
+ return fig, ax
152
+
153
+ def plot_phasefolding(self, planets=None, ax=None):
154
+ t = self.model.t
155
+ res = self.model.residuals()
156
+ sig = np.sqrt(self.model.cov.A)
157
+
158
+ Msmooth = np.linspace(0, 360, 1000)
159
+
160
+ if planets is None:
161
+ planets = list(self.model.keplerian.keys())
162
+
163
+ if ax is None:
164
+ fig, axs = plt.subplots(
165
+ 1, len(planets), sharex=True, sharey=True, constrained_layout=True,
166
+ squeeze=False
167
+ )
168
+ else:
169
+ axs = np.atleast_1d(ax)
170
+ fig = axs[0].figure
171
+
172
+ for p, ax in zip(planets, axs.flat):
173
+ self.model.set_keplerian_param(p, param=['P', 'la0', 'K', 'e', 'w'])
174
+ kep = self.model.keplerian[p]
175
+ P = self.model.get_param(f'kep.{p}.P')
176
+ M0 = (180 / np.pi * (self.model.get_param(f'kep.{p}.la0') - self.model.get_param(f'kep.{p}.w')))
177
+ M = (M0 + 360 / P * t) % 360
178
+ reskep = res + kep.rv(t)
179
+ tsmooth = (Msmooth - M0) * P / 360
180
+ mod = kep.rv(tsmooth)
181
+
182
+ ax.plot([0, 360], [0, 0], '--', c='gray', lw=1)
183
+ ax.plot(Msmooth, mod, 'k-', lw=3, rasterized=True)
184
+ for inst in self.instruments:
185
+ sel = self.s.instrument_array[self.s.mask] == inst
186
+ ax.errorbar(M[sel], reskep[sel], sig[sel], fmt='.',
187
+ rasterized=True, alpha=0.7)
188
+
189
+ ax.set(ylabel='RV [m/s]', xlabel='Mean anomaly [deg]',
190
+ xticks=np.arange(0, 361, 90))
191
+ ax.minorticks_on()
192
+ self.model.set_keplerian_param(p, param=['P', 'la0', 'K', 'esinw', 'ecosw'])
193
+
194
+ # handles, labels = ax.get_legend_handles_labels()
195
+ # fig.legend(handles, labels, loc='center left', bbox_to_anchor=(0.9, 0.5))
196
+ return fig, axs
197
+
198
+ def add_planet_from_period(self, period):
199
+ self.model.add_keplerian_from_period(period, fit=True)
200
+ self.model.fit()
201
+ self.np += 1
202
+
203
+ def _plot_periodogram(self, P=None, power=None, kmax=None, faplvl=None,
204
+ **kwargs):
205
+ if P is None and power is None:
206
+ with timer('periodogram'):
207
+ nu, power = self.model.periodogram(self.nu0, self.dnu, self.nfreq)
208
+ P = 2 * np.pi / nu
209
+
210
+ if 'ax' in kwargs:
211
+ ax = kwargs.pop('ax')
212
+ fig = ax.figure
213
+ else:
214
+ fig, ax = plt.subplots(1, 1, constrained_layout=True)
215
+ ax.semilogx(P, power, 'k', lw=1, rasterized=True)
216
+ ax.set_ylim(0, 1.2 * power.max())
217
+ ax.set(xlabel='Period [days]', ylabel='Normalized power')
218
+
219
+ if kmax is None:
220
+ kmax = np.argmax(power)
221
+ ax.plot(P[kmax], power[kmax], 'or', ms=4)
222
+ ax.text(P[kmax], power[kmax] * 1.1, f'{P[kmax]:.3f} d',
223
+ ha='right', va='center', color='r')
224
+
225
+ if faplvl is None:
226
+ faplvl = self.model.fap(power[kmax], nu.max())
227
+ ax.text(0.99, 0.95, f'FAP = {faplvl:.2g}', transform=ax.transAxes,
228
+ ha='right', va='top')
229
+
230
+ return fig, ax
231
+
232
+ def add_keplerian_from_periodogram(self, fap_max=0.001, plot=False,
233
+ fit_first=True):
234
+ if fit_first and self.np == 0:
235
+ self.fit()
236
+
237
+ self._equal_coralie_offsets()
238
+
239
+ with timer('periodogram'):
240
+ nu, power = self.model.periodogram(self.nu0, self.dnu, self.nfreq)
241
+
242
+ P = 2 * np.pi / nu
243
+ # Compute FAP
244
+ kmax = np.argmax(power)
245
+ faplvl = self.model.fap(power[kmax], nu.max())
246
+ self.logger.info('highest periodogram peak:')
247
+ self.logger.info(f'P={P[kmax]:.4f} d, power={power[kmax]:.3f}, FAP={faplvl:.2e}')
248
+ if plot:
249
+ self._plot_periodogram(P, power, kmax, faplvl)
250
+
251
+ if faplvl > fap_max:
252
+ print('non-significant peak')
253
+ self.fit()
254
+ return False
255
+
256
+ # add new planet
257
+ letter = ascii_lowercase[1:][self.np]
258
+ self.model.add_keplerian_from_period(P[kmax], name=letter,
259
+ guess_kwargs={'emax': 0.8})
260
+ # self.model.set_keplerian_param(letter, param=['P', 'la0', 'K', 'e', 'w'])
261
+ self.model.set_keplerian_param(letter, param=['P', 'la0', 'K', 'esinw', 'ecosw'])
262
+ self.np += 1
263
+ self.fit()
264
+
265
+ if plot:
266
+ self.plot()
267
+
268
+ return True
269
+
270
+ @property
271
+ def offsets(self):
272
+ names = [f'lin.offset_{inst}' for inst in self.instruments]
273
+ return {
274
+ name.replace('lin.', ''): self.model.get_param(name)
275
+ for name in names
276
+ }
277
+
278
+ @property
279
+ def jitters(self):
280
+ names = [f'cov.{par}' for par in self.model.cov.param]
281
+ return {
282
+ name.replace('cov.', '').replace('.sig', ''): self.model.get_param(name)
283
+ for name in names
284
+ }
285
+
286
+ @property
287
+ def keplerians(self):
288
+ keps = {name: {} for name in self.model.keplerian.keys()}
289
+ for name in keps:
290
+ params = self.model.keplerian[name]._param
291
+ pars = [f'kep.{name}.{p}' for p in params]
292
+ keps[name] = {
293
+ par.replace(f'kep.{name}.', ''): self.model.get_param(par)
294
+ for par in pars
295
+ }
296
+ return keps
@@ -6,7 +6,7 @@ from io import StringIO
6
6
  import numpy as np
7
7
  from astropy.timeseries import LombScargle
8
8
 
9
- from .setup_logger import logger
9
+ from .setup_logger import setup_logger
10
10
  from kepmodel.rv import RvModel
11
11
  from spleaf.term import Error
12
12
 
@@ -32,6 +32,7 @@ def run_query(query):
32
32
 
33
33
  class Planets:
34
34
  def __init__(self, system):
35
+ logger = setup_logger()
35
36
  self.s = system
36
37
  self.verbose = system.verbose
37
38
 
@@ -163,6 +164,7 @@ class Planets:
163
164
  self.model.show_param()
164
165
 
165
166
  def fit_all(self, adjust_data=False):
167
+ logger = setup_logger()
166
168
  self.model.fit()
167
169
 
168
170
  newP = np.array([self.model.get_param(f'kep.{i}.P') for i in range(self.np)])
@@ -187,5 +189,7 @@ class Planets:
187
189
  self.s._build_arrays()
188
190
 
189
191
  def __repr__(self):
190
- return f'{self.star}({self.np} planets, '\
191
- f'P={list(self.P)}, K={list(self.K)}, e={list(self.e)})'
192
+ P = list(map(float, self.P))
193
+ K = list(map(float, self.K))
194
+ e = list(map(float, self.e))
195
+ return f'{self.star}({self.np} planets, {P=}, {K=}, {e=})'
@@ -40,6 +40,7 @@ class REPORTS:
40
40
  rows.append([self.star] + [''] * len(self.instruments) + [''])
41
41
  rows.append([''] + self.instruments + ['full'])
42
42
  rows.append(['N'] + list(self.NN.values()) + [self.N])
43
+ rows.append(['T span'] + [np.ptp(s.mtime).round(1) for s in self] + [np.ptp(self.mtime).round(1)])
43
44
  rows.append(['RV span'] + [np.ptp(s.mvrad).round(3) for s in self] + [np.ptp(self.mvrad).round(3)])
44
45
  rows.append(['RV std'] + [s.mvrad.std().round(3) for s in self] + [self.mvrad.std().round(3)])
45
46
  rows.append(['eRV mean'] + [s.msvrad.mean().round(3) for s in self] + [self.msvrad.mean().round(3)])
@@ -201,4 +202,110 @@ class REPORTS:
201
202
  pdf.savefig(fig)
202
203
  # os.system(f'evince {save} &')
203
204
 
204
- return fig
205
+ return fig
206
+
207
+
208
+ def kepmodel_report(self, fit_keplerians=3, save=None, nasaexo_title=False):
209
+ import matplotlib.pyplot as plt
210
+ import matplotlib.gridspec as gridspec
211
+ from matplotlib.backends.backend_pdf import PdfPages
212
+ logger = setup_logger()
213
+
214
+ def set_align_for_column(table, col, align="left"):
215
+ cells = [key for key in table._cells if key[1] == col]
216
+ for cell in cells:
217
+ table._cells[cell]._loc = align
218
+ table._cells[cell]._text.set_horizontalalignment(align)
219
+
220
+ from .kepmodel_wrapper import model
221
+ m = model(self)
222
+
223
+ while fit_keplerians > 0:
224
+ if m.add_keplerian_from_periodogram():
225
+ fit_keplerians -= 1
226
+ else:
227
+ break
228
+
229
+ m.fit()
230
+
231
+
232
+ # size = A4
233
+ size = 8.27, 11.69
234
+ fig = plt.figure(figsize=size, constrained_layout=True)
235
+ gs = gridspec.GridSpec(5, 3, figure=fig, height_ratios=[2, 1, 1, 1, 1])
236
+
237
+ # first row, all columns
238
+ ax1 = plt.subplot(gs[0, :])
239
+
240
+ if nasaexo_title:
241
+ title = str(self.planets).replace('(', '\n').replace(')', '')
242
+ star, planets = title.split('\n')
243
+ planets = planets.replace('planets,', 'known planets\n')
244
+ ax1.set_title(star, loc='left', fontsize=14)
245
+ ax1.set_title(planets, loc='right', fontsize=10)
246
+ else:
247
+ title = f'{self.star}'
248
+ ax1.set_title(title, loc='left', fontsize=14)
249
+ # ax1.set_title(r"\href{http://www.google.com}{link}", color='blue',
250
+ # loc='center')
251
+
252
+ m.plot(ax=ax1, N_in_label=True, tooltips=False, remove_50000=True)
253
+
254
+ ax1.legend().remove()
255
+ legend_ax = plt.subplot(gs[1, -1])
256
+ legend_ax.axis('off')
257
+ leg = plt.legend(*ax1.get_legend_handles_labels(),
258
+ prop={'family': 'monospace'})
259
+ legend_ax.add_artist(leg)
260
+
261
+ ax2 = plt.subplot(gs[1, :-1])
262
+ m._plot_periodogram(ax=ax2)
263
+
264
+ ax3 = plt.subplot(gs[2, 0])
265
+ ax3.axis('off')
266
+ items = list(m.offsets.items())
267
+ items = [[item[0].replace('offset_', 'offet '), item[1].round(3)] for item in items]
268
+ table = ax3.table(items, loc='center', edges='open')
269
+ table.auto_set_font_size(False)
270
+ table.set_fontsize(9)
271
+ set_align_for_column(table, 1, align="left")
272
+
273
+ ax4 = plt.subplot(gs[2, 1])
274
+ ax4.axis('off')
275
+ items = list(m.jitters.items())
276
+ items = [[item[0].replace('jit_', 'jitter '), item[1].round(3)] for item in items]
277
+ table = ax4.table(items, loc='center', edges='open')
278
+ table.auto_set_font_size(False)
279
+ table.set_fontsize(9)
280
+ set_align_for_column(table, 1, align="left")
281
+
282
+ ax5 = plt.subplot(gs[2, 2])
283
+ ax5.axis('off')
284
+ items = [
285
+ ['N', m.model.n],
286
+ [r'N$_{\rm free}$', len(m.model.fit_param)],
287
+ [r'$\chi^2$', round(m.model.chi2(), 2)],
288
+ [r'$\chi^2_r$', round(m.model.chi2() / (m.model.n - len(m.model.fit_param)), 2)],
289
+ [r'$\log L$', round(m.model.loglike(), 2)],
290
+ ]
291
+ table = ax5.table(items, loc='center', edges='open')
292
+ table.auto_set_font_size(False)
293
+ table.set_fontsize(9)
294
+ set_align_for_column(table, 1, align="left")
295
+
296
+ for i, name in enumerate(m.keplerians):
297
+ ax = plt.subplot(gs[3, i])
298
+ m.plot_phasefolding(planets=name, ax=ax)
299
+
300
+ ax = plt.subplot(gs[4, i])
301
+ ax.axis('off')
302
+ with m.ew():
303
+ items = list(m.keplerians[name].items())
304
+ items = [[item[0], item[1].round(3)] for item in items]
305
+ table = ax.table(items, loc='center', edges='open')
306
+ table.auto_set_font_size(False)
307
+ table.set_fontsize(9)
308
+ set_align_for_column(table, 1, align="left")
309
+
310
+
311
+ return fig, m
@@ -1,4 +1,6 @@
1
+ from functools import partial
1
2
  import numpy as np
3
+ from scipy.stats import norm
2
4
 
3
5
  def wmean(a, e):
4
6
  """Weighted mean of array `a`, with uncertainties given by `e`.
@@ -50,16 +52,39 @@ def wrms(a, e, ignore_nans=False):
50
52
  w = 1 / e**2
51
53
  return np.sqrt(np.sum(w * (a - np.average(a, weights=w))**2) / sum(w))
52
54
 
55
+ # from https://stackoverflow.com/questions/20601872/numpy-or-scipy-to-calculate-weighted-median
56
+ def weighted_quantiles_interpolate(values, weights, quantiles):
57
+ i = np.argsort(values)
58
+ c = np.cumsum(weights[i])
59
+ q = np.searchsorted(c, quantiles * c[-1])
60
+ # Ensure right-end isn't out of bounds. Thanks @Jeromino!
61
+ q_plus1 = np.clip(q + 1, a_min=None, a_max=values.shape[0] - 1)
62
+ return np.where(
63
+ c[q] / c[-1] == quantiles,
64
+ 0.5 * (values[i[q]] + values[i[q_plus1]]),
65
+ values[i[q]],
66
+ )
53
67
 
54
- def sigmaclip_median(a, low=4.0, high=4.0):
68
+ weighted_median = partial(weighted_quantiles_interpolate, quantiles=0.5)
69
+
70
+
71
+
72
+ def sigmaclip_median(a, low=4.0, high=4.0, k=1/norm.ppf(3/4)):
55
73
  """
56
74
  Same as scipy.stats.sigmaclip but using the median and median absolute
57
75
  deviation instead of the mean and standard deviation.
58
76
 
59
77
  Args:
60
- a (array): Array containing data
61
- low (float): Number of MAD to use for the lower clipping limit
62
- high (float): Number of MAD to use for the upper clipping limit
78
+ a (array):
79
+ Array containing data
80
+ low (float):
81
+ Number of MAD to use for the lower clipping limit
82
+ high (float):
83
+ Number of MAD to use for the upper clipping limit
84
+ k (float):
85
+ Scale factor for the MAD to be an estimator of the standard
86
+ deviation. Depends on the (assumed) distribution of the data.
87
+ Default value is for the normal distribution (=1/norm.ppf(3/4)).
63
88
  Returns:
64
89
  SigmaclipResult: Object with the following attributes:
65
90
  - `clipped`: Masked array of data
@@ -71,7 +96,7 @@ def sigmaclip_median(a, low=4.0, high=4.0):
71
96
  c = np.asarray(a).ravel()
72
97
  delta = 1
73
98
  while delta:
74
- c_mad = median_abs_deviation(c)
99
+ c_mad = median_abs_deviation(c) * k
75
100
  c_median = np.median(c)
76
101
  size = c.size
77
102
  critlower = c_median - c_mad * low
@@ -7,7 +7,6 @@ import warnings
7
7
  from copy import deepcopy
8
8
  from datetime import datetime, timezone
9
9
 
10
- # import lazy_loader as lazy
11
10
  import numpy as np
12
11
 
13
12
  from .setup_logger import setup_logger
@@ -26,7 +25,8 @@ from .HZ import getHZ_period
26
25
  from .instrument_specific import ISSUES
27
26
  from .reports import REPORTS
28
27
  from .utils import sanitize_path, strtobool, there_is_internet, timer, chdir
29
- # from .utils import lazy_import
28
+ from .setup_logger import setup_logger
29
+ logger = setup_logger()
30
30
 
31
31
  # units = lazy_import('astropy.units')
32
32
  # units = lazy.load('astropy.units')
@@ -411,35 +411,71 @@ class RV(ISSUES, REPORTS):
411
411
  self._did_correct_berv = False
412
412
  self.__post_init__()
413
413
 
414
- def snapshot(self, directory=None, delete_others=False):
415
- import pickle
414
+ def snapshot(self, directory=None, delete_others=False, compress=False):
415
+ if compress:
416
+ try:
417
+ import compress_pickle as pickle
418
+ except ImportError:
419
+ logger.warning('compress_pickle not installed, not compressing')
420
+ import pickle
421
+ compress = False
422
+ else:
423
+ import pickle
424
+ import re
416
425
  from datetime import datetime
426
+
417
427
  ts = datetime.now().timestamp()
418
428
  star_name = self.star.replace(' ', '')
419
429
  file = f'{star_name}_{ts}.pkl'
420
430
 
431
+ server = None
421
432
  if directory is None:
422
433
  directory = '.'
423
434
  else:
424
- os.makedirs(directory, exist_ok=True)
425
-
426
- file = os.path.join(directory, file)
427
-
428
- if delete_others:
429
- import re
430
- other_pkls = [
431
- f for f in os.listdir(directory)
432
- if re.search(fr'{star_name}_\d+.\d+.pkl', f)
433
- ]
434
- for pkl in other_pkls:
435
- os.remove(os.path.join(directory, pkl))
435
+ if ':' in directory:
436
+ server, directory = directory.split(':')
437
+ delete_others = False
438
+ else:
439
+ os.makedirs(directory, exist_ok=True)
436
440
 
437
441
  metadata = {
438
442
  'star': self.star,
439
443
  'timestamp': ts,
440
444
  'description': 'arvi snapshot'
441
445
  }
442
- pickle.dump((self, metadata), open(file, 'wb'), protocol=0)
446
+
447
+
448
+ if server:
449
+ import posixpath
450
+ from .utils import server_sftp, server_file
451
+ with server_sftp(server=server) as sftp:
452
+ try:
453
+ sftp.chdir(directory)
454
+ except FileNotFoundError:
455
+ sftp.mkdir(directory)
456
+ finally:
457
+ sftp.chdir(directory)
458
+ with sftp.open(file, 'wb') as f:
459
+ print('saving snapshot to server...', end='', flush=True)
460
+ pickle.dump((self, metadata), f, protocol=0)
461
+ print('done')
462
+ file = posixpath.join(directory, file)
463
+ else:
464
+ if delete_others:
465
+ other_pkls = [
466
+ f for f in os.listdir(directory)
467
+ if re.search(fr'{star_name}_\d+.\d+.pkl', f)
468
+ ]
469
+ for pkl in other_pkls:
470
+ os.remove(os.path.join(directory, pkl))
471
+
472
+ file = os.path.join(directory, file)
473
+
474
+ if compress:
475
+ file += '.gz'
476
+
477
+ with open(file, 'wb') as f:
478
+ pickle.dump((self, metadata), f)
443
479
 
444
480
  if self.verbose:
445
481
  logger.info(f'saved snapshot to {file}')
@@ -514,6 +550,15 @@ class RV(ISSUES, REPORTS):
514
550
  def instrument_array(self):
515
551
  return np.concatenate([[i] * n for i, n in self.NN.items()])
516
552
 
553
+ def _instrument_mask(self, instrument):
554
+ if isinstance(instrument, str):
555
+ return np.char.find(self.instrument_array, instrument) == 0
556
+ elif isinstance(instrument, (list, tuple, np.ndarray)):
557
+ m = np.full_like(self.time, False, dtype=bool)
558
+ for i in instrument:
559
+ m |= np.char.find(self.instrument_array, i) == 0
560
+ return m
561
+
517
562
  @property
518
563
  def rms(self) -> float:
519
564
  """ Weighted rms of the (masked) radial velocities """
@@ -540,6 +585,11 @@ class RV(ISSUES, REPORTS):
540
585
  def _mtime_sorter(self):
541
586
  return np.argsort(self.mtime)
542
587
 
588
+ @property
589
+ def timespan(self):
590
+ """ Total time span of the (masked) observations """
591
+ return np.ptp(self.mtime)
592
+
543
593
  def _index_from_instrument_index(self, index, instrument):
544
594
  ind = np.where(self.instrument_array == instrument)[0]
545
595
  return ind[getattr(self, instrument).mask][index]
@@ -633,22 +683,28 @@ class RV(ISSUES, REPORTS):
633
683
  import pickle
634
684
  from datetime import datetime
635
685
  if star is None:
636
- assert file.endswith('.pkl'), 'expected a .pkl file'
637
- star, timestamp = file.replace('.pkl', '').split('_')
686
+ assert file.endswith(('.pkl', '.pkl.gz')), 'expected a .pkl file'
687
+ basefile = os.path.basename(file)
688
+ star, timestamp = basefile.replace('.pkl.gz', '').replace('.pkl', '').split('_')
638
689
  else:
639
690
  try:
640
- file = sorted(glob(f'{star}_*.*.pkl'))[-1]
691
+ file = sorted(glob(f'{star}_*.*.pkl*'))[-1]
641
692
  except IndexError:
642
693
  raise ValueError(f'cannot find any file matching {star}_*.pkl')
643
- star, timestamp = file.replace('.pkl', '').split('_')
694
+ star, timestamp = file.replace('.pkl.gz', '').replace('.pkl', '').split('_')
644
695
 
645
696
  dt = datetime.fromtimestamp(float(timestamp))
646
697
  if verbose:
647
698
  logger.info(f'reading snapshot of {star} from {dt}')
648
699
 
649
- s = pickle.load(open(file, 'rb'))
700
+ with open(file, 'rb') as f:
701
+ if file.endswith('.gz'):
702
+ import compress_pickle as pickle
703
+ s = pickle.load(f)
704
+
650
705
  if isinstance(s, tuple) and len(s) == 2:
651
706
  s, _metadata = s
707
+
652
708
  s._snapshot = file
653
709
  return s
654
710
 
@@ -1508,7 +1564,7 @@ class RV(ISSUES, REPORTS):
1508
1564
  """ Remove all observations that satisfy a condition
1509
1565
 
1510
1566
  Args:
1511
- condition (np.ndarray):
1567
+ condition (ndarray):
1512
1568
  Boolean array of the same length as the observations
1513
1569
  """
1514
1570
  if self.verbose:
@@ -1668,16 +1724,17 @@ class RV(ISSUES, REPORTS):
1668
1724
  self._propagate_mask_changes()
1669
1725
 
1670
1726
 
1671
- def _propagate_mask_changes(self):
1727
+ def _propagate_mask_changes(self, _remove_instrument=True):
1672
1728
  """ link self.mask with each self.`instrument`.mask """
1673
1729
  masked = np.where(~self.mask)[0]
1674
1730
  for m in masked:
1675
1731
  inst = self.instruments[self.obs[m] - 1]
1676
1732
  n_before = (self.obs < self.obs[m]).sum()
1677
1733
  getattr(self, inst).mask[m - n_before] = False
1678
- for inst in self.instruments:
1679
- if getattr(self, inst).mtime.size == 0:
1680
- self.remove_instrument(inst, strict=True)
1734
+ if _remove_instrument:
1735
+ for inst in self.instruments:
1736
+ if getattr(self, inst).mtime.size == 0:
1737
+ self.remove_instrument(inst, strict=True)
1681
1738
 
1682
1739
  def secular_acceleration(self, epoch=None, just_compute=False, force_simbad=False):
1683
1740
  """
@@ -1695,9 +1752,12 @@ class RV(ISSUES, REPORTS):
1695
1752
  force_simbad (bool, optional):
1696
1753
  Use Simbad proper motions even if Gaia is available
1697
1754
  """
1698
- if self._did_secular_acceleration and not just_compute: # don't do it twice
1755
+ # don't do it twice
1756
+ if self._did_secular_acceleration and not just_compute:
1699
1757
  return
1700
1758
 
1759
+ from astropy import units
1760
+
1701
1761
  #as_yr = units.arcsec / units.year
1702
1762
  mas_yr = units.milliarcsecond / units.year
1703
1763
  mas = units.milliarcsecond
@@ -1829,15 +1889,21 @@ class RV(ISSUES, REPORTS):
1829
1889
 
1830
1890
  self._did_secular_acceleration = False
1831
1891
 
1832
- def sigmaclip(self, sigma=5, instrument=None, strict=True):
1892
+ def sigmaclip(self, sigma=5, quantity='vrad', instrument=None,
1893
+ strict=True):
1833
1894
  """
1834
- Sigma-clip RVs (per instrument!), by MAD away from the median.
1895
+ Sigma-clip RVs or other quantities (per instrument!), by MAD away from
1896
+ the median.
1835
1897
 
1836
1898
  Args:
1837
1899
  sigma (float):
1838
- Number of MADs to clip
1900
+ Number of MADs away from the median
1901
+ quantity (str):
1902
+ Quantity to sigma-clip (by default the RVs)
1839
1903
  instrument (str, list):
1840
1904
  Instrument(s) to sigma-clip
1905
+ strict (bool):
1906
+ Passed directly to self._check_instrument
1841
1907
  """
1842
1908
  #from scipy.stats import sigmaclip as dosigmaclip
1843
1909
  from .stats import sigmaclip_median as dosigmaclip
@@ -1850,16 +1916,20 @@ class RV(ISSUES, REPORTS):
1850
1916
 
1851
1917
  for inst in instruments:
1852
1918
  m = self.instrument_array == inst
1853
- result = dosigmaclip(self.vrad[m], low=sigma, high=sigma)
1919
+ d = getattr(self, quantity)
1920
+
1921
+ if np.isnan(d[m]).all():
1922
+ continue
1923
+
1924
+ result = dosigmaclip(d[m], low=sigma, high=sigma)
1854
1925
  # n = self.vrad[m].size - result.clipped.size
1855
1926
 
1856
- ind = m & self.mask & \
1857
- ((self.vrad < result.lower) | (self.vrad > result.upper))
1927
+ ind = m & self.mask & ((d < result.lower) | (d > result.upper))
1858
1928
  n = ind.sum()
1859
1929
 
1860
1930
  if self.verbose and n > 0:
1861
1931
  s = 's' if (n == 0 or n > 1) else ''
1862
- logger.warning(f'sigma-clip RVs will remove {n} point{s} for {inst}')
1932
+ logger.warning(f'sigma-clip {quantity} will remove {n} point{s} for {inst}')
1863
1933
 
1864
1934
  if n > 0:
1865
1935
  self.mask[ind] = False
@@ -1884,21 +1954,32 @@ class RV(ISSUES, REPORTS):
1884
1954
  if config.return_self:
1885
1955
  return self
1886
1956
 
1887
- def clip_maxerror(self, maxerror:float):
1888
- """ Mask out points with RV error larger than a given value
1957
+ def clip_maxerror(self, maxerror:float, instrument=None):
1958
+ """
1959
+ Mask out points with RV error larger than a given value. If `instrument`
1960
+ is given, mask only observations from that instrument.
1889
1961
 
1890
1962
  Args:
1891
1963
  maxerror (float): Maximum error to keep.
1964
+ instrument (str, list, tuple, ndarray): Instrument(s) to clip
1892
1965
  """
1893
1966
  if self._child:
1894
1967
  return
1895
1968
 
1896
1969
  self.maxerror = maxerror
1970
+
1971
+ if instrument is None:
1972
+ inst_mask = np.ones_like(self.svrad, dtype=bool)
1973
+ else:
1974
+ inst_mask = self._instrument_mask(instrument)
1975
+
1897
1976
  above = self.svrad > maxerror
1898
- n = above.sum()
1899
- self.mask[above] = False
1977
+ old_mask = self.mask.copy()
1978
+
1979
+ self.mask[inst_mask & above] = False
1900
1980
 
1901
1981
  if self.verbose and above.sum() > 0:
1982
+ n = (above[inst_mask] & old_mask[inst_mask]).sum()
1902
1983
  s = 's' if (n == 0 or n > 1) else ''
1903
1984
  logger.warning(f'clip_maxerror ({maxerror} {self.units}) removed {n} point' + s)
1904
1985
 
@@ -1906,6 +1987,36 @@ class RV(ISSUES, REPORTS):
1906
1987
  if config.return_self:
1907
1988
  return self
1908
1989
 
1990
+ def sigmaclip_ew(self, sigma=5):
1991
+ """ Sigma-clip EW (FWHM x contrast), by MAD away from the median """
1992
+ from .stats import sigmaclip_median as dosigmaclip, weighted_median
1993
+
1994
+ S = deepcopy(self)
1995
+ for _s in S:
1996
+ m = _s.mask
1997
+ _s.fwhm -= weighted_median(_s.fwhm[m], 1 / _s.fwhm_err[m])
1998
+ _s.contrast -= weighted_median(_s.contrast[m], 1 / _s.contrast_err[m])
1999
+ S._build_arrays()
2000
+ ew = S.fwhm * S.contrast
2001
+ ew_err = np.hypot(S.fwhm_err * S.contrast, S.fwhm * S.contrast_err)
2002
+
2003
+ wmed = weighted_median(ew[S.mask], 1 / ew_err[S.mask])
2004
+ data = (ew - wmed) / ew_err
2005
+ result = dosigmaclip(data, low=sigma, high=sigma)
2006
+ ind = (data < result.lower) | (data > result.upper)
2007
+ self.mask[ind] = False
2008
+
2009
+ if self.verbose and ind.sum() > 0:
2010
+ n = ind.sum()
2011
+ s = 's' if (n == 0 or n > 1) else ''
2012
+ logger.warning(f'sigmaclip_ew removed {n} point' + s)
2013
+
2014
+ self._propagate_mask_changes()
2015
+ if config.return_self:
2016
+ return self
2017
+
2018
+
2019
+
1909
2020
  def bin(self):
1910
2021
  """
1911
2022
  Nightly bin the observations.
@@ -1949,7 +2060,8 @@ class RV(ISSUES, REPORTS):
1949
2060
 
1950
2061
  # treat ccf_mask specially, doing a 'unique' bin
1951
2062
  if q == 'ccf_mask':
1952
- setattr(s, q, bin_ccf_mask(s.mtime, getattr(s, q)))
2063
+ ccf_mask = getattr(s, q)[s.mask]
2064
+ setattr(s, q, bin_ccf_mask(s.mtime, ccf_mask))
1953
2065
  continue
1954
2066
 
1955
2067
  if Q.dtype != np.float64:
@@ -2317,7 +2429,7 @@ class RV(ISSUES, REPORTS):
2317
2429
  self.units = new_units
2318
2430
 
2319
2431
 
2320
- def put_at_systemic_velocity(self, factor=1.0):
2432
+ def put_at_systemic_velocity(self, factor=1.0, ignore=None):
2321
2433
  """
2322
2434
  For instruments in which mean(RV) < `factor` * ptp(RV), "move" RVs to
2323
2435
  the systemic velocity from simbad. This is useful if some instruments
@@ -2327,6 +2439,9 @@ class RV(ISSUES, REPORTS):
2327
2439
  """
2328
2440
  changed = False
2329
2441
  for inst in self.instruments:
2442
+ if ignore is not None:
2443
+ if inst in ignore or any([i in inst for i in ignore]):
2444
+ continue
2330
2445
  changed_inst = False
2331
2446
  s = getattr(self, inst)
2332
2447
  if s.mask.any():
@@ -2360,6 +2475,11 @@ class RV(ISSUES, REPORTS):
2360
2475
  self.instruments = sorted(self.instruments, key=lambda i: getattr(self, i).time.max())
2361
2476
  self._build_arrays()
2362
2477
 
2478
+ def put_instrument_last(self, instrument):
2479
+ if not self._check_instrument(instrument, strict=True, log=True):
2480
+ return
2481
+ self.instruments = [i for i in self.instruments if i != instrument] + [instrument]
2482
+ self._build_arrays()
2363
2483
 
2364
2484
  def save(self, directory=None, instrument=None, format='rdb',
2365
2485
  indicators=False, join_instruments=False, postfix=None,
@@ -2383,7 +2503,7 @@ class RV(ISSUES, REPORTS):
2383
2503
  Postfix to add to the filenames ([star]_[instrument]_[postfix].rdb).
2384
2504
  save_masked (bool, optional)
2385
2505
  If True, also save masked observations (those for which
2386
- self.mask == True)
2506
+ self.mask == False)
2387
2507
  save_nans (bool, optional)
2388
2508
  Whether to save NaN values in the indicators, if they exist. If
2389
2509
  False, the full observation which contains NaN values is not saved.
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: arvi
3
- Version: 0.2.9
3
+ Version: 0.2.10
4
4
  Summary: The Automated RV Inspector
5
5
  Author-email: João Faria <joao.faria@unige.ch>
6
6
  License: MIT
@@ -20,6 +20,7 @@ arvi/extra_data.py
20
20
  arvi/gaia_wrapper.py
21
21
  arvi/headers.py
22
22
  arvi/instrument_specific.py
23
+ arvi/kepmodel_wrapper.py
23
24
  arvi/kima_wrapper.py
24
25
  arvi/lbl_wrapper.py
25
26
  arvi/nasaexo_wrapper.py
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes