arvi 0.1.12__tar.gz → 0.1.14__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of arvi might be problematic. Click here for more details.

Files changed (56) hide show
  1. {arvi-0.1.12 → arvi-0.1.14}/.github/workflows/install.yml +2 -2
  2. {arvi-0.1.12 → arvi-0.1.14}/.github/workflows/python-publish.yml +1 -0
  3. {arvi-0.1.12 → arvi-0.1.14}/PKG-INFO +1 -1
  4. {arvi-0.1.12 → arvi-0.1.14}/arvi/HZ.py +0 -1
  5. arvi-0.1.14/arvi/__init__.py +34 -0
  6. arvi-0.1.14/arvi/berv.py +437 -0
  7. {arvi-0.1.12 → arvi-0.1.14}/arvi/binning.py +3 -2
  8. {arvi-0.1.12 → arvi-0.1.14}/arvi/config.py +7 -1
  9. {arvi-0.1.12 → arvi-0.1.14}/arvi/dace_wrapper.py +165 -11
  10. {arvi-0.1.12 → arvi-0.1.14}/arvi/data/obs_affected_ADC_issues.dat +4 -1
  11. {arvi-0.1.12 → arvi-0.1.14}/arvi/gaia_wrapper.py +9 -5
  12. arvi-0.1.14/arvi/headers.py +47 -0
  13. {arvi-0.1.12 → arvi-0.1.14}/arvi/instrument_specific.py +59 -3
  14. {arvi-0.1.12 → arvi-0.1.14}/arvi/nasaexo_wrapper.py +1 -1
  15. {arvi-0.1.12 → arvi-0.1.14}/arvi/plots.py +228 -28
  16. {arvi-0.1.12 → arvi-0.1.14}/arvi/programs.py +4 -2
  17. {arvi-0.1.12 → arvi-0.1.14}/arvi/simbad_wrapper.py +5 -2
  18. arvi-0.1.14/arvi/spectra.py +208 -0
  19. arvi-0.1.14/arvi/stellar.py +89 -0
  20. {arvi-0.1.12 → arvi-0.1.14}/arvi/timeseries.py +233 -66
  21. {arvi-0.1.12 → arvi-0.1.14}/arvi/translations.py +2 -0
  22. {arvi-0.1.12 → arvi-0.1.14}/arvi/utils.py +21 -0
  23. {arvi-0.1.12 → arvi-0.1.14}/arvi.egg-info/PKG-INFO +1 -1
  24. {arvi-0.1.12 → arvi-0.1.14}/arvi.egg-info/SOURCES.txt +4 -0
  25. {arvi-0.1.12 → arvi-0.1.14}/pyproject.toml +1 -1
  26. {arvi-0.1.12 → arvi-0.1.14}/tests/test_import_object.py +1 -1
  27. arvi-0.1.12/arvi/__init__.py +0 -19
  28. {arvi-0.1.12 → arvi-0.1.14}/.github/workflows/docs-gh-pages.yml +0 -0
  29. {arvi-0.1.12 → arvi-0.1.14}/.gitignore +0 -0
  30. {arvi-0.1.12 → arvi-0.1.14}/LICENSE +0 -0
  31. {arvi-0.1.12 → arvi-0.1.14}/README.md +0 -0
  32. {arvi-0.1.12 → arvi-0.1.14}/arvi/ariadne_wrapper.py +0 -0
  33. {arvi-0.1.12 → arvi-0.1.14}/arvi/data/extra/HD86226_PFS1.rdb +0 -0
  34. {arvi-0.1.12 → arvi-0.1.14}/arvi/data/extra/HD86226_PFS2.rdb +0 -0
  35. {arvi-0.1.12 → arvi-0.1.14}/arvi/data/extra/metadata.json +0 -0
  36. {arvi-0.1.12 → arvi-0.1.14}/arvi/data/info.svg +0 -0
  37. {arvi-0.1.12 → arvi-0.1.14}/arvi/data/obs_affected_blue_cryostat_issues.dat +0 -0
  38. {arvi-0.1.12 → arvi-0.1.14}/arvi/extra_data.py +0 -0
  39. {arvi-0.1.12 → arvi-0.1.14}/arvi/lbl_wrapper.py +0 -0
  40. {arvi-0.1.12 → arvi-0.1.14}/arvi/reports.py +0 -0
  41. {arvi-0.1.12 → arvi-0.1.14}/arvi/setup_logger.py +0 -0
  42. {arvi-0.1.12 → arvi-0.1.14}/arvi/stats.py +0 -0
  43. {arvi-0.1.12 → arvi-0.1.14}/arvi.egg-info/dependency_links.txt +0 -0
  44. {arvi-0.1.12 → arvi-0.1.14}/arvi.egg-info/requires.txt +0 -0
  45. {arvi-0.1.12 → arvi-0.1.14}/arvi.egg-info/top_level.txt +0 -0
  46. {arvi-0.1.12 → arvi-0.1.14}/docs/API.md +0 -0
  47. {arvi-0.1.12 → arvi-0.1.14}/docs/detailed.md +0 -0
  48. {arvi-0.1.12 → arvi-0.1.14}/docs/index.md +0 -0
  49. {arvi-0.1.12 → arvi-0.1.14}/docs/logo/detective.png +0 -0
  50. {arvi-0.1.12 → arvi-0.1.14}/docs/logo/logo.png +0 -0
  51. {arvi-0.1.12 → arvi-0.1.14}/mkdocs.yml +0 -0
  52. {arvi-0.1.12 → arvi-0.1.14}/setup.cfg +0 -0
  53. {arvi-0.1.12 → arvi-0.1.14}/setup.py +0 -0
  54. {arvi-0.1.12 → arvi-0.1.14}/tests/test_binning.py +0 -0
  55. {arvi-0.1.12 → arvi-0.1.14}/tests/test_simbad.py +0 -0
  56. {arvi-0.1.12 → arvi-0.1.14}/tests/test_stats.py +0 -0
@@ -19,9 +19,9 @@ jobs:
19
19
  python-version: ["3.8", "3.9", "3.10", "3.11"]
20
20
 
21
21
  steps:
22
- - uses: actions/checkout@v3
22
+ - uses: actions/checkout@v4
23
23
  - name: Set up Python ${{ matrix.python-version }}
24
- uses: actions/setup-python@v3
24
+ uses: actions/setup-python@v5
25
25
  with:
26
26
  python-version: ${{ matrix.python-version }}
27
27
 
@@ -9,6 +9,7 @@
9
9
  name: Upload Python Package
10
10
 
11
11
  on:
12
+ workflow_dispatch:
12
13
  release:
13
14
  types: [published]
14
15
 
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: arvi
3
- Version: 0.1.12
3
+ Version: 0.1.14
4
4
  Summary: The Automated RV Inspector
5
5
  Author-email: João Faria <joao.faria@unige.ch>
6
6
  License: MIT
@@ -1,4 +1,3 @@
1
- from collections import namedtuple
2
1
  import numpy as np
3
2
  from astropy.constants import G
4
3
  from astropy import units
@@ -0,0 +1,34 @@
1
+ __all__ = ['RV']
2
+
3
+ from .timeseries import RV
4
+
5
+ ## OLD
6
+ # # the __getattr__ function is always called twice, so we need this
7
+ # # to only build and return the RV object on the second time
8
+ # _ran_once = False
9
+
10
+ def __getattr__(name: str):
11
+ if name in (
12
+ '_ipython_canary_method_should_not_exist_',
13
+ '_ipython_display_',
14
+ '_repr_mimebundle_',
15
+ '__wrapped__'
16
+ ):
17
+ return
18
+
19
+ try:
20
+ globals()[name] = RV(name)
21
+ return globals()[name]
22
+ except ValueError as e:
23
+ raise ImportError(e) from None
24
+ # raise AttributeError(f"module {__name__!r} has no attribute {name!r}")
25
+
26
+ ## OLD
27
+ # # can't do it any other way :(
28
+ # global _ran_once
29
+
30
+ # if _ran_once:
31
+ # _ran_once = False
32
+ # return RV(name)
33
+ # else:
34
+ # _ran_once = True
@@ -0,0 +1,437 @@
1
+ import os
2
+ import numpy as np
3
+ import matplotlib.pyplot as plt
4
+
5
+ from arvi.headers import get_headers
6
+ from barycorrpy import get_BC_vel
7
+ from astropy.coordinates import SkyCoord
8
+ from astropy.time import Time
9
+ from astropy import units as u
10
+ from astropy import constants as const
11
+ from astropy.timeseries import LombScargle
12
+ from tqdm import tqdm
13
+
14
+ from .setup_logger import logger
15
+ from . import config
16
+
17
+
18
+ def correct_rvs(self, simple=False, H=None, save_files=False, plot=True):
19
+ """
20
+ """
21
+ import pickle
22
+
23
+ if hasattr(self, '_did_correct_berv') and self._did_correct_berv:
24
+ logger.info('Already corrected for the BERV! Not doing anything.')
25
+ return
26
+
27
+ path = os.path.dirname(__file__)
28
+ path = os.path.join(path, 'data')
29
+ pkl = os.path.join(path, 'berv_espresso_sine.pkl')
30
+ berv_espresso = pickle.load(open(pkl, 'rb'))
31
+
32
+ if simple:
33
+ logger.info('Correcting RVs with a previously-fitted sinusoid function')
34
+ _f = berv_espresso['func'].replace('lambda t: ', '')
35
+ logger.info(f': {_f}')
36
+ f = eval(berv_espresso['func'])
37
+ if plot:
38
+ _, ax = self.plot()
39
+ ax.plot(self._tt, f(self._tt) + self.vrad.mean(), 'k')
40
+ _, axgls = self.gls(label='before')
41
+
42
+ self.vrad = self.vrad + f(self.time)
43
+
44
+ if plot:
45
+ self.gls(ax=axgls, label='after')
46
+
47
+ return f(self.time)
48
+
49
+ else:
50
+ logger.info('Correcting RVs with actual difference between BERVs')
51
+ logger.info('(basically, use BERV_barycorrpy for BERV correction)')
52
+
53
+ old_vrad = self.vrad.copy()
54
+
55
+ _, berv = BERV(self, H=H, use_gaia_meassurements=True, plx=self.gaia.plx,
56
+ plot=False, ignore_mask=True)
57
+
58
+ if plot:
59
+ fig, axs = plt.subplots(2, 1, constrained_layout=True, height_ratios=(3, 1), sharex=True)
60
+ _, ax = self.plot(ax=axs[0])
61
+ _, axgls = self.gls(label='before')
62
+
63
+ # undo secular acceleration, if it was done
64
+ _did_secular_acceleration = self._did_secular_acceleration
65
+ self._undo_secular_acceleration()
66
+
67
+ # transform RVs: RV --> RV - BERVpipe + BERVbarycorrpy
68
+
69
+ diff = berv[self.star]['berv_barycorrpy'] - berv[self.star]['berv_pipeline']
70
+
71
+ if save_files:
72
+ i_inst = np.hstack([np.arange(n) for n in self.NN.values()])
73
+ with open(f'{self.star}_berv_correction.rdb', 'w') as rdb:
74
+ rdb.write('# time\n')
75
+ rdb.write('# vrad\n')
76
+ rdb.write('# svrad\n')
77
+ rdb.write('# berv - BERV value from header\n')
78
+ rdb.write('# berv_pipe - BERV from header corrected for 1.55e-8 factor\n')
79
+ rdb.write('# berv_barycorrpy - BERV value from barycorrpy\n')
80
+ rdb.write('# diff - difference between berv_barycorrpy and berv_pipe\n')
81
+ rdb.write('# vrad_berv_corrected = vrad + diff\n')
82
+ rdb.write('# instrument\n')
83
+ rdb.write('# i - index\n')
84
+ rdb.write('# i_inst - index within the instrument\n')
85
+ rdb.write('#\n')
86
+ rdb.write('# --> TO CORRECT vrad, we ** add the diff column **\n')
87
+ rdb.write('# --> the result of this operation is in column vrad_berv_corrected\n')
88
+ rdb.write('# --> vrad_berv_corrected is already corrected for the secular acceleration, vrad is not\n')
89
+ rdb.write('#\n')
90
+ #
91
+ cols = [
92
+ 'time', 'vrad', 'svrad',
93
+ 'berv', 'berv_pipe', 'berv_barycorrpy', 'diff', 'vrad_berv_corrected',
94
+ 'instrument', 'i', 'i_inst'
95
+ ]
96
+ rdb.write('# ' + '\t'.join(cols) + '\n')
97
+ for i, t in enumerate(self.time):
98
+ rdb.write(f'{t:11.5f}\t')
99
+ # if _did_secular_acceleration:
100
+ # rdb.write(f'{old_vrad[i]:13.5f}\t')
101
+ # else:
102
+ rdb.write(f'{self.vrad[i]:13.7f}\t')
103
+ rdb.write(f'{self.svrad[i]:13.7f}\t')
104
+ rdb.write(f'{self.berv[i]:15.7f}\t')
105
+ rdb.write(f'{berv[self.star]["berv_pipeline"][i]/1e3:15.7f}\t')
106
+ rdb.write(f'{berv[self.star]["berv_barycorrpy"][i]/1e3:15.7f}\t')
107
+ rdb.write(f'{diff[i]:15.7f}\t')
108
+ rdb.write(f'{self.vrad[i] + diff[i]:13.7f}\t')
109
+ rdb.write(f'{self.instrument_array[i]}\t')
110
+ rdb.write(f'{i}\t')
111
+ rdb.write(f'{i_inst[i]}\t')
112
+ rdb.write('\n')
113
+
114
+ self.add_to_vrad(diff)
115
+ self._did_correct_berv = True
116
+ self._did_secular_acceleration = True # "automatically", by using BERV_barycorrpy
117
+ self._did_secular_acceleration_simbad = False
118
+ self._did_secular_acceleration_epoch = Time('J2016').jd - 24e5
119
+
120
+ # the secular acceleration hadn't been done, but it was introduced by
121
+ # BERV_barycorrpy, so we need to undo it
122
+ if not _did_secular_acceleration:
123
+ self._undo_secular_acceleration()
124
+
125
+ if plot:
126
+ self.plot(ax=axs[0], marker='+', ms=5)
127
+ axs[1].plot(self.time, old_vrad - self.vrad, '.k', label='old RV - new RV')
128
+ ma = np.abs(axs[1].get_ylim()).max()
129
+ axs[1].set(ylim=(-ma, ma), xlabel=axs[0].get_xlabel(), ylabel='RV difference [m/s]')
130
+ self.gls(ax=axgls, label='after')
131
+
132
+ return diff
133
+
134
+ def get_A_and_V_from_lesta(self, username=config.username):
135
+ try:
136
+ import paramiko
137
+ except ImportError:
138
+ raise ImportError("paramiko is not installed. Please install it with 'pip install paramiko'")
139
+
140
+ logs = []
141
+ for f in self.raw_file:
142
+ f = f.replace('espresso/', '/projects/astro/ESPRESSODRS/')
143
+ f = f.replace('nirps/', '/projects/astro/NIRPSDRS/')
144
+ f = f.replace('.fits', '_SCIENCE_FP.log')
145
+ f = f.replace('reduced', 'log')
146
+ f = f.replace('r.ESPRE', 'ESPRESSO')
147
+ logs.append(f)
148
+
149
+ A, V = [], []
150
+
151
+ try:
152
+ ssh = paramiko.SSHClient()
153
+ ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
154
+ ssh.connect("lesta02.astro.unige.ch", username=username, timeout=5)
155
+ except Exception as e:
156
+ if 'getaddrinfo failed' in str(e):
157
+ jump = paramiko.SSHClient()
158
+ jump.set_missing_host_key_policy(paramiko.AutoAddPolicy())
159
+ jump.connect('login01.astro.unige.ch', username=username, timeout=5)
160
+ jump_transport = jump.get_transport()
161
+ jump_channel = jump_transport.open_channel('direct-tcpip', ('10.194.64.162', 22), ('129.194.64.20', 22))
162
+ ssh = paramiko.SSHClient()
163
+ ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
164
+ ssh.connect('lesta02.astro.unige.ch', username=username, sock=jump_channel)
165
+ else:
166
+ raise e
167
+
168
+ with ssh.open_sftp() as sftp:
169
+ pbar = tqdm(logs, total=len(logs), unit='file', desc='Reading logs')
170
+ for f in pbar:
171
+ with sftp.open(f) as fp:
172
+ pattern1 = 'Sun-Earth'
173
+ pattern2 = "Barycentric Observer's Velocity"
174
+ for line in fp:
175
+ if pattern1 in line:
176
+ value = line.strip().split(':')[-1].replace('\x1b[32m', '').replace('\x1b[0m', '').replace(' ', '')
177
+ A.append(float(value))
178
+ if pattern2 in line:
179
+ value = line.strip().split(':')[-1].replace('\x1b[32m', '').replace('\x1b[0m', '').replace(' ', '')
180
+ V.append(float(value))
181
+
182
+ ssh.close()
183
+
184
+ return np.array(A), np.array(V)
185
+
186
+
187
+ def BERV(self, H=None, use_gaia_meassurements=False, plx=None,
188
+ A=None, V=None, plot=True, ignore_mask=False, verbose=False, dpi=None):
189
+ """ Calculate Barycentric Radial Velocity with barycorr and compare with pipeline
190
+
191
+ Args:
192
+ H (list, optional):
193
+ List of (CCF/S1D/etc) headers for the target. If None, try to
194
+ download the CCF files to get the headers.
195
+ use_gaia_meassurements (bool, optional):
196
+ Use Gaia coordinates and proper motions instead of those in the headers.
197
+ plx (float, optional):
198
+ Value of stellar parallax [mas] to use in barycorr.
199
+ A (array, optional):
200
+ Earth-Sun distance [AU] for each BJD (found in the pipeline logs).
201
+ V (array, optional):
202
+ Earth's orbital velocity [km/s] for each BJD (found in the pipeline logs).
203
+ plot (bool, optional):
204
+ Plot the results.
205
+ """
206
+ if H is None:
207
+ H = get_headers(self, check_lesta=False, check_exo2=False, instrument='ESPRE')
208
+
209
+ if len(H) != self.N:
210
+ raise ValueError(f'Expected {self.N} headers (in `H`), got {len(H)}')
211
+
212
+ if 'HARPS' in H[0]['INSTRUME'] or 'NIRPS' in H[0]['INSTRUME']:
213
+ obsname = 'lasilla'
214
+ elif 'ESPRESSO' in H[0]['INSTRUME']:
215
+ obsname = 'paranal'
216
+ else:
217
+ raise ValueError('unknown instrument')
218
+
219
+ bjd = np.array([h['HIERARCH ESO QC BJD'] for h in H])
220
+ bjd -= 24e5
221
+ berv_pipeline = np.array([h['HIERARCH ESO QC BERV'] for h in H])
222
+
223
+ # in the pipeline, the BERV is used to shift wavelengths with this formula
224
+ # berv_factor = (1 + 1.55e-8) * (1 + BERV/c)
225
+ # The 1.55e-8 factor is an average of some relativistic effects, which are
226
+ # probably already included in the BERV calculated from barycorrpy.
227
+ # Therefore, we compute an "effective" BERV from the pipeline doing
228
+ # (1 + 1.55e-8) * (1 + BERV/c) = 1 + effBERV/c
229
+ # => effBERV = ((1 + 1.55e-8) * (1 + BERV/c) - 1) * c
230
+
231
+ if A is None and V is None:
232
+ if verbose:
233
+ logger.info("Using mean value for Earth-Sun distance and Earth's orbital velocity")
234
+
235
+ if A is None:
236
+ Φobs = const.G * const.M_sun / const.au + const.G * const.M_earth / const.R_earth
237
+ else:
238
+ A = np.atleast_1d(A) * u.km
239
+ Φobs = const.G * const.M_sun / A + const.G * const.M_earth / const.R_earth
240
+
241
+ if V is None:
242
+ V = 29785 *u.m / u.second
243
+ else:
244
+ V = np.atleast_1d(V) * u.km / u.second
245
+
246
+ f = 1 / (1 - Φobs / const.c**2 - V**2 / (2*const.c**2))
247
+ c = const.c.to(u.km / u.second).value
248
+ berv_pipeline = (f * (1 + berv_pipeline/c) - 1) * c
249
+
250
+
251
+ tmmean = np.array([h['HIERARCH ESO QC TMMEAN USED'] for h in H])
252
+ mjdobs = np.array([h['MJD-OBS'] for h in H])
253
+ texp = np.array([h['EXPTIME'] for h in H])
254
+ jd = mjdobs + 24e5 + 0.5 + (texp * tmmean)/60/60/24
255
+
256
+ if verbose:
257
+ logger.info(f"Unique exposure times: {np.unique(texp)}")
258
+
259
+ berv = []
260
+ if verbose:
261
+ pbar = enumerate(jd)
262
+ else:
263
+ pbar = tqdm(enumerate(jd), total=len(jd),
264
+ unit='observation', desc='Computing BERV')
265
+
266
+ for i, _jd in pbar:
267
+ if use_gaia_meassurements:
268
+ if not hasattr(self, 'gaia'):
269
+ raise ValueError('No Gaia data available')
270
+
271
+ target = self.gaia.coords
272
+ pmra = self.gaia.pmra
273
+ pmdec = self.gaia.pmdec
274
+ epoch = Time('J2016').jd
275
+ else:
276
+ ra = H[i]['* TARG ALPHA'][0]
277
+ ra = f'{ra:09.2f}'
278
+ ra = ra[:2] + 'h' + ra[2:4] + 'm' + ra[4:] + 's'
279
+
280
+ dec = H[i]['* TARG DELTA'][0]
281
+ if dec < 0:
282
+ dec = f'{dec:010.2f}'
283
+ else:
284
+ dec = f'{dec:09.2f}'
285
+ if dec.startswith('-'):
286
+ dec = dec[:3] + 'd' + dec[3:5] + 'm' + dec[5:] + 's'
287
+ else:
288
+ dec = dec[:2] + 'd' + dec[2:4] + 'm' + dec[4:] + 's'
289
+
290
+ target = SkyCoord(ra, dec)
291
+ pmra = H[i]['* TARG PMA'][0] * 1e3
292
+ pmdec = H[i]['* TARG PMD'][0] * 1e3
293
+ epoch = Time('J2000').jd
294
+
295
+ if verbose:
296
+ logger.info(f'jd: {_jd}')
297
+ logger.info(f'\t ra: {target.ra}')
298
+ logger.info(f'\t dec: {target.dec}')
299
+ logger.info(f'\t pmra: {pmra}')
300
+ logger.info(f'\t pmdec: {pmdec}')
301
+
302
+
303
+ px = plx or 0.0
304
+ out = get_BC_vel(_jd, obsname=obsname, rv=0.0, px=px, zmeas=0.0, epoch=epoch,
305
+ ra=target.ra.value, dec=target.dec.value, pmra=pmra, pmdec=pmdec)
306
+ # print(out[1][3])
307
+ berv.append(out[0])
308
+
309
+ berv = np.array(berv).flatten()
310
+
311
+ if ignore_mask: # ignore the system's masked points
312
+ pass
313
+ else: # mask points in the BERV output as well
314
+ bjd = bjd[self.mask]
315
+ berv = berv[self.mask]
316
+ berv_pipeline = berv_pipeline[self.mask]
317
+
318
+ fig = None
319
+ if plot:
320
+ fig, axs = plt.subplots(2, 1, figsize=(8, 6), dpi=dpi, sharex=True,
321
+ constrained_layout=True)
322
+
323
+ axs[0].set_title(f'{self.star}', loc='right')
324
+ axs[0].plot(bjd, berv_pipeline*1e3, '.', label='pipeline', alpha=0.5)
325
+ axs[0].plot(bjd, berv, '.', label='barycorrpy', alpha=0.5)
326
+ axs[0].legend(bbox_to_anchor=(0.0, 1.15), loc=2, borderaxespad=0., ncol=2)
327
+ axs[0].set(xlabel='BJD - 2450000', ylabel='BERV [m/s]')
328
+
329
+
330
+ if plx is not None:
331
+ epoch = 55500
332
+ sa = self.secular_acceleration(just_compute=True)
333
+ print('sa:', sa)
334
+ sec_acc = sa.value * (bjd - epoch) / 365.25
335
+
336
+ axs[0].plot(bjd, sec_acc)
337
+
338
+ # fitp = np.polyfit(bjd - epoch, diff, 1)
339
+ # axs[1].plot(bjd, np.polyval(fitp, bjd - epoch))
340
+ # axs[1].plot(bjd, np.mean(diff) + diff - np.polyval(fitp, bjd - epoch), '.')
341
+
342
+ if plx is None:
343
+ diff = berv - berv_pipeline*1e3
344
+ label=r'BERV$_{\rm barycorrpy}$ - BERV$_{\rm pipeline}$'
345
+ else:
346
+ diff = berv + sec_acc - berv_pipeline*1e3
347
+ label=r'BERV$_{\rm barycorrpy}$ (+SA) - BERV$_{\rm pipeline}$'
348
+
349
+ axs[1].plot(bjd, diff, 'k.', label=label)
350
+ axs[1].axhline(np.mean(diff), ls='--', c='k', alpha=0.1)
351
+
352
+ from adjustText import adjust_text
353
+ text = axs[1].text(bjd.max(), diff.min() + 0.1*diff.ptp(),
354
+ f'ptp: {diff.ptp()*1e2:.2f} cm/s',
355
+ ha='right', va='bottom', color='g', alpha=0.8)
356
+ axs[1].plot([bjd[np.argmax(diff)], bjd.max() + 0.05 * bjd.ptp()],
357
+ [np.max(diff), np.max(diff)], 'g--', alpha=0.3)
358
+ axs[1].plot([bjd[np.argmin(diff)], bjd.max() + 0.05 * bjd.ptp()],
359
+ [np.min(diff), np.min(diff)], 'g--', alpha=0.3)
360
+
361
+ ax = axs[1].twinx()
362
+ diff_cms = 1e2*(diff - np.mean(diff))
363
+ ax.plot(bjd, diff_cms, alpha=0)
364
+ ma = np.max(np.abs(ax.get_ylim()))
365
+ ax.set_ylim(-1 - 5*round(ma/5), 1 + 5*round(ma/5))
366
+ ax.set(ylabel='diff - mean(diff) [cm/s]')
367
+ axs[1].set_ylim(np.mean(diff)-ma/100, np.mean(diff)+ma/100)
368
+
369
+ axs[1].legend(bbox_to_anchor=(0.0, 1.15), loc=2, borderaxespad=0.)
370
+ axs[1].set(xlabel='BJD - 2450000', ylabel='diff [m/s]')
371
+
372
+ # adjust_text([text], va='bottom')
373
+
374
+ return fig, {
375
+ self.star: {
376
+ 'bjd': bjd,
377
+ 'berv_pipeline': berv_pipeline*1e3,
378
+ 'berv_barycorrpy': berv
379
+ }
380
+ }
381
+
382
+
383
+ def plot_BERV_correction(self, H, A, V, berv2=None, berv6=None,
384
+ inset=False, inset_range=(3, 5)):
385
+ fig, axs = plt.subplot_mosaic('ab\ncc\ndd\nee', constrained_layout=True, figsize=(2*3.57, 10))
386
+
387
+ if berv2 is None:
388
+ _, berv2 = BERV(self, H, plot=False)
389
+ if berv6 is None:
390
+ _, berv6 = BERV(self, H, A=A, V=V, plot=False)
391
+
392
+ self.plot(ax=axs['a'], ms=2)
393
+ axs['a'].set_title('original', loc='right')
394
+ self.gls(ax=axs['e'], label='original', color='r', alpha=0.5,
395
+ fill_between=True, samples_per_peak=20)
396
+
397
+ temp_vrad = self.vrad.copy()
398
+ self.vrad[self.mask] = self.vrad[self.mask] - berv2[self.star]['berv_pipeline'].value + berv6[self.star]['berv_pipeline'].value
399
+
400
+ self.plot(ax=axs['b'], ms=2)
401
+ axs['b'].set_title('after correction', loc='right')
402
+
403
+ diff = temp_vrad[self.mask] - self.vrad[self.mask]
404
+
405
+ axs['c'].plot(self.mtime, diff, 'k.')
406
+ axs['c'].set_title('RV difference', loc='right')
407
+ axs['c'].set(xlabel='BJD - 2450000', ylabel='RV diff [m/s]')
408
+
409
+ text = axs['c'].text(self.mtime.max(), diff.min() + 0.1*diff.ptp(),
410
+ f'ptp: {diff.ptp()*1e2:.2f} cm/s',
411
+ ha='right', va='bottom', color='g', alpha=0.8)
412
+ axs['c'].plot([self.mtime[np.argmax(diff)], self.mtime.max() + 0.05 * self.mtime.ptp()],
413
+ [np.max(diff), np.max(diff)], 'g--', alpha=0.3)
414
+ axs['c'].plot([self.mtime[np.argmin(diff)], self.mtime.max() + 0.05 * self.mtime.ptp()],
415
+ [np.min(diff), np.min(diff)], 'g--', alpha=0.3)
416
+
417
+
418
+ f, p = LombScargle(self.mtime, diff).autopower(maximum_frequency=1.0, samples_per_peak=10)
419
+ axs['d'].semilogx(1/f, p, color='k', alpha=0.8)
420
+ axs['d'].vlines([365.25, 365.25/2], 0, 1, color='k', ls='--', alpha=0.3)
421
+ axs['d'].set(xlabel='Period [days]', ylabel='normalized power', ylim=(0, 1))
422
+ axs['d'].set_title('GLS of RV difference', loc='right')
423
+
424
+ if inset:
425
+ inset = axs['d'].inset_axes(bounds=[0.15, 0.3, 0.3, 0.6])
426
+ m = (1/f > inset_range[0]) & (1/f < inset_range[1])
427
+ inset.plot(1/f[m], p[m], color='k', alpha=0.8)
428
+ inset.set(xlim=inset_range, yticks=[])
429
+ inset.minorticks_on()
430
+
431
+ self.gls(ax=axs['e'], label='after correction', color='g', alpha=1,
432
+ lw=0.8, samples_per_peak=20)
433
+ axs['e'].set(xlabel='Period [days]', ylabel='normalized power')
434
+ axs['e'].sharex(axs['d'])
435
+
436
+ self.vrad = temp_vrad
437
+ return fig
@@ -1,6 +1,9 @@
1
1
  import numpy as np
2
2
  from numpy.testing import suppress_warnings
3
3
 
4
+ from scipy.stats import binned_statistic as old_binned_statistic,\
5
+ binned_statistic_dd as old_binned_statistic_dd
6
+
4
7
  from .setup_logger import logger
5
8
 
6
9
  ###############################################################################
@@ -212,8 +215,6 @@ def binned_statistic_dd(sample, values, statistic='mean', bins=10, range=None,
212
215
 
213
216
 
214
217
  # put back the documentation
215
- from scipy.stats import binned_statistic as old_binned_statistic,\
216
- binned_statistic_dd as old_binned_statistic_dd
217
218
  doc1 = old_binned_statistic.__doc__
218
219
  doc2 = old_binned_statistic_dd.__doc__
219
220
  binned_statistic.__doc__ = doc1
@@ -5,4 +5,10 @@ return_self = False
5
5
  check_internet = False
6
6
 
7
7
  # make all DACE requests without using a .dacerc file
8
- request_as_public = False
8
+ request_as_public = False
9
+
10
+ # whether to adjust instrument means before gls by default
11
+ adjust_means_gls = True
12
+
13
+ # debug
14
+ debug = False