arvi 0.1.13-py3-none-any.whl → 0.1.15-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- arvi/__init__.py +21 -6
- arvi/berv.py +437 -0
- arvi/binning.py +14 -9
- arvi/dace_wrapper.py +2 -2
- arvi/gaia_wrapper.py +1 -1
- arvi/headers.py +47 -0
- arvi/plots.py +158 -90
- arvi/programs.py +4 -2
- arvi/reports.py +4 -3
- arvi/simbad_wrapper.py +3 -2
- arvi/stats.py +2 -3
- arvi/stellar.py +89 -0
- arvi/timeseries.py +104 -37
- arvi/translations.py +2 -0
- arvi/utils.py +13 -0
- {arvi-0.1.13.dist-info → arvi-0.1.15.dist-info}/METADATA +2 -4
- arvi-0.1.15.dist-info/RECORD +35 -0
- {arvi-0.1.13.dist-info → arvi-0.1.15.dist-info}/WHEEL +1 -1
- arvi-0.1.13.dist-info/RECORD +0 -32
- {arvi-0.1.13.dist-info → arvi-0.1.15.dist-info}/LICENSE +0 -0
- {arvi-0.1.13.dist-info → arvi-0.1.15.dist-info}/top_level.txt +0 -0
arvi/__init__.py
CHANGED
@@ -2,18 +2,33 @@ __all__ = ['RV']
 
 from .timeseries import RV
 
-
+## OLD
+# # the __getattr__ function is always called twice, so we need this
+# # to only build and return the RV object on the second time
+# _ran_once = False
 
 def __getattr__(name: str):
     if name in (
         '_ipython_canary_method_should_not_exist_',
+        '_ipython_display_',
         '_repr_mimebundle_',
         '__wrapped__'
     ):
         return
 
-
-
-    return
-
-
+    try:
+        globals()[name] = RV(name)
+        return globals()[name]
+    except ValueError as e:
+        raise ImportError(e) from None
+    # raise AttributeError(f"module {__name__!r} has no attribute {name!r}")
+
+    ## OLD
+    # # can't do it any other way :(
+    # global _ran_once
+
+    # if _ran_once:
+    #     _ran_once = False
+    #     return RV(name)
+    # else:
+    #     _ran_once = True
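With the new `__getattr__`, any attribute looked up on the `arvi` module (other than the few IPython/display names it filters out) is resolved by building an `RV` object for that name and caching it in the module's `globals()`, and a failed lookup now surfaces as an `ImportError`. A minimal sketch of the resulting usage, with a purely illustrative target name:

import arvi

s = arvi.HD10180        # first access calls RV('HD10180') and caches it in the module
s_again = arvi.HD10180  # later accesses find the cached attribute, __getattr__ is not called again

# from-imports go through the same hook (PEP 562), so this also works,
# and an unknown target now raises ImportError instead of ValueError:
# from arvi import HD10180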
arvi/berv.py
ADDED
@@ -0,0 +1,437 @@
+import os
+import numpy as np
+import matplotlib.pyplot as plt
+
+from arvi.headers import get_headers
+from barycorrpy import get_BC_vel
+from astropy.coordinates import SkyCoord
+from astropy.time import Time
+from astropy import units as u
+from astropy import constants as const
+from astropy.timeseries import LombScargle
+from tqdm import tqdm
+
+from .setup_logger import logger
+from . import config
+
+
+def correct_rvs(self, simple=False, H=None, save_files=False, plot=True):
+    """
+    """
+    import pickle
+
+    if hasattr(self, '_did_correct_berv') and self._did_correct_berv:
+        logger.info('Already corrected for the BERV! Not doing anything.')
+        return
+
+    path = os.path.dirname(__file__)
+    path = os.path.join(path, 'data')
+    pkl = os.path.join(path, 'berv_espresso_sine.pkl')
+    berv_espresso = pickle.load(open(pkl, 'rb'))
+
+    if simple:
+        logger.info('Correcting RVs with a previously-fitted sinusoid function')
+        _f = berv_espresso['func'].replace('lambda t: ', '')
+        logger.info(f': {_f}')
+        f = eval(berv_espresso['func'])
+        if plot:
+            _, ax = self.plot()
+            ax.plot(self._tt, f(self._tt) + self.vrad.mean(), 'k')
+            _, axgls = self.gls(label='before')
+
+        self.vrad = self.vrad + f(self.time)
+
+        if plot:
+            self.gls(ax=axgls, label='after')
+
+        return f(self.time)
+
+    else:
+        logger.info('Correcting RVs with actual difference between BERVs')
+        logger.info('(basically, use BERV_barycorrpy for BERV correction)')
+
+        old_vrad = self.vrad.copy()
+
+        _, berv = BERV(self, H=H, use_gaia_meassurements=True, plx=self.gaia.plx,
+                       plot=False, ignore_mask=True)
+
+        if plot:
+            fig, axs = plt.subplots(2, 1, constrained_layout=True, height_ratios=(3, 1), sharex=True)
+            _, ax = self.plot(ax=axs[0])
+            _, axgls = self.gls(label='before')
+
+        # undo secular acceleration, if it was done
+        _did_secular_acceleration = self._did_secular_acceleration
+        self._undo_secular_acceleration()
+
+        # transform RVs: RV --> RV - BERVpipe + BERVbarycorrpy
+
+        diff = berv[self.star]['berv_barycorrpy'] - berv[self.star]['berv_pipeline']
+
+        if save_files:
+            i_inst = np.hstack([np.arange(n) for n in self.NN.values()])
+            with open(f'{self.star}_berv_correction.rdb', 'w') as rdb:
+                rdb.write('# time\n')
+                rdb.write('# vrad\n')
+                rdb.write('# svrad\n')
+                rdb.write('# berv - BERV value from header\n')
+                rdb.write('# berv_pipe - BERV from header corrected for 1.55e-8 factor\n')
+                rdb.write('# berv_barycorrpy - BERV value from barycorrpy\n')
+                rdb.write('# diff - difference between berv_barycorrpy and berv_pipe\n')
+                rdb.write('# vrad_berv_corrected = vrad + diff\n')
+                rdb.write('# instrument\n')
+                rdb.write('# i - index\n')
+                rdb.write('# i_inst - index within the instrument\n')
+                rdb.write('#\n')
+                rdb.write('# --> TO CORRECT vrad, we ** add the diff column **\n')
+                rdb.write('# --> the result of this operation is in column vrad_berv_corrected\n')
+                rdb.write('# --> vrad_berv_corrected is already corrected for the secular acceleration, vrad is not\n')
+                rdb.write('#\n')
+                #
+                cols = [
+                    'time', 'vrad', 'svrad',
+                    'berv', 'berv_pipe', 'berv_barycorrpy', 'diff', 'vrad_berv_corrected',
+                    'instrument', 'i', 'i_inst'
+                ]
+                rdb.write('# ' + '\t'.join(cols) + '\n')
+                for i, t in enumerate(self.time):
+                    rdb.write(f'{t:11.5f}\t')
+                    # if _did_secular_acceleration:
+                    #     rdb.write(f'{old_vrad[i]:13.5f}\t')
+                    # else:
+                    rdb.write(f'{self.vrad[i]:13.7f}\t')
+                    rdb.write(f'{self.svrad[i]:13.7f}\t')
+                    rdb.write(f'{self.berv[i]:15.7f}\t')
+                    rdb.write(f'{berv[self.star]["berv_pipeline"][i]/1e3:15.7f}\t')
+                    rdb.write(f'{berv[self.star]["berv_barycorrpy"][i]/1e3:15.7f}\t')
+                    rdb.write(f'{diff[i]:15.7f}\t')
+                    rdb.write(f'{self.vrad[i] + diff[i]:13.7f}\t')
+                    rdb.write(f'{self.instrument_array[i]}\t')
+                    rdb.write(f'{i}\t')
+                    rdb.write(f'{i_inst[i]}\t')
+                    rdb.write('\n')
+
+        self.add_to_vrad(diff)
+        self._did_correct_berv = True
+        self._did_secular_acceleration = True # "automatically", by using BERV_barycorrpy
+        self._did_secular_acceleration_simbad = False
+        self._did_secular_acceleration_epoch = Time('J2016').jd - 24e5
+
+        # the secular acceleration hadn't been done, but it was introduced by
+        # BERV_barycorrpy, so we need to undo it
+        if not _did_secular_acceleration:
+            self._undo_secular_acceleration()
+
+        if plot:
+            self.plot(ax=axs[0], marker='+', ms=5)
+            axs[1].plot(self.time, old_vrad - self.vrad, '.k', label='old RV - new RV')
+            ma = np.abs(axs[1].get_ylim()).max()
+            axs[1].set(ylim=(-ma, ma), xlabel=axs[0].get_xlabel(), ylabel='RV difference [m/s]')
+            self.gls(ax=axgls, label='after')
+
+        return diff
+
+def get_A_and_V_from_lesta(self, username=config.username):
+    try:
+        import paramiko
+    except ImportError:
+        raise ImportError("paramiko is not installed. Please install it with 'pip install paramiko'")
+
+    logs = []
+    for f in self.raw_file:
+        f = f.replace('espresso/', '/projects/astro/ESPRESSODRS/')
+        f = f.replace('nirps/', '/projects/astro/NIRPSDRS/')
+        f = f.replace('.fits', '_SCIENCE_FP.log')
+        f = f.replace('reduced', 'log')
+        f = f.replace('r.ESPRE', 'ESPRESSO')
+        logs.append(f)
+
+    A, V = [], []
+
+    try:
+        ssh = paramiko.SSHClient()
+        ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
+        ssh.connect("lesta02.astro.unige.ch", username=username, timeout=5)
+    except Exception as e:
+        if 'getaddrinfo failed' in str(e):
+            jump = paramiko.SSHClient()
+            jump.set_missing_host_key_policy(paramiko.AutoAddPolicy())
+            jump.connect('login01.astro.unige.ch', username=username, timeout=5)
+            jump_transport = jump.get_transport()
+            jump_channel = jump_transport.open_channel('direct-tcpip', ('10.194.64.162', 22), ('129.194.64.20', 22))
+            ssh = paramiko.SSHClient()
+            ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
+            ssh.connect('lesta02.astro.unige.ch', username=username, sock=jump_channel)
+        else:
+            raise e
+
+    with ssh.open_sftp() as sftp:
+        pbar = tqdm(logs, total=len(logs), unit='file', desc='Reading logs')
+        for f in pbar:
+            with sftp.open(f) as fp:
+                pattern1 = 'Sun-Earth'
+                pattern2 = "Barycentric Observer's Velocity"
+                for line in fp:
+                    if pattern1 in line:
+                        value = line.strip().split(':')[-1].replace('\x1b[32m', '').replace('\x1b[0m', '').replace(' ', '')
+                        A.append(float(value))
+                    if pattern2 in line:
+                        value = line.strip().split(':')[-1].replace('\x1b[32m', '').replace('\x1b[0m', '').replace(' ', '')
+                        V.append(float(value))
+
+    ssh.close()
+
+    return np.array(A), np.array(V)
+
+
+def BERV(self, H=None, use_gaia_meassurements=False, plx=None,
+         A=None, V=None, plot=True, ignore_mask=False, verbose=False, dpi=None):
+    """ Calculate Barycentric Radial Velocity with barycorr and compare with pipeline
+
+    Args:
+        H (list, optional):
+            List of (CCF/S1D/etc) headers for the target. If None, try to
+            download the CCF files to get the headers.
+        use_gaia_meassurements (bool, optional):
+            Use Gaia coordinates and proper motions instead of those in the headers.
+        plx (float, optional):
+            Value of stellar parallax [mas] to use in barycorr.
+        A (array, optional):
+            Earth-Sun distance [AU] for each BJD (found in the pipeline logs).
+        V (array, optional):
+            Earth's orbital velocity [km/s] for each BJD (found in the pipeline logs).
+        plot (bool, optional):
+            Plot the results.
+    """
+    if H is None:
+        H = get_headers(self, check_lesta=False, check_exo2=False, instrument='ESPRE')
+
+    if len(H) != self.N:
+        raise ValueError(f'Expected {self.N} headers (in `H`), got {len(H)}')
+
+    if 'HARPS' in H[0]['INSTRUME'] or 'NIRPS' in H[0]['INSTRUME']:
+        obsname = 'lasilla'
+    elif 'ESPRESSO' in H[0]['INSTRUME']:
+        obsname = 'paranal'
+    else:
+        raise ValueError('unknown instrument')
+
+    bjd = np.array([h['HIERARCH ESO QC BJD'] for h in H])
+    bjd -= 24e5
+    berv_pipeline = np.array([h['HIERARCH ESO QC BERV'] for h in H])
+
+    # in the pipeline, the BERV is used to shift wavelenghts with this formula
+    # berv_factor = (1 + 1.55e-8) * (1 + BERV/c)
+    # The 1.55e-8 factor is an average of some relativistic effects, which are
+    # probably already included in the BERV calculated from barycorrpy.
+    # Therefore, we compute an "effective" BERV from the pipeline doing
+    # (1 + 1.55e-8) * (1 + BERV/c) = 1 + effBERV/c
+    # => effBERV = ((1 + 1.55e-8) * (1 + BERV/c) - 1) * c
+
+    if A is None and V is None:
+        if verbose:
+            logger.info("Using mean value for Earth-Sun distance and Earth's orbital velocity")
+
+    if A is None:
+        Φobs = const.G * const.M_sun / const.au + const.G * const.M_earth / const.R_earth
+    else:
+        A = np.atleast_1d(A) * u.km
+        Φobs = const.G * const.M_sun / A + const.G * const.M_earth / const.R_earth
+
+    if V is None:
+        V = 29785 *u.m / u.second
+    else:
+        V = np.atleast_1d(V) * u.km / u.second
+
+    f = 1 / (1 - Φobs / const.c**2 - V**2 / (2*const.c**2))
+    c = const.c.to(u.km / u.second).value
+    berv_pipeline = (f * (1 + berv_pipeline/c) - 1) * c
+
+
+    tmmean = np.array([h['HIERARCH ESO QC TMMEAN USED'] for h in H])
+    mjdobs = np.array([h['MJD-OBS'] for h in H])
+    texp = np.array([h['EXPTIME'] for h in H])
+    jd = mjdobs + 24e5 + 0.5 + (texp * tmmean)/60/60/24
+
+    if verbose:
+        logger.info(f"Unique exposure times: {np.unique(texp)}")
+
+    berv = []
+    if verbose:
+        pbar = enumerate(jd)
+    else:
+        pbar = tqdm(enumerate(jd), total=len(jd),
+                    unit='observation', desc='Computing BERV')
+
+    for i, _jd in pbar:
+        if use_gaia_meassurements:
+            if not hasattr(self, 'gaia'):
+                raise ValueError('No Gaia data available')
+
+            target = self.gaia.coords
+            pmra = self.gaia.pmra
+            pmdec = self.gaia.pmdec
+            epoch = Time('J2016').jd
+        else:
+            ra = H[i]['* TARG ALPHA'][0]
+            ra = f'{ra:09.2f}'
+            ra = ra[:2] + 'h' + ra[2:4] + 'm' + ra[4:] + 's'
+
+            dec = H[i]['* TARG DELTA'][0]
+            if dec < 0:
+                dec = f'{dec:010.2f}'
+            else:
+                dec = f'{dec:09.2f}'
+            if dec.startswith('-'):
+                dec = dec[:3] + 'd' + dec[3:5] + 'm' + dec[5:] + 's'
+            else:
+                dec = dec[:2] + 'd' + dec[2:4] + 'm' + dec[4:] + 's'
+
+            target = SkyCoord(ra, dec)
+            pmra = H[i]['* TARG PMA'][0] * 1e3
+            pmdec = H[i]['* TARG PMD'][0] * 1e3
+            epoch = Time('J2000').jd
+
+        if verbose:
+            logger.info(f'jd: {_jd}')
+            logger.info(f'\t ra: {target.ra}')
+            logger.info(f'\t dec: {target.dec}')
+            logger.info(f'\t pmra: {pmra}')
+            logger.info(f'\t pmdec: {pmdec}')
+
+
+        px = plx or 0.0
+        out = get_BC_vel(_jd, obsname=obsname, rv=0.0, px=px, zmeas=0.0, epoch=epoch,
+                         ra=target.ra.value, dec=target.dec.value, pmra=pmra, pmdec=pmdec)
+        # print(out[1][3])
+        berv.append(out[0])
+
+    berv = np.array(berv).flatten()
+
+    if ignore_mask: # ignore the system's masked points
+        pass
+    else: # mask points in the BERV output as well
+        bjd = bjd[self.mask]
+        berv = berv[self.mask]
+        berv_pipeline = berv_pipeline[self.mask]
+
+    fig = None
+    if plot:
+        fig, axs = plt.subplots(2, 1, figsize=(8, 6), dpi=dpi, sharex=True,
+                                constrained_layout=True)
+
+        axs[0].set_title(f'{self.star}', loc='right')
+        axs[0].plot(bjd, berv_pipeline*1e3, '.', label='pipeline', alpha=0.5)
+        axs[0].plot(bjd, berv, '.', label='barycorrpy', alpha=0.5)
+        axs[0].legend(bbox_to_anchor=(0.0, 1.15), loc=2, borderaxespad=0., ncol=2)
+        axs[0].set(xlabel='BJD - 2450000', ylabel='BERV [m/s]')
+
+
+        if plx is not None:
+            epoch = 55500
+            sa = self.secular_acceleration(just_compute=True)
+            print('sa:', sa)
+            sec_acc = sa.value * (bjd - epoch) / 365.25
+
+            axs[0].plot(bjd, sec_acc)
+
+            # fitp = np.polyfit(bjd - epoch, diff, 1)
+            # axs[1].plot(bjd, np.polyval(fitp, bjd - epoch))
+            # axs[1].plot(bjd, np.mean(diff) + diff - np.polyval(fitp, bjd - epoch), '.')
+
+        if plx is None:
+            diff = berv - berv_pipeline*1e3
+            label=r'BERV$_{\rm barycorrpy}$ - BERV$_{\rm pipeline}$'
+        else:
+            diff = berv + sec_acc - berv_pipeline*1e3
+            label=r'BERV$_{\rm barycorrpy}$ (+SA) - BERV$_{\rm pipeline}$'
+
+        axs[1].plot(bjd, diff, 'k.', label=label)
+        axs[1].axhline(np.mean(diff), ls='--', c='k', alpha=0.1)
+
+        from adjustText import adjust_text
+        text = axs[1].text(bjd.max(), diff.min() + 0.1*diff.ptp(),
+                           f'ptp: {diff.ptp()*1e2:.2f} cm/s',
+                           ha='right', va='bottom', color='g', alpha=0.8)
+        axs[1].plot([bjd[np.argmax(diff)], bjd.max() + 0.05 * bjd.ptp()],
+                    [np.max(diff), np.max(diff)], 'g--', alpha=0.3)
+        axs[1].plot([bjd[np.argmin(diff)], bjd.max() + 0.05 * bjd.ptp()],
+                    [np.min(diff), np.min(diff)], 'g--', alpha=0.3)
+
+        ax = axs[1].twinx()
+        diff_cms = 1e2*(diff - np.mean(diff))
+        ax.plot(bjd, diff_cms, alpha=0)
+        ma = np.max(np.abs(ax.get_ylim()))
+        ax.set_ylim(-1 - 5*round(ma/5), 1 + 5*round(ma/5))
+        ax.set(ylabel='diff - mean(diff) [cm/s]')
+        axs[1].set_ylim(np.mean(diff)-ma/100, np.mean(diff)+ma/100)
+
+        axs[1].legend(bbox_to_anchor=(0.0, 1.15), loc=2, borderaxespad=0.)
+        axs[1].set(xlabel='BJD - 2450000', ylabel='diff [m/s]')
+
+        # adjust_text([text], va='bottom')
+
+    return fig, {
+        self.star: {
+            'bjd': bjd,
+            'berv_pipeline': berv_pipeline*1e3,
+            'berv_barycorrpy': berv
+        }
+    }
+
+
+def plot_BERV_correction(self, H, A, V, berv2=None, berv6=None,
+                         inset=False, inset_range=(3, 5)):
+    fig, axs = plt.subplot_mosaic('ab\ncc\ndd\nee', constrained_layout=True, figsize=(2*3.57, 10))
+
+    if berv2 is None:
+        _, berv2 = BERV(self, H, plot=False)
+    if berv6 is None:
+        _, berv6 = BERV(self, H, A=A, V=V, plot=False)
+
+    self.plot(ax=axs['a'], ms=2)
+    axs['a'].set_title('original', loc='right')
+    self.gls(ax=axs['e'], label='original', color='r', alpha=0.5,
+             fill_between=True, samples_per_peak=20)
+
+    temp_vrad = self.vrad.copy()
+    self.vrad[self.mask] = self.vrad[self.mask] - berv2[self.star]['berv_pipeline'].value + berv6[self.star]['berv_pipeline'].value
+
+    self.plot(ax=axs['b'], ms=2)
+    axs['b'].set_title('after correction', loc='right')
+
+    diff = temp_vrad[self.mask] - self.vrad[self.mask]
+
+    axs['c'].plot(self.mtime, diff, 'k.')
+    axs['c'].set_title('RV difference', loc='right')
+    axs['c'].set(xlabel='BJD - 2450000', ylabel='RV diff [m/s]')
+
+    text = axs['c'].text(self.mtime.max(), diff.min() + 0.1*diff.ptp(),
+                         f'ptp: {diff.ptp()*1e2:.2f} cm/s',
+                         ha='right', va='bottom', color='g', alpha=0.8)
+    axs['c'].plot([self.mtime[np.argmax(diff)], self.mtime.max() + 0.05 * self.mtime.ptp()],
+                  [np.max(diff), np.max(diff)], 'g--', alpha=0.3)
+    axs['c'].plot([self.mtime[np.argmin(diff)], self.mtime.max() + 0.05 * self.mtime.ptp()],
+                  [np.min(diff), np.min(diff)], 'g--', alpha=0.3)
+
+
+    f, p = LombScargle(self.mtime, diff).autopower(maximum_frequency=1.0, samples_per_peak=10)
+    axs['d'].semilogx(1/f, p, color='k', alpha=0.8)
+    axs['d'].vlines([365.25, 365.25/2], 0, 1, color='k', ls='--', alpha=0.3)
+    axs['d'].set(xlabel='Period [days]', ylabel='normalized power', ylim=(0, 1))
+    axs['d'].set_title('GLS of RV difference', loc='right')
+
+    if inset:
+        inset = axs['d'].inset_axes(bounds=[0.15, 0.3, 0.3, 0.6])
+        m = (1/f > inset_range[0]) & (1/f < inset_range[1])
+        inset.plot(1/f[m], p[m], color='k', alpha=0.8)
+        inset.set(xlim=inset_range, yticks=[])
+        inset.minorticks_on()
+
+    self.gls(ax=axs['e'], label='after correction', color='g', alpha=1,
+             lw=0.8, samples_per_peak=20)
+    axs['e'].set(xlabel='Period [days]', ylabel='normalized power')
+    axs['e'].sharex(axs['d'])
+
+    self.vrad = temp_vrad
+    return fig
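The comment block inside BERV() carries the key arithmetic: the DRS shifts wavelengths by the factor (1 + 1.55e-8)(1 + BERV/c), so before comparing against barycorrpy the pipeline BERV is converted to an "effective" BERV through effBERV = ((1 + 1.55e-8)(1 + BERV/c) - 1)·c. A standalone sketch of that conversion (the pipeline value below is made up; only the formula comes from the code above):

from astropy import units as u
from astropy import constants as const

c = const.c.to(u.km / u.second).value   # speed of light [km/s]
berv_header = 12.3456                   # illustrative pipeline BERV [km/s]

# (1 + 1.55e-8) * (1 + BERV/c) = 1 + effBERV/c
eff_berv = ((1 + 1.55e-8) * (1 + berv_header / c) - 1) * c

# the shift is about c * 1.55e-8 ≈ 4.6 m/s, the scale on which the two BERVs are compared
print((eff_berv - berv_header) * 1e3, 'm/s')

correct_rvs then applies the per-point difference berv_barycorrpy - berv_pipeline directly to the RVs via add_to_vrad.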
arvi/binning.py
CHANGED
@@ -1,8 +1,4 @@
 import numpy as np
-from numpy.testing import suppress_warnings
-
-from scipy.stats import binned_statistic as old_binned_statistic,\
-    binned_statistic_dd as old_binned_statistic_dd
 
 from .setup_logger import logger
 
@@ -38,6 +34,7 @@ def binned_statistic(x, values, statistic='mean', bins=10, range=None,
 
 def binned_statistic_dd(sample, values, statistic='mean', bins=10, range=None,
                         expand_binnumbers=False, weights=None):
+    from numpy.testing import suppress_warnings
     known_stats = [
         'mean', 'median', 'count', 'sum', 'std', 'min', 'max', 'ptp'
     ]
@@ -214,11 +211,13 @@ def binned_statistic_dd(sample, values, statistic='mean', bins=10, range=None,
     return result, edges, binnumbers
 
 
-# put back the documentation
-doc1 = old_binned_statistic.__doc__
-doc2 = old_binned_statistic_dd.__doc__
-binned_statistic.__doc__ = doc1
-binned_statistic_dd.__doc__ = doc2
+# # put back the documentation
+# from scipy.stats import binned_statistic as old_binned_statistic,\
+#     binned_statistic_dd as old_binned_statistic_dd
+# doc1 = old_binned_statistic.__doc__
+# doc2 = old_binned_statistic_dd.__doc__
+# binned_statistic.__doc__ = doc1
+# binned_statistic_dd.__doc__ = doc2
 
 ###############################################################################
 
@@ -339,6 +338,9 @@ def binRV(time, rv, err=None, stat='wmean', tstat='wmean', estat='addquad',
     if (err is not None) and (stat == 'wmean'):
         stat = wmean # default is weighted mean
 
+    if (err is None) and (stat == 'wmean'):
+        stat = 'mean'
+
     brv = binned_statistic(time, rv, statistic=stat, bins=bins, range=None,
                            weights=err)
 
@@ -346,6 +348,9 @@
     if (err is not None) and (tstat == 'wmean'):
         tstat = wmean # default is weighted mean
 
+    if (err is None) and (tstat == 'wmean'):
+        tstat = 'mean'
+
     times = binned_statistic(time, time, statistic=tstat, bins=bins,
                              range=None, weights=err)
     # if there are errors, bin them too
arvi/dace_wrapper.py
CHANGED
@@ -23,7 +23,7 @@ def load_spectroscopy() -> SpectroscopyClass:
 
 @lru_cache()
 def get_dace_id(star):
-    filters = {"obj_id_catname": {"
+    filters = {"obj_id_catname": {"equal": [star]}}
     try:
         with stdout_disabled(), all_logging_disabled():
             r = load_spectroscopy().query_database(filters=filters, limit=1)
@@ -193,7 +193,7 @@ def get_observations(star, instrument=None, main_id=None, verbose=True):
             raise ValueError(msg) from None
     else:
         try:
-            result = get_observations_from_instrument(star, instrument, main_id
+            result = get_observations_from_instrument(star, instrument, main_id)
         except ValueError:
             msg = f'no {instrument} observations for {star}'
             raise ValueError(msg) from None
arvi/gaia_wrapper.py
CHANGED
@@ -26,7 +26,7 @@ def run_query(query):
     url = 'https://gea.esac.esa.int/tap-server/tap/sync'
     data = dict(query=query, request='doQuery', lang='ADQL', format='csv')
     try:
-        response = requests.post(url, data=data, timeout=
+        response = requests.post(url, data=data, timeout=5)
     except requests.ReadTimeout as err:
        raise IndexError(err)
    except requests.ConnectionError as err:
arvi/headers.py
ADDED
@@ -0,0 +1,47 @@
+from tqdm import tqdm
+from astropy.io import fits
+import iCCF
+
+from . import config
+
+def get_headers(self, check_lesta=False, lesta_username=config.username,
+                check_exo2=False, instrument=None):
+    try:
+        import paramiko
+    except ImportError:
+        raise ImportError("paramiko is not installed. Please install it with 'pip install paramiko'")
+
+    H = []
+
+    if check_lesta:
+        with paramiko.SSHClient() as ssh:
+            ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
+            ssh.connect("lesta02.astro.unige.ch", username=lesta_username)
+            sftp = ssh.open_sftp()
+
+            pbar = tqdm(self.raw_file, total=len(self.raw_file), unit='file', desc='Reading headers')
+            for f in pbar:
+                f = f.replace('espresso/', '/projects/astro/ESPRESSODRS/')
+                f = f.replace('nirps/', '/projects/astro/NIRPSDRS/')
+                f = f.replace('.fits', '_CCF_A.fits')#.replace(':', r'\:')
+                with sftp.open(f) as fp:
+                    header = fits.getheader(fp)
+                    H.append(header)
+
+    if len(H) == 0 and check_exo2:
+        raise NotImplementedError('getting headers from exo2 not yet implemented')
+
+    if len(H) == 0:
+        self.download_ccf()
+        if instrument is None:
+            I = iCCF.from_file(f'{self.star}_downloads/*CCF_A.fits')
+        else:
+            I = iCCF.from_file(f'{self.star}_downloads/r.{instrument}.*CCF_A.fits',
+                               guess_instrument='HARPS' not in instrument)
+        H = [i.HDU[0].header for i in I]
+
+    # sort by BJD
+    H = sorted(H, key=lambda x: x['HIERARCH ESO QC BJD'])
+
+    return H
+
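get_headers collects one FITS header per observation, either over SFTP from the lesta cluster or from locally downloaded CCF files via iCCF, and finally orders them chronologically with the HIERARCH ESO QC BJD keyword. A minimal sketch of that last step with plain astropy (the file names are only illustrative):

from astropy.io import fits

# illustrative CCF file names; in arvi these come from self.raw_file or the download folder
files = ['r.ESPRE.2024-01-01T00:00:00.000_CCF_A.fits',
         'r.ESPRE.2024-01-02T00:00:00.000_CCF_A.fits']

H = [fits.getheader(f) for f in files]                   # primary header of each file
H = sorted(H, key=lambda h: h['HIERARCH ESO QC BJD'])    # sort observations by BJD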