arvi 0.0.1.dev1__py3-none-any.whl → 0.0.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
arvi/__init__.py CHANGED
@@ -1 +1,13 @@
1
- __version__ = '0.0.1dev1'
1
+ __version__ = '0.0.3'
2
+ __all__ = []
3
+
4
+ from .timeseries import RV
5
+
6
+ _ran_once = False
7
+
8
+ def __getattr__(name: str):
9
+ global _ran_once # can't do it any other way :(
10
+ if _ran_once:
11
+ return RV(name)
12
+ else:
13
+ _ran_once = True
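The module-level __getattr__ above is what allows stars to be imported directly by name. A minimal usage sketch, assuming the package is installed and DACE is reachable; the star name HD10180 is only illustrative:

    # any unknown attribute of the arvi package is forwarded to RV(name)
    # after the first (discarded) access, so this is roughly RV('HD10180')
    from arvi import HD10180
    print(HD10180.N, HD10180.instruments)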
arvi/dace_wrapper.py ADDED
@@ -0,0 +1,92 @@
1
+ import os
2
+ import tarfile
3
+ import numpy as np
4
+ from dace_query import DaceClass
5
+ from dace_query.spectroscopy import SpectroscopyClass, Spectroscopy as default_Spectroscopy
6
+ from .setup_logger import logger
7
+
8
+ def load_spectroscopy():
9
+ if 'DACERC' in os.environ:
10
+ dace = DaceClass(dace_rc_config_path=os.environ['DACERC'])
11
+ return SpectroscopyClass(dace_instance=dace)
12
+ # elif os.path.exists(os.path.expanduser('~/.dacerc')):
13
+ return default_Spectroscopy
14
+
15
+
16
+ def get_arrays(result, latest_pipeline=True):
17
+ arrays = []
18
+ instruments = list(result.keys())
19
+ for inst in instruments:
20
+ pipelines = list(result[inst].keys())
21
+ if latest_pipeline:
22
+ pipelines = [pipelines[-1]]
23
+ for pipe in pipelines:
24
+ modes = list(result[inst][pipe].keys())
25
+ for mode in modes:
26
+ if 'rjd' not in result[inst][pipe][mode]:
27
+ logger.error(f"No 'rjd' key for {inst} - {pipe}")
28
+ raise ValueError
29
+
30
+ arrays.append(
31
+ ((inst, pipe, mode), result[inst][pipe][mode])
32
+ )
33
+
34
+ return arrays
35
+
36
+
37
+ def get_observations(star, save_rdb=False, verbose=True):
38
+ Spectroscopy = load_spectroscopy()
39
+ result = Spectroscopy.get_timeseries(target=star,
40
+ sorted_by_instrument=True,
41
+ output_format='numpy')
42
+ instruments = list(result.keys())
43
+
44
+ # sort pipelines, being extra careful with HARPS pipeline names
45
+ # (i.e. ensure that 3.0.0 > 3.5)
46
+ class sorter:
47
+ def __call__(self, x):
48
+ return '0.3.5' if x[0] == '3.5' else x[0]  # x is a (pipeline, data) item from dict.items()
49
+
50
+ for inst in instruments:
51
+ result[inst] = dict(sorted(result[inst].items(),
52
+ key=sorter(), reverse=True))
53
+
54
+ if verbose:
55
+ logger.info('RVs available from')
56
+ with logger.contextualize(indent=' '):
57
+ for inst in instruments:
58
+ pipelines = list(result[inst].keys())
59
+ for pipe in pipelines:
60
+ mode = list(result[inst][pipe].keys())[0]
61
+ N = len(result[inst][pipe][mode]['rjd'])
62
+ # LOG
63
+ logger.info(f'{inst:12s} {pipe:10s} ({N} observations)')
64
+
65
+ return result
66
+
67
+
68
+ def do_download_ccf(raw_files, output_directory, verbose=True):
69
+ raw_files = np.atleast_1d(raw_files)
70
+ if not os.path.isdir(output_directory):
71
+ os.makedirs(output_directory)
72
+ if verbose:
73
+ logger.info(f"Downloading {len(raw_files)} CCFs into '{output_directory}'...")
74
+
75
+ Spectroscopy = load_spectroscopy()
76
+
77
+ from .utils import all_logging_disabled, stdout_disabled
78
+ with stdout_disabled(), all_logging_disabled():
79
+ Spectroscopy.download_files(raw_files,
80
+ file_type='ccf',
81
+ output_directory=output_directory)
82
+
83
+ if verbose:
84
+ logger.info('Extracting .fits files')
85
+
86
+ file = os.path.join(output_directory, 'spectroscopy_download.tar.gz')
87
+ tar = tarfile.open(file, "r")
88
+ for member in tar.getmembers():
89
+ if member.isreg(): # skip if the TarInfo is not a file
90
+ member.name = os.path.basename(member.name) # remove the path
91
+ tar.extract(member, output_directory)
92
+ os.remove(file)
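A hedged sketch of how the helpers above compose, assuming valid DACE credentials (the star name is illustrative):

    from arvi.dace_wrapper import get_observations, get_arrays

    # nested dict: instrument -> pipeline -> mode -> arrays ('rjd', 'rv', 'rv_err', ...)
    result = get_observations('HD10180', verbose=True)
    # flatten to ((instrument, pipeline, mode), data) pairs, latest pipeline only
    for (inst, pipe, mode), data in get_arrays(result, latest_pipeline=True):
        print(inst, pipe, mode, len(data['rjd']), 'observations')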
arvi/plots.py ADDED
@@ -0,0 +1,235 @@
1
+ from functools import partial, partialmethod
2
+
3
+ import numpy as np
4
+ import matplotlib.pyplot as plt
5
+ from matplotlib.collections import LineCollection
6
+ from astropy.timeseries import LombScargle
7
+
8
+ from .setup_logger import logger
9
+
10
+
11
+ def plot(self,
12
+ ax=None,
13
+ show_masked=False,
14
+ time_offset=0,
15
+ remove_50000=False,
16
+ tooltips=True,
17
+ N_in_label=False,
18
+ **kwargs):
19
+ """ Plot the RVs
20
+
21
+ Args:
22
+ ax (Axes, optional): Axis to plot to. Defaults to None.
23
+ show_masked (bool, optional): Show masked points. Defaults to False.
24
+ time_offset (int, optional): Value to subtract from time. Defaults to 0.
25
+ remove_50000 (bool, optional): Whether to subtract 50000 from time. Defaults to False.
26
+ tooltips (bool, optional): Highlight clicked points interactively. Defaults to True.
27
+ N_in_label (bool, optional): Show number of observations in legend. Defaults to False.
28
+
29
+ Returns:
30
+ Figure: the figure
31
+ Axes: the axis
32
+ """
33
+ if ax is None:
34
+ fig, ax = plt.subplots(1, 1, constrained_layout=True)
35
+ else:
36
+ fig = ax.figure
37
+
38
+ kwargs.setdefault('fmt', 'o')
39
+ kwargs.setdefault('capsize', 0)
40
+ kwargs.setdefault('ms', 4)
41
+
42
+ if remove_50000:
43
+ time_offset = 50000
44
+
45
+ all_lines = []
46
+ for inst in self.instruments:
47
+ s = self if self._child else getattr(self, inst)
48
+ label = f'{inst:10s} ({s.N})' if N_in_label else inst
49
+ lines, *_ = ax.errorbar(s.mtime - time_offset,
50
+ s.mvrad,
51
+ s.msvrad,
52
+ label=label,
53
+ picker=True,
54
+ **kwargs)
55
+ all_lines.append(lines)
56
+ if show_masked:
57
+ ax.errorbar(self.time[~self.mask] - time_offset,
58
+ self.vrad[~self.mask],
59
+ self.svrad[~self.mask],
60
+ label='masked', fmt='x', color='k')
61
+
62
+ ax.legend()
63
+
64
+ ax.set_ylabel(f'RV [{self.units}]')
65
+ if remove_50000:
66
+ ax.set_xlabel('BJD - 2450000 [days]')
67
+ else:
68
+ ax.set_xlabel('BJD - 2400000 [days]')
69
+
70
+ if tooltips:
71
+ inds = []
72
+ def onpick(event):
73
+ if isinstance(event.artist, LineCollection):
74
+ return
75
+ xdata, ydata = event.artist.get_data()
76
+ ind = event.ind
77
+ if ind in inds:
78
+ inds.remove(ind)
79
+ else:
80
+ inds.append(ind)
81
+
82
+ try:
83
+ onpick.reds.pop().remove()  # clear the previous highlight, if any
84
+ except (AttributeError, IndexError):
85
+ pass
86
+
87
+ if len(inds) > 0:
88
+ onpick.reds = ax.plot(xdata[np.array(inds)], ydata[np.array(inds)],
89
+ 'ro', ms=10, zorder=-1)
90
+ fig.canvas.draw()
91
+ fig.canvas.mpl_connect('pick_event', onpick)
92
+
93
+ return fig, ax
94
+
95
+
96
+ def plot_quantity(self,
97
+ quantity,
98
+ ax=None,
99
+ time_offset=0,
100
+ remove_50000=False,
101
+ tooltips=True,
102
+ N_in_label=False,
103
+ **kwargs):
104
+
105
+ if not hasattr(self, quantity):
106
+ logger.error(f"cannot find '{quantity}' attribute")
107
+ return
108
+
109
+ if ax is None:
110
+ fig, ax = plt.subplots(1, 1, constrained_layout=True)
111
+ else:
112
+ fig = ax.figure
113
+
114
+ kwargs.setdefault('fmt', 'o')
115
+ kwargs.setdefault('capsize', 0)
116
+ kwargs.setdefault('ms', 4)
117
+
118
+ if remove_50000:
119
+ time_offset = 50000
120
+
121
+ all_lines = []
122
+ for inst in self.instruments:
123
+ s = self if self._child else getattr(self, inst)
124
+ label = f'{inst:10s} ({s.N})' if N_in_label else inst
125
+
126
+ y = getattr(s, quantity)[s.mask]
127
+ ye = getattr(s, quantity + '_err')[s.mask]
128
+
129
+ if np.isnan(y).all() or np.isnan(ye).all():
130
+ lines, *_ = ax.errorbar([], [], [],
131
+ label=label, picker=True, **kwargs)
132
+ continue
133
+
134
+ lines, *_ = ax.errorbar(s.mtime - time_offset, y, ye,
135
+ label=label, picker=True, **kwargs)
136
+ all_lines.append(lines)
137
+
138
+ ax.legend()
139
+
140
+ if quantity == 'fwhm':
141
+ ax.set_ylabel(f'FWHM [{self.units}]')
142
+ elif quantity == 'bispan':
143
+ ax.set_ylabel(f'BIS [{self.units}]')
144
+ elif quantity == 'rhk':
145
+ ax.set_ylabel("$\log$ R'$_{HK}")
146
+
147
+ if remove_50000:
148
+ ax.set_xlabel('BJD - 2450000 [days]')
149
+ else:
150
+ ax.set_xlabel('BJD - 2400000 [days]')
151
+
152
+ if tooltips:
153
+ inds = []
154
+ def onpick(event):
155
+ if isinstance(event.artist, LineCollection):
156
+ return
157
+ xdata, ydata = event.artist.get_data()
158
+ ind = event.ind
159
+ if ind in inds:
160
+ inds.remove(ind)
161
+ else:
162
+ inds.append(ind)
163
+
164
+ try:
165
+ onpick.reds.pop().remove()  # clear the previous highlight, if any
166
+ except (AttributeError, IndexError):
167
+ pass
168
+
169
+ if len(inds) > 0:
170
+ onpick.reds = ax.plot(xdata[np.array(inds)], ydata[np.array(inds)],
171
+ 'ro', ms=10, zorder=-1)
172
+ fig.canvas.draw()
173
+ fig.canvas.mpl_connect('pick_event', onpick)
174
+
175
+ return fig, ax
176
+
177
+
178
+ plot_fwhm = partialmethod(plot_quantity, quantity='fwhm')
179
+ plot_bis = partialmethod(plot_quantity, quantity='bispan')
180
+
181
+
182
+ def gls(self, ax=None, fap=True, picker=True):
183
+ if ax is None:
184
+ fig, ax = plt.subplots(1, 1, constrained_layout=True)
185
+ else:
186
+ fig = ax.figure
187
+
188
+ gls = LombScargle(self.mtime, self.mvrad, self.msvrad)
189
+ freq, power = gls.autopower(maximum_frequency=1.0)
190
+ ax.semilogx(1/freq, power, picker=picker)
191
+ if fap:
192
+ ax.axhline(gls.false_alarm_level(0.01),
193
+ color='k',
194
+ alpha=0.2,
195
+ zorder=-1)
196
+ ax.set(xlabel='Period [days]', ylabel='Normalized power')
197
+ return fig, ax
198
+
199
+
200
+ def gls_quantity(self, quantity, ax=None, fap=True, picker=True):
201
+ if not hasattr(self, quantity):
202
+ logger.error(f"cannot find '{quantity}' attribute")
203
+ return
204
+
205
+ if ax is None:
206
+ fig, ax = plt.subplots(1, 1, constrained_layout=True)
207
+ else:
208
+ fig = ax.figure
209
+
210
+ t = self.mtime
211
+ y = getattr(self, quantity)[self.mask]
212
+ ye = getattr(self, quantity + '_err')[self.mask]
213
+
214
+ if np.isnan(y).any():
215
+ if self.verbose:
216
+ logger.warning(f'{quantity} contains NaNs, ignoring them')
217
+ m = np.isnan(y)
218
+ t = t[~m]
219
+ y = y[~m]
220
+ ye = ye[~m]
221
+
222
+ gls = LombScargle(t, y, ye)
223
+ freq, power = gls.autopower(maximum_frequency=1.0)
224
+ ax.semilogx(1/freq, power, picker=picker)
225
+ if fap:
226
+ ax.axhline(gls.false_alarm_level(0.01),
227
+ color='k',
228
+ alpha=0.2,
229
+ zorder=-1)
230
+ ax.set(xlabel='Period [days]', ylabel='Normalized power')
231
+ return fig, ax
232
+
233
+
234
+ gls_fwhm = partialmethod(gls_quantity, quantity='fwhm')
235
+ gls_bis = partialmethod(gls_quantity, quantity='bispan')
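These functions are attached to the RV class in timeseries.py, so they are normally called as methods of an RV instance. A minimal sketch, assuming DACE access (the star name is illustrative):

    import matplotlib.pyplot as plt
    from arvi import RV

    s = RV('HD10180')
    fig, ax = s.plot(N_in_label=True)   # RV time series, one errorbar set per instrument
    fig2, ax2 = s.gls(fap=True)         # Lomb-Scargle periodogram with the 1% FAP level
    s.plot_fwhm()                       # same machinery applied to the FWHM indicator
    plt.show()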
arvi/reports.py ADDED
@@ -0,0 +1,102 @@
1
+ from functools import partial
2
+ import numpy as np
3
+ from astropy.timeseries import LombScargle
4
+ import matplotlib.pyplot as plt
5
+ import matplotlib.gridspec as gridspec
6
+ from matplotlib.backends.backend_pdf import PdfPages
7
+
8
+ from .setup_logger import logger
9
+
10
+
11
+ sine_line = None
12
+ residual_gls = None
13
+ def sine_picker(event, self, fig, ax, ax1):
14
+ from .timeseries import fit_sine
15
+ global sine_line, residual_gls
16
+ if sine_line is not None:
17
+ sine_line[0].remove()
18
+ if residual_gls is not None:
19
+ residual_gls[0].remove()
20
+ xdata, ydata = event.artist.get_data()
21
+ ind = event.ind
22
+ period = xdata[ind][0]
23
+ p, sine = fit_sine(self.mtime, self.mvrad, self.msvrad, period=period)
24
+ tt = np.linspace(self.mtime.min(), self.mtime.max(), 100)
25
+ tt -= 50000
26
+ sine_line = ax1.plot(tt, sine(tt), 'k')
27
+ #
28
+ f, p = LombScargle(self.mtime, self.mvrad - sine(self.mtime), self.msvrad).autopower()
29
+ residual_gls = ax.semilogx(1/f, p, 'r')
30
+ fig.canvas.draw_idle()
31
+
32
+
33
+ def report(self, save=None):
34
+ # size = A4
35
+ size = 8.27, 11.69
36
+ fig = plt.figure(figsize=size, constrained_layout=True)
37
+ gs = gridspec.GridSpec(5, 3, figure=fig, height_ratios=[2, 2, 1, 1, 0.1])
38
+
39
+ # first row, all columns
40
+ ax1 = plt.subplot(gs[0, :])
41
+
42
+ title = f'{self.star}'
43
+ ax1.set_title(title, loc='left', fontsize=14)
44
+ # ax1.set_title(r"\href{http://www.google.com}{link}", color='blue',
45
+ # loc='center')
46
+
47
+ if self._did_adjust_means:
48
+ title = '(instrument means subtracted) '
49
+ else:
50
+ title = ''
51
+ title += f'V={self.simbad.V}, {self.simbad.sp_type}'
52
+ ax1.set_title(title, loc='right', fontsize=12)
53
+
54
+ self.plot(ax=ax1, N_in_label=True, tooltips=False, remove_50000=True)
55
+
56
+
57
+ ax1.legend().remove()
58
+ legend_ax = plt.subplot(gs[1, -1])
59
+ legend_ax.axis('off')
60
+ leg = plt.legend(*ax1.get_legend_handles_labels(),
61
+ prop={'family': 'monospace'})
62
+ legend_ax.add_artist(leg)
63
+ second_legend = f'rms : {self.rms:.2f} {self.units}\n'
64
+ second_legend += f'error: {self.error:.2f} {self.units}'
65
+ legend_ax.legend([],
66
+ title=second_legend,
67
+ loc='lower right', frameon=False,
68
+ prop={'family': 'monospace'})
69
+
70
+ ax2 = plt.subplot(gs[1, :-1])
71
+ self.gls(ax=ax2, picker=True)
72
+
73
+ ax3 = plt.subplot(gs[2, :-1])
74
+ self.plot_fwhm(ax=ax3, tooltips=False, remove_50000=True)
75
+ ax3.legend().remove()
76
+ ax3p = plt.subplot(gs[2, -1])
77
+ self.gls_fwhm(ax=ax3p, picker=False)
78
+
79
+ ax4 = plt.subplot(gs[3, :-1])
80
+ self.plot_bis(ax=ax4, tooltips=False, remove_50000=True)
81
+ ax4.legend().remove()
82
+ ax4p = plt.subplot(gs[3, -1])
83
+ self.gls_bis(ax=ax4p, picker=False)
84
+
85
+
86
+ if save is None:
87
+ fig.canvas.mpl_connect(
88
+ 'pick_event',
89
+ partial(sine_picker, self=self, fig=fig, ax=ax2, ax1=ax1))
90
+
91
+ if save is not None:
92
+ if save is True:
93
+ save = f'report_{"".join(self.star.split())}.pdf'
94
+
95
+ with PdfPages(save) as pdf:
96
+ pdf.attach_note('hello', positionRect=[5, 15, 20, 30])
97
+
98
+ if self.verbose:
99
+ logger.info(f'saving to {save}')
100
+ pdf.savefig(fig)
101
+ plt.close('all')
102
+ # os.system(f'evince {save} &')
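A short sketch of how the report above is used, assuming DACE access (the star name is illustrative):

    from arvi import RV

    s = RV('HD10180')
    s.report()           # interactive: clicking a periodogram peak triggers sine_picker
    s.report(save=True)  # non-interactive: writes report_<star>.pdf through PdfPages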
arvi/setup_logger.py ADDED
@@ -0,0 +1,11 @@
1
+ import sys
2
+ from loguru import logger
3
+
4
+ logger.remove()
5
+ logger.configure(extra={"indent": ""})
6
+ logger.add(
7
+ sys.stdout,
8
+ colorize=True,
9
+ # format="<green>{time:YYYY-MM-DDTHH:mm:ss}</green> <level>{message}</level>",
10
+ format="{extra[indent]}<level>{message}</level>",
11
+ )
arvi/simbad_wrapper.py ADDED
@@ -0,0 +1,103 @@
1
+ from dataclasses import dataclass, field
2
+ import requests
3
+
4
+ from astropy.coordinates import SkyCoord
5
+
6
+ QUERY = """
7
+ SELECT basic.OID,
8
+ RA,
9
+ DEC,
10
+ main_id,
11
+ plx_value,
12
+ rvz_radvel,
13
+ sp_type
14
+ FROM basic JOIN ident ON oidref = oid
15
+ WHERE id = '{star}';
16
+ """
17
+
18
+ BV_QUERY = """
19
+ SELECT B, V from allfluxes
20
+ JOIN ident USING(oidref)
21
+ WHERE id = '{star}';
22
+ """
23
+
24
+ IDS_QUERY = """
25
+ SELECT ids from ids
26
+ JOIN ident USING(oidref)
27
+ WHERE id = '{star}';
28
+ """
29
+
30
+ def run_query(query):
31
+ url = 'http://simbad.u-strasbg.fr/simbad/sim-tap/sync'
32
+ response = requests.post(url,
33
+ data=dict(query=query,
34
+ request='doQuery',
35
+ lang='ADQL',
36
+ format='text/plain',
37
+ phase='run'))
38
+ return response.content.decode()
39
+
40
+ def parse_table(table, cols=None, values=None):
41
+ header = table.splitlines()[0].split('|')
42
+ if cols is None:
43
+ cols = list(map(str.strip, header))
44
+ else:
45
+ cols = cols + list(map(str.strip, header))
46
+ if values is None:
47
+ values = table.splitlines()[2].split('|')
48
+ else:
49
+ values = values + table.splitlines()[2].split('|')
50
+ values = list(map(str.strip, values))
51
+ values = [value.replace('"', '') for value in values]
52
+ return cols, values
53
+
54
+
55
+ class simbad:
56
+ """
57
+ A very simple wrapper around a TAP query to simbad for a given target. This
58
+ class simply runs a few TAP queries and stores the result as attributes.
59
+
60
+ Attributes:
61
+ ra (float): right ascension
62
+ dec (float): declination
63
+ coords (SkyCoord): coordinates as a SkyCoord object
64
+ main_id (str): main identifier
65
+ plx_value (float): parallax
66
+ rvz_radvel (float): radial velocity
67
+ sp_type (str): spectral type
68
+ B (float): B magnitude
69
+ V (float): V magnitude
70
+ ids (list): list of identifiers
71
+ """
72
+ def __init__(self, star:str):
73
+ """
74
+ Args:
75
+ star (str): The name of the star to query simbad
76
+ """
77
+ self.star = star
78
+ try:
79
+ table1 = run_query(query=QUERY.format(star=star))
80
+ cols, values = parse_table(table1)
81
+
82
+ table2 = run_query(query=BV_QUERY.format(star=star))
83
+ cols, values = parse_table(table2, cols, values)
84
+
85
+ table3 = run_query(query=IDS_QUERY.format(star=star))
86
+ line = table3.splitlines()[2]
87
+ self.ids = line.replace('"', '').replace('  ', ' ').split('|')
88
+ except IndexError:
89
+ raise ValueError(f'simbad query for {star} failed')
90
+
91
+ for col, val in zip(cols, values):
92
+ try:
93
+ setattr(self, col, float(val))
94
+ except ValueError:
95
+ setattr(self, col, val)
96
+
97
+ self.coords = SkyCoord(self.ra, self.dec, unit='deg')
98
+
99
+
100
+ def __repr__(self):
101
+ V = self.V
102
+ sp_type = self.sp_type
103
+ return f'{self.star} ({V=}, {sp_type=})'
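A minimal sketch of the wrapper above; it needs network access to the SIMBAD TAP service, and the target name is illustrative:

    from arvi.simbad_wrapper import simbad

    s = simbad('HD10180')
    print(s.main_id, s.sp_type, s.V, s.plx_value)
    print(s.coords.to_string('hmsdms'))
    print(s.ids[:3])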
arvi/stats.py ADDED
@@ -0,0 +1,36 @@
1
+ import numpy as np
2
+
3
+
4
+ def wmean(a, e):
5
+ """
6
+ Weighted mean of array `a`, with uncertainty given by `e`.
7
+ The weighted mean is calculated using weights equal to 1/e**2
8
+
9
+ Args:
10
+ a (array): Array containing data
11
+ e (array): Uncertainties on `a`
12
+ """
13
+ return np.average(a, weights=1 / e**2)
14
+
15
+
16
+ def rms(a):
17
+ """
18
+ Root mean square of array `a`
19
+ Args:
20
+ a (array): Array containing data
21
+ """
22
+ return np.sqrt((a**2).mean())
23
+
24
+
25
+ def wrms(a, e):
26
+ """
27
+ Weighted root mean square of array `a`, with uncertainty given by `e`.
28
+ The weighted rms is calculated using the weighted mean, where the
29
+ weights are equal to 1/e**2.
30
+
31
+ Args:
32
+ a (array): Array containing data
33
+ e (array): Uncertainties on `a`
34
+ """
35
+ w = 1 / e**2
36
+ return np.sqrt(np.sum(w * (a - np.average(a, weights=w))**2) / sum(w))
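A small numerical check of the three estimators above (values rounded):

    import numpy as np
    from arvi.stats import wmean, rms, wrms

    a = np.array([1.0, 2.0, 3.0])
    e = np.array([0.1, 0.1, 0.2])
    print(wmean(a, e))  # ~1.667, weights 1/e**2 favour the better-measured points
    print(rms(a))       # ~2.160, square root of the mean of a**2
    print(wrms(a, e))   # ~0.667, weighted scatter about the weighted mean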
arvi/timeseries.py ADDED
@@ -0,0 +1,328 @@
1
+ from dataclasses import dataclass, field
2
+ from typing import Union
3
+ from functools import partial
4
+ from datetime import datetime, timezone
5
+ import numpy as np
6
+
7
+ from .setup_logger import logger
8
+ from .translations import translate
9
+ from .dace_wrapper import get_observations, get_arrays, do_download_ccf
10
+ from .simbad_wrapper import simbad
11
+ from .stats import wmean, wrms
12
+
13
+ @dataclass
14
+ class RV:
15
+ """
16
+ A class holding RV observations
17
+
18
+ Attributes:
19
+ star (str):
20
+ The name of the star
21
+ N (int):
22
+ Total number of observations
23
+ verbose (bool):
24
+ Log some operations to the terminal
25
+ instruments (list):
26
+ List of instruments for which there are RVs. Each instrument is also
27
+ stored as an attribute (e.g. `self.CORALIE98` or `self.HARPS`)
28
+ """
29
+ star: str
30
+ N: int = field(init=False, repr=True)
31
+ verbose: bool = field(init=True, repr=False, default=True)
32
+ do_sigma_clip: bool = field(init=True, repr=False, default=True)
33
+ do_maxerror: Union[bool, float] = field(init=True, repr=False, default=100)
34
+ do_adjust_means: bool = field(init=True, repr=False, default=True)
35
+ #
36
+ _child: bool = field(init=True, repr=False, default=False)
37
+ _did_sigma_clip: bool = field(init=False, repr=False, default=False)
38
+ _did_adjust_means: bool = field(init=False, repr=False, default=False)
39
+
40
+ def __post_init__(self):
41
+ self.__star__ = translate(self.star)
42
+ try:
43
+ self.simbad = simbad(self.__star__)
44
+ except ValueError as e:
45
+ logger.error(e)
46
+
47
+ if not self._child:
48
+ if self.verbose:
49
+ logger.info('querying DACE...')
50
+ self.dace_result = get_observations(self.__star__,
51
+ verbose=self.verbose)
52
+ # store the date of the last DACE query
53
+ time_stamp = datetime.now(timezone.utc) #.isoformat().split('.')[0]
54
+ self._last_dace_query = time_stamp
55
+
56
+ self.units = 'm/s'
57
+
58
+ # build children
59
+ if not self._child:
60
+ arrays = get_arrays(self.dace_result)
61
+ for (inst, pipe, mode), data in arrays:
62
+ child = RV.from_dace_data(self.star, inst, pipe, mode, data, _child=True)
63
+ setattr(self, inst, child)
64
+
65
+ # build joint arrays
66
+ if not self._child:
67
+ self.instruments = list(self.dace_result.keys())
68
+ # self.pipelines =
69
+ # "observatory" (or instrument id)
70
+ self.obs = np.concatenate(
71
+ [np.full(getattr(self, inst).N, i+1) for i, inst in enumerate(self.instruments)],
72
+ dtype=int
73
+ )
74
+ # mask
75
+ self.mask = np.full_like(self.obs, True, dtype=bool)
76
+ # all other quantities
77
+ self._build_arrays()
78
+
79
+ # do clip_maxerror, sigmaclip, adjust_means
80
+ if not self._child:
81
+ if self.do_maxerror:
82
+ self.clip_maxerror(self.do_maxerror)
83
+
84
+ if self.do_sigma_clip:
85
+ self.sigmaclip()
86
+
87
+ if self.do_adjust_means:
88
+ self.adjust_means()
89
+
90
+
91
+ def reload(self):
92
+ self.__post_init__()
93
+
94
+ @property
95
+ def N(self):
96
+ return self.time.size
97
+
98
+ @N.setter
99
+ def N(self, value):
100
+ if not isinstance(value, property):
101
+ logger.error('Cannot set N directly')
102
+
103
+ @property
104
+ def mtime(self):
105
+ return self.time[self.mask]
106
+
107
+ @property
108
+ def mvrad(self):
109
+ return self.vrad[self.mask]
110
+
111
+ @property
112
+ def msvrad(self):
113
+ return self.svrad[self.mask]
114
+
115
+ @property
116
+ def rms(self):
117
+ """ Weighted rms of the (masked) radial velocities """
118
+ if self.mask.sum() == 0: # no unmasked points
119
+ return np.nan
120
+ else:
121
+ return wrms(self.vrad[self.mask], self.svrad[self.mask])
122
+
123
+ @property
124
+ def sigma(self):
125
+ """ Average error bar """
126
+ if self.mask.sum() == 0: # no unmasked points
127
+ return np.nan
128
+ else:
129
+ return self.svrad[self.mask].mean()
130
+
131
+ error = sigma # alias!
132
+
133
+ @classmethod
134
+ def from_dace_data(cls, star, inst, pipe, mode, data, **kwargs):
135
+ s = cls(star, **kwargs)
136
+ #
137
+ ind = np.argsort(data['rjd'])
138
+ # time, RVs, uncertainties
139
+ s.time = data['rjd'][ind]
140
+ s.vrad = data['rv'][ind]
141
+ s.svrad = data['rv_err'][ind]
142
+ # mask
143
+ s.mask = np.full_like(s.time, True, dtype=bool)
144
+ # all other quantities
145
+ for arr in data.keys():
146
+ if arr not in ('rjd', 'rv', 'rv_err'):
147
+ if arr == 'mask':
148
+ # change name mask -> ccf_mask
149
+ setattr(s, 'ccf_mask', data[arr][ind])
150
+ else:
151
+ setattr(s, arr, data[arr][ind])
152
+ #
153
+ s.instruments = [inst]
154
+ s.pipelines = [pipe]
155
+ return s
156
+
157
+ def _build_arrays(self):
158
+ if self._child:
159
+ return
160
+ # time
161
+ self.time = np.concatenate(
162
+ [getattr(self, inst).time for inst in self.instruments]
163
+ )
164
+ # RVs
165
+ self.vrad = np.concatenate(
166
+ [getattr(self, inst).vrad for inst in self.instruments]
167
+ )
168
+ # uncertainties
169
+ self.svrad = np.concatenate(
170
+ [getattr(self, inst).svrad for inst in self.instruments]
171
+ )
172
+ arrays = get_arrays(self.dace_result)
173
+ quantities = list(arrays[0][-1].keys())
174
+ # all other quantities
175
+ for q in quantities:
176
+ if q not in ('rjd', 'rv', 'rv_err'):
177
+ if q == 'mask': # change mask -> ccf_mask
178
+ q = 'ccf_mask'
179
+ arr = np.concatenate(
180
+ [getattr(getattr(self, inst), q) for inst in self.instruments]
181
+ )
182
+ setattr(self, q, arr)
183
+
184
+
185
+ def download_ccf(self, instrument=None):
186
+ directory = f'{self.star}_downloads'
187
+ if instrument is None:
188
+ files = [file for file in self.raw_file if file.endswith('.fits')]
189
+ else:
190
+ if instrument not in self.instruments:
191
+ logger.error(f"No data from instrument '{instrument}'")
192
+ logger.info(f'available: {self.instruments}')
193
+ return
194
+ files = getattr(self, instrument).raw_file
195
+
196
+ do_download_ccf(files, directory)
197
+
198
+
199
+ from .plots import plot, plot_fwhm, plot_bis
200
+ from .plots import gls, gls_fwhm, gls_bis
201
+ from .reports import report
202
+
203
+
204
+ def remove_instrument(self, instrument):
205
+ """ Remove all observations from `instrument` """
206
+ if instrument not in self.instruments:
207
+ logger.error(f"No data from instrument '{instrument}'")
208
+ logger.info(f'available: {self.instruments}')
209
+ return
210
+
211
+ ind = self.instruments.index(instrument) + 1
212
+ remove = np.where(self.obs == ind)
213
+ self.obs = np.delete(self.obs, remove)
214
+ self.obs[self.obs > ind] -= 1
215
+ #
216
+ self.time = np.delete(self.time, remove)
217
+ self.vrad = np.delete(self.vrad, remove)
218
+ self.svrad = np.delete(self.svrad, remove)
219
+ #
220
+ self.instruments.remove(instrument)
221
+ #
222
+ delattr(self, instrument)
223
+
224
+ if self.verbose:
225
+ logger.info(f"Removed observations from '{instrument}'")
226
+
227
+ def remove_point(self, index):
228
+ """ Remove individual observations at a given `index` (or indices) """
229
+ index = np.atleast_1d(index)
230
+ try:
231
+ instrument_index = self.obs[index]
232
+ instrument = np.array(self.instruments)[instrument_index - 1]
233
+ except IndexError:
234
+ logger.error(f'index {index} is out of bounds for N={self.N}')
235
+ return
236
+
237
+ self.mask[index] = False
238
+ self._propagate_mask_changes()
239
+ # for i, inst in zip(index, instrument):
240
+ # index_in_instrument = i - (self.obs < instrument_index).sum()
241
+ # getattr(self, inst).mask[index_in_instrument] = False
242
+
243
+ def _propagate_mask_changes(self):
244
+ """ link self.mask with each self.`instrument`.mask """
245
+ masked = np.where(~self.mask)[0]
246
+ for m in masked:
247
+ inst = self.instruments[self.obs[m] - 1]
248
+ n_before = (self.obs < self.obs[m]).sum()
249
+ getattr(self, inst).mask[m - n_before] = False
250
+
251
+ def sigmaclip(self, sigma=3):
252
+ """ Sigma-clip RVs """
253
+ if self._child or self._did_sigma_clip:
254
+ return
255
+ from scipy.stats import sigmaclip as dosigmaclip
256
+ result = dosigmaclip(self.vrad, low=sigma, high=sigma)
257
+ n = self.vrad.size - result.clipped.size
258
+ if self.verbose and n > 0:
259
+ s = 's' if (n == 0 or n > 1) else ''
260
+ logger.warning(f'sigma-clip RVs removed {n} point' + s)
261
+ ind = (self.vrad > result.lower) & (self.vrad < result.upper)
262
+ self.mask[~ind] = False
263
+ self._propagate_mask_changes()
264
+
265
+ def clip_maxerror(self, maxerror:float, plot=False):
266
+ """ Mask out points with RV error larger than `maxerror` """
267
+ if self._child:
268
+ return
269
+ self.maxerror = maxerror
270
+ above = self.svrad > maxerror
271
+ n = above.sum()
272
+ self.mask[above] = False
273
+
274
+ if self.verbose and above.sum() > 0:
275
+ s = 's' if (n == 0 or n > 1) else ''
276
+ logger.warning(f'clip_maxerror removed {n} point' + s)
277
+
278
+ self._propagate_mask_changes()
279
+
280
+ def adjust_means(self, just_rv=False):
281
+ if self._child or self._did_adjust_means:
282
+ return
283
+
284
+ others = ('fwhm', 'bispan', )
285
+ for inst in self.instruments:
286
+ s = getattr(self, inst)
287
+ s.rv_mean = wmean(s.mvrad, s.msvrad)
288
+ s.vrad -= s.rv_mean
289
+ if self.verbose:
290
+ logger.info(f'subtracted weighted average from {inst:10s}: ({s.rv_mean:.3f} {self.units})')
291
+ if just_rv:
292
+ continue
293
+ log_msg = 'same for '
294
+ for i, other in enumerate(others):
295
+ y, ye = getattr(s, other), getattr(s, other + '_err')
296
+ m = wmean(y, ye)
297
+ setattr(s, f'{other}_mean', m)
298
+ setattr(s, other, getattr(s, other) - m)
299
+ log_msg += other
300
+ if i < len(others) - 1:
301
+ log_msg += ', '
302
+
303
+ if self.verbose:
304
+ logger.info(log_msg)
305
+
306
+ self._build_arrays()
307
+ self._did_adjust_means = True
308
+
309
+
310
+ def fit_sine(t, y, yerr, period='gls', fix_period=False):
311
+ from scipy.optimize import leastsq
312
+ if period == 'gls':
313
+ from astropy.timeseries import LombScargle
314
+ gls = LombScargle(t, y, yerr)
315
+ freq, power = gls.autopower()
316
+ period = 1 / freq[power.argmax()]
317
+
318
+ if fix_period and period is None:
319
+ logger.error('period is fixed but no value provided')
320
+ return
321
+
322
+ def sine(t, p):
323
+ return p[0] * np.sin(2 * np.pi * t / p[1] + p[2]) + p[3]
324
+
325
+ p0 = [np.ptp(y), period, 0.0, 0.0]
326
+ xbest, _ = leastsq(lambda p, t, y, ye: (sine(t, p) - y) / ye,
327
+ p0, args=(t, y, yerr))
328
+ return xbest, partial(sine, p=xbest)
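A hedged end-to-end sketch of the RV class defined above, assuming DACE access (the star name is illustrative):

    from arvi import RV

    s = RV('HD10180', verbose=True)   # queries DACE, clips large errors and outliers, subtracts means
    print(s.N, s.instruments)
    print(s.rms, s.error, s.units)    # weighted rms and mean error bar of the unmasked RVs
    s.remove_point(0)                 # mask the first observation
    fig, ax = s.plot(N_in_label=True)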
arvi/translations.py ADDED
@@ -0,0 +1,10 @@
1
+ STARS = {
2
+ 'Barnard': 'GJ699',
3
+ "Barnard's": 'GJ699',
4
+ }
5
+
6
+
7
+ def translate(star):
8
+ if star in STARS:
9
+ return STARS[star]
10
+ return star
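For example (the second name is illustrative and simply not in the table):

    from arvi.translations import translate

    print(translate('Barnard'))   # -> 'GJ699'
    print(translate('HD10180'))   # unknown names pass through unchanged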
arvi/utils.py ADDED
@@ -0,0 +1,32 @@
1
+ import os
2
+ from contextlib import contextmanager
3
+ from unittest.mock import patch
4
+ import logging
5
+
6
+
7
+ @contextmanager
8
+ def stdout_disabled():
9
+ devnull = open(os.devnull, 'w')
10
+ with patch('sys.stdout', devnull):
11
+ yield
12
+
13
+
14
+ @contextmanager
15
+ def all_logging_disabled():
16
+ """
17
+ A context manager that will prevent any logging messages triggered during
18
+ the body from being processed.
19
+ """
20
+ # two kind-of hacks here:
21
+ # * can't get the highest logging level in effect => delegate to the user
22
+ # * can't get the current module-level override => use an undocumented
23
+ # (but non-private!) interface
24
+
25
+ previous_level = logging.root.manager.disable
26
+
27
+ logging.disable(logging.CRITICAL)
28
+
29
+ try:
30
+ yield
31
+ finally:
32
+ logging.disable(previous_level)
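A small sketch of the two context managers above, silencing both stdout and logging around a noisy call:

    import logging
    from arvi.utils import stdout_disabled, all_logging_disabled

    with stdout_disabled(), all_logging_disabled():
        print('this goes to os.devnull')
        logging.getLogger('noisy').warning('this is suppressed as well')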
@@ -1,14 +1,20 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: arvi
3
- Version: 0.0.1.dev1
3
+ Version: 0.0.3
4
4
  Summary: The Automated RV Inspector
5
5
  Author-email: João Faria <joao.faria@astro.up.pt>
6
6
  License: MIT
7
+ Project-URL: Repository, https://github.com/j-faria/arvi
7
8
  Keywords: RV,exoplanets
8
9
  Classifier: Programming Language :: Python :: 3
9
10
  Requires-Python: >=3.8
10
11
  Description-Content-Type: text/markdown
11
12
  License-File: LICENSE
13
+ Requires-Dist: loguru
14
+ Requires-Dist: mpldatacursor
15
+ Requires-Dist: tqdm
16
+ Requires-Dist: pyexoplaneteu
17
+ Requires-Dist: dace-query
12
18
 
13
19
  <p align="center">
14
20
  <img width = "140" src="https://github.com/j-faria/arvi/blob/main/docs/logo/logo.png?raw=true"/>
@@ -0,0 +1,15 @@
1
+ arvi/__init__.py,sha256=d4UTdSj3oRwFvO1a_m5uz_fqEVFR9U-afio38MNuESU,241
2
+ arvi/dace_wrapper.py,sha256=0zId_2uti-_UOkCZHJnRvajO5EMNgwNfc5AgV-LjSPQ,3340
3
+ arvi/plots.py,sha256=uOruz1xhlx7nNjIEtLTX6gs_iya2Yds71eIYoYt2LVw,6925
4
+ arvi/reports.py,sha256=FtalLbmBHOSHQ3RldImUZTHPc3cc3Z3W5r3d-31FIo8,3261
5
+ arvi/setup_logger.py,sha256=nvnd2PtXYnpYMGleTeqAKFe-TnC6SjNlTZ-EmiewLyY,278
6
+ arvi/simbad_wrapper.py,sha256=7XnuM7Kq5kK4EchHjh-uyRoTWm6kCx0sx7Ak0eDaALo,3023
7
+ arvi/stats.py,sha256=OIlXisf_kuUhfDRhvCirmJ2e-YSIJnoBjdFOXaYd0bQ,868
8
+ arvi/timeseries.py,sha256=tWKAn5tCQpI1YaCE_breyQ_dXAsj0X06jox4nH9ES5E,11159
9
+ arvi/translations.py,sha256=eyUJei8wlsyBecTz8E_ntpP35-Mre2Tzm_mUMMGaZWY,150
10
+ arvi/utils.py,sha256=OPb6rXafv7b8vRRTEg9zQAQnhi3lRLICF62R8Fhy43A,788
11
+ arvi-0.0.3.dist-info/LICENSE,sha256=6JfQgl7SpM55t0EHMFNMnNh-AdkpGW25MwMiTnhdWQg,1068
12
+ arvi-0.0.3.dist-info/METADATA,sha256=01PYughU6O-f6cwMA_6elKUMallpnN5uaxN6k8xgeys,827
13
+ arvi-0.0.3.dist-info/WHEEL,sha256=yQN5g4mg4AybRjkgi-9yy4iQEFibGQmlz78Pik5Or-A,92
14
+ arvi-0.0.3.dist-info/top_level.txt,sha256=4EeiKDVLD45ztuflTGfQ3TU8GVjJg5Y95xS5XjI-utU,5
15
+ arvi-0.0.3.dist-info/RECORD,,
@@ -1,5 +1,5 @@
1
1
  Wheel-Version: 1.0
2
- Generator: bdist_wheel (0.41.1)
2
+ Generator: bdist_wheel (0.41.2)
3
3
  Root-Is-Purelib: true
4
4
  Tag: py3-none-any
5
5
 
@@ -1,6 +0,0 @@
1
- arvi/__init__.py,sha256=BNInQtK0-v5cqZ_-bX0s_c8AB_Ky08gDUCNP_oy__6Y,25
2
- arvi-0.0.1.dev1.dist-info/LICENSE,sha256=6JfQgl7SpM55t0EHMFNMnNh-AdkpGW25MwMiTnhdWQg,1068
3
- arvi-0.0.1.dev1.dist-info/METADATA,sha256=hgc8fc8h5k7IYdd7jOP4BdPShppngl4axsyGk7cUlIE,649
4
- arvi-0.0.1.dev1.dist-info/WHEEL,sha256=5sUXSg9e4bi7lTLOHcm6QEYwO5TIF1TNbTSVFVjcJcc,92
5
- arvi-0.0.1.dev1.dist-info/top_level.txt,sha256=4EeiKDVLD45ztuflTGfQ3TU8GVjJg5Y95xS5XjI-utU,5
6
- arvi-0.0.1.dev1.dist-info/RECORD,,