arvi-0.2.1-py3-none-any.whl → arvi-0.2.2-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
arvi/__init__.py CHANGED
@@ -1,16 +1,18 @@
 __all__ = ['RV']
 
 from importlib.metadata import version, PackageNotFoundError
-
-from .config import config
-from .timeseries import RV
-
 try:
     __version__ = version("arvi")
 except PackageNotFoundError:
     # package is not installed
     pass
 
+from .config import config
+from .timeseries import RV
+
+from .simbad_wrapper import simbad
+
+
 ## OLD
 # # the __getattr__ function is always called twice, so we need this
 # # to only build and return the RV object on the second time
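
With this reordering, __version__ is defined before the heavier submodule imports run, and the SIMBAD wrapper becomes importable from the package root. A minimal usage sketch (the star name is only an example):

    from arvi import RV, simbad
    s = simbad('HD 10180')  # query SIMBAD directly, without building an RV object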
arvi/dace_wrapper.py CHANGED
@@ -5,13 +5,15 @@ import collections
 from functools import lru_cache
 from itertools import islice
 import numpy as np
-from dace_query import DaceClass
-from dace_query.spectroscopy import SpectroscopyClass, Spectroscopy as default_Spectroscopy
 from .setup_logger import logger
 from .utils import create_directory, all_logging_disabled, stdout_disabled, tqdm
 
 
-def load_spectroscopy(user=None) -> SpectroscopyClass:
+def load_spectroscopy(user=None):
+    with all_logging_disabled():
+        from dace_query.spectroscopy import SpectroscopyClass, Spectroscopy as default_Spectroscopy
+        from dace_query import DaceClass
+
     from .config import config
     # requesting as public
     if config.request_as_public:
@@ -39,6 +41,8 @@ def load_spectroscopy(user=None) -> SpectroscopyClass:
         logger.info(f'using credentials for user {user} in ~/.dacerc')
         return SpectroscopyClass(dace_instance=dace)
     # default
+    if not os.path.exists(os.path.expanduser('~/.dacerc')):
+        logger.warning('requesting DACE data as public (no .dacerc file found)')
     return default_Spectroscopy
 
 @lru_cache()
@@ -153,6 +157,7 @@ def get_observations_from_instrument(star, instrument, user=None, main_id=None,
         dictionary with data from DACE
     """
     Spectroscopy = load_spectroscopy(user)
+
     found_dace_id = False
     try:
         dace_id = get_dace_id(star, verbose=verbose)
@@ -256,6 +261,7 @@ def get_observations_from_instrument(star, instrument, user=None, main_id=None,
 def get_observations(star, instrument=None, user=None, main_id=None, verbose=True):
     if instrument is None:
         Spectroscopy = load_spectroscopy(user)
+
     try:
         with stdout_disabled(), all_logging_disabled():
             result = Spectroscopy.get_timeseries(target=star,
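
The dace_query imports are now deferred into load_spectroscopy, so `import arvi` no longer pays their startup cost, and users without credentials get an explicit warning. A sketch of the new fallback path (assuming no ~/.dacerc file exists on the machine):

    from arvi.dace_wrapper import load_spectroscopy
    Spectroscopy = load_spectroscopy()
    # logs: 'requesting DACE data as public (no .dacerc file found)'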
arvi/exofop_wrapper.py ADDED
@@ -0,0 +1,61 @@
+import csv
+import requests
+import time
+import importlib.resources as resources
+import numpy as np
+
+from .setup_logger import logger
+
+def get_toi_list(verbose=True):
+    toi_list = resources.files('arvi') / 'data' / 'exofop_toi_list.csv'
+    now = time.time()
+    download = not toi_list.exists() or toi_list.stat().st_mtime < now - 48 * 60 * 60
+    if download:
+        if verbose:
+            logger.info('Downloading exofop TOI list (can take a while)...')
+        r = requests.get('https://exofop.ipac.caltech.edu/tess/download_toi.php?sort=toi&output=csv')
+        with open(toi_list, 'wb') as f:
+            f.write(r.content)
+    return toi_list
+
+class exofop:
+    def __init__(self, star: str, verbose=True, _debug=False):
+        self.star = star
+        self.verbose = verbose
+
+        toi_list = get_toi_list(verbose=verbose)
+        tsv = ('|'.join(i) for i in csv.reader(open(toi_list, encoding='utf8')))
+        self.data = np.genfromtxt(tsv, delimiter='|',
+                                  names=True, encoding='utf8', dtype=None)
+
+
+        try:
+            if self.star.startswith('TIC'):
+                self.tic = self.star
+                w = self.data['TIC_ID'] == int(self.star[3:])
+                self.toi = 'TOI-' + str(int(self.data['TOI'][w][0]))
+            else:
+                toi = self.star.replace('TOI-', '')
+                toi = toi if toi.endswith('.01') else toi + '.01'
+                toi_float = float(toi)
+                if toi_float not in self.data['TOI']:
+                    raise ValueError
+                w = self.data['TOI'] == toi_float
+                self.tic = 'TIC' + str(int(self.data['TIC_ID'][w][0]))
+                self.toi = self.star
+        except ValueError:
+            raise ValueError(f'{self.star} not found in exofop TOI list')
+        else:
+            self.ra = str(self.data['RA'][w][0])
+            self.dec = str(self.data['Dec'][w][0])
+
+            self.epoch = float(self.data['Epoch_BJD'][w][0])
+            self.period = float(self.data['Period_days'][w][0])
+            if self.period == 0.0:
+                self.period = np.nan
+            self.duration = float(self.data['Duration_hours'][w][0])
+            self.depth = float(self.data['Depth_ppm'][w][0])
+
+
+    def __repr__(self):
+        return f'{self.star} (TIC={self.tic}, epoch={self.epoch:.3f}, period={self.period:.3f})'
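
A hedged usage sketch of the new wrapper (the identifiers are examples; the first call downloads the TOI list and then reuses the cached copy for 48 hours):

    from arvi.exofop_wrapper import exofop
    toi = exofop('TOI-700')   # TIC identifiers also work, e.g. exofop('TIC260647166')
    print(toi.tic, toi.epoch, toi.period, toi.duration, toi.depth)
    print(toi)                # e.g. "TOI-700 (TIC=..., epoch=..., period=...)"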
arvi/extra_data.py CHANGED
@@ -12,16 +12,20 @@ refs = {
 
 def get_extra_data(star, instrument=None, path=None, verbose=True,
                    check_for_kms=True):
+
     if path is None:
         path = os.path.dirname(__file__)
         path = os.path.join(path, 'data', 'extra')
+        metadata = json.load(open(os.path.join(path, 'metadata.json'), 'r'))
+        # print(metadata)
+    else:
+        metadata = {}
 
-    metadata = json.load(open(os.path.join(path, 'metadata.json'), 'r'))
-    # print(metadata)
-
-    files = glob(os.path.join(path, star.replace(' ', '') + '*.rdb'))
+    files = glob(os.path.join(path, star + '*.rdb'))
+    files += glob(os.path.join(path, star.replace(' ', '') + '*.rdb'))
     files = [f for f in files if os.path.isfile(f)]
-    files = [f for f in files if not os.path.basename(f).endswith('.zip')]
+    files = [f for f in files if not f.endswith('_actin.rdb')]
+    files = list(set(files))
 
     if len(files) == 0:
         raise FileNotFoundError
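
Since metadata.json is now only read from the bundled data directory, a user-supplied path no longer needs one. A minimal sketch (the directory is hypothetical):

    from arvi.extra_data import get_extra_data
    data = get_extra_data('HD 10180', path='/path/to/my/rdb/files')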
arvi/gaia_wrapper.py CHANGED
@@ -73,7 +73,7 @@ class gaia:
         plx (float): parallax
         radial_velocity (float): radial velocity
     """
-    def __init__(self, star:str, simbad=None):
+    def __init__(self, star:str, simbad=None, _debug=False):
        """
        Args:
            star (str): The name of the star to query simbad
@@ -83,6 +83,8 @@ class gaia:
        if simbad is None:
            from .simbad_wrapper import simbad as Simbad
            simbad = Simbad(star)
+       if _debug:
+           print(simbad)
 
        ra = simbad.ra
        dec = simbad.dec
@@ -95,10 +97,14 @@
        try:
            if star in translate:
                table = run_query(query=QUERY_ID.format(id=translate[star]))
-           elif hasattr(simbad, 'gaia_id'):
+           elif hasattr(simbad, 'gaia_id') and simbad.gaia_id is not None:
                table = run_query(query=QUERY_ID.format(id=simbad.gaia_id))
            else:
                table = run_query(query=QUERY.format(**args))
+
+           if _debug:
+               print('table:', table)
+
            results = parse_csv(table)[0]
        except IndexError:
            raise ValueError(f'Gaia query for {star} failed')
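
The extra `gaia_id is not None` check guards against a simbad object whose Gaia DR3 lookup came back empty, and the new _debug flag prints the intermediate objects. A sketch (the star name is an example):

    from arvi.gaia_wrapper import gaia
    g = gaia('HD 10180', _debug=True)   # prints the simbad object and the raw query table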
arvi/plots.py CHANGED
@@ -230,13 +230,13 @@ def plot(self, ax=None, show_masked=False, instrument=None, time_offset=0,
            _label = label
 
        if versus_n:
-           container = ax.errorbar(np.arange(1, s.mtime.size + 1), s.mvrad, s.msvrad,
-                                   label=_label, picker=True, marker=next(markers), zorder=next(zorders),
-                                   **kwargs)
+           x = np.arange(1, s.mtime.size + 1)
        else:
-           container = ax.errorbar(s.mtime - time_offset, s.mvrad, s.msvrad,
-                                   label=_label, picker=True, marker=next(markers), zorder=next(zorders),
-                                   **kwargs)
+           x = s.mtime - time_offset
+
+       container = ax.errorbar(x, s.mvrad, s.msvrad, label=_label,
+                               picker=True, marker=next(markers), zorder=next(zorders), **kwargs)
+
 
        containers[inst] = list(container)
 
@@ -357,8 +357,6 @@ def plot(self, ax=None, show_masked=False, instrument=None, time_offset=0,
    plt.connect('pick_event', on_pick_point)
 
 
-
-
    if show_histogram:
        axh.legend()
 
@@ -585,10 +583,15 @@ def gls(self, ax=None, label=None, instrument=None,
    maximum_frequency = kwargs.pop('maximum_frequency', 1.0)
    minimum_frequency = kwargs.pop('minimum_frequency', None)
    samples_per_peak = kwargs.pop('samples_per_peak', 10)
+   kw = {
+       'maximum_frequency': maximum_frequency,
+       'minimum_frequency': minimum_frequency,
+       'samples_per_peak': samples_per_peak
+   }
+
+   freq, power = gls.autopower(**kw)
 
-   freq, power = gls.autopower(maximum_frequency=maximum_frequency,
-                               minimum_frequency=minimum_frequency,
-                               samples_per_peak=samples_per_peak)
+   show_peak_fap = kwargs.pop('show_peak_fap', False)
 
    if ax is None:
        fig, ax = plt.subplots(1, 1, constrained_layout=True)
@@ -607,14 +610,22 @@
    if isinstance(fap, float):
        fap_level = fap
 
-   fap = gls.false_alarm_level(fap_level, method=fap_method)
+   fap = gls.false_alarm_level(fap_level, method=fap_method, **kw)
 
-   if fap > 0.05 and fap_method == 'baluev':
-       logger.warning('FAP is high (>5%), the analytical estimate may be underestimated. Using the bootstrap method instead.')
-       fap = gls.false_alarm_level(fap_level, method='bootstrap')
+   # if fap > 0.05 and fap_method == 'baluev':
+   #     logger.warning('FAP is high (>5%), the analytical estimate may be underestimated. Using the bootstrap method instead.')
+   #     fap = gls.false_alarm_level(fap_level, method='bootstrap')
 
    ax.axhline(fap, color='k', alpha=0.2, zorder=-1)
 
+   if show_peak_fap:
+       peak_per = 1/freq[np.argmax(power)]
+       peak_power = np.max(power)
+       peak_fap = gls.false_alarm_probability(peak_power, method=fap_method, **kw)
+       ax.plot(peak_per, peak_power, 'o', color='r', zorder=1)
+       ax.annotate(f'{peak_per:1.3f} days\nFAP: {peak_fap:1.1e}', (peak_per, peak_power),
+                   va='top', textcoords='offset points', xytext=(10, 0), zorder=1)
+
    ax.set(xlabel='Period [days]', ylabel='Normalized power', ylim=(0, None))
    ax.minorticks_on()
 
@@ -743,10 +754,17 @@ def gls_quantity(self, quantity, ax=None, instrument=None,
    else:
        fig = ax.figure
 
-   spp = kwargs.get('samples_per_peak', 5)
+   maximum_frequency = kwargs.pop('maximum_frequency', 1.0)
+   minimum_frequency = kwargs.pop('minimum_frequency', None)
+   samples_per_peak = kwargs.pop('samples_per_peak', 10)
+   kw = {
+       'maximum_frequency': maximum_frequency,
+       'minimum_frequency': minimum_frequency,
+       'samples_per_peak': samples_per_peak
+   }
 
    gls = LombScargle(t, y, ye)
-   freq, power = gls.autopower(maximum_frequency=1.0, samples_per_peak=spp)
+   freq, power = gls.autopower(**kw)
 
    ax.semilogx(1/freq, power, picker=picker, **kwargs)
 
@@ -755,11 +773,11 @@ def gls_quantity(self, quantity, ax=None, instrument=None,
    if isinstance(fap, float):
        fap_level = fap
 
-   fap = gls.false_alarm_level(fap_level, method=fap_method)
+   fap = gls.false_alarm_level(fap_level, method=fap_method, **kw)
 
-   if fap > 0.05 and fap_method == 'baluev':
-       logger.warning('FAP is high (>5%), the analytical estimate may be underestimated. Using the bootstrap method instead.')
-       fap = gls.false_alarm_level(fap_level, method='bootstrap')
+   # if fap > 0.05 and fap_method == 'baluev':
+   #     logger.warning('FAP is high (>5%), the analytical estimate may be underestimated. Using the bootstrap method instead.')
+   #     fap = gls.false_alarm_level(fap_level, method='bootstrap', **kw)
 
    ax.axhline(fap, color='k', alpha=0.2, zorder=-1)
 
arvi/setup_logger.py CHANGED
@@ -1,7 +1,16 @@
 import sys
 from loguru import logger
 
-logger.remove()
+try:
+    import marimo as mo
+    if mo.running_in_notebook():
+        raise ImportError
+except (ImportError, ModuleNotFoundError):
+    pass
+
+else:
+    logger.remove()
+
+
 logger.configure(extra={"indent": ""})
 logger.add(
     sys.stdout,
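
The try/except/else flow above reads more easily as a plain condition: the default loguru sink is removed only when marimo is importable and the code is not running inside a marimo notebook. A sketch of the equivalent logic (using the same marimo API):

    import importlib.util
    remove_default_sink = False
    if importlib.util.find_spec('marimo') is not None:
        import marimo as mo
        remove_default_sink = not mo.running_in_notebook()
    if remove_default_sink:
        logger.remove()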
arvi/simbad_wrapper.py CHANGED
@@ -170,41 +170,52 @@ class simbad:
            if _debug:
                print('table1:', table1)
            cols, values = parse_table1(table1)
+       except IndexError:
+           raise ValueError(f'simbad query for {star} failed') from None
 
+       try:
            table2 = run_query(query=BV_QUERY.format(star=self.star))
            if _debug:
                print('table2:', table2)
            cols, values = parse_table1(table2, cols, values)
+       except IndexError:
+           self.B = self.V = np.nan
 
+       try:
            table3 = run_query(query=IDS_QUERY.format(star=self.star))
            if _debug:
                print('table3:', table3)
            line = table3.splitlines()[2]
            self.ids = line.replace('"', '').replace('  ', ' ').replace('  ', ' ').replace('  ', ' ').split('|')
+       except IndexError:
+           self.ids = []
+
+       table4 = run_query(query=FILTERS_QUERY.format(star=self.star))
+       if _debug:
+           print('table4:\n', table4)
+       for row in table4.splitlines()[2:]:
+           filter_name, mag, mag_err, bibcode = row.replace('"', '').split('|')
+           filter_name = filter_name.strip()
+           try:
+               setattr(self, '_' + filter_name, ufloat(float(mag), float(mag_err)))
+           except ValueError:
+               setattr(self, '_' + filter_name, float(mag))
 
-           table4 = run_query(query=FILTERS_QUERY.format(star=self.star))
-           for row in table4.splitlines()[2:]:
-               filter_name, mag, mag_err, bibcode = row.replace('"', '').split('|')
-               filter_name = filter_name.strip()
-               try:
-                   setattr(self, '_' + filter_name, ufloat(float(mag), float(mag_err)))
-               except ValueError:
-                   setattr(self, '_' + filter_name, float(mag))
-
-           # measurements table
-           table5 = run_query(query=MEAS_QUERY.format(star=self.star))
-           _teff, _logg, _feh, _bibcode = [], [], [], []
-           for row in table5.splitlines()[2:]:
-               teff, log_g, log_g_prec, fe_h, fe_h_prec, bibcode = row.replace('"', '').split('|')
-               _bibcode.append(bibcode)
-               _teff.append(parse_value(teff))
-               _logg.append(parse_value(log_g, prec=log_g_prec))
-               _feh.append(parse_value(fe_h, prec=fe_h_prec))
-
-           self.measurements = Measurements(_teff, _logg, _feh, _bibcode)
+       # measurements table
+       table5 = run_query(query=MEAS_QUERY.format(star=self.star))
+       if _debug:
+           print('table5:\n', table5)
+
+       _teff, _logg, _feh, _bibcode = [], [], [], []
+       for row in table5.splitlines()[2:]:
+           teff, log_g, log_g_prec, fe_h, fe_h_prec, bibcode = row.replace('"', '').split('|')
+           _bibcode.append(bibcode)
+           _teff.append(parse_value(teff))
+           _logg.append(parse_value(log_g, prec=log_g_prec))
+           _feh.append(parse_value(fe_h, prec=fe_h_prec))
+
+       self.measurements = Measurements(_teff, _logg, _feh, _bibcode)
 
-       except IndexError:
-           raise ValueError(f'simbad query for {star} failed') from None
 
        try:
            self.gaia_id = int([i for i in self.ids if 'Gaia DR3' in i][0]
@@ -245,8 +256,9 @@ class simbad:
 
        except IndexError:
            if self.sp_type == '':
-               self.teff = int(np.mean(self.measurements.teff))
-               self.sp_type = teff_to_sptype(self.teff)
+               if len(self.measurements.teff) > 0:
+                   self.teff = int(np.mean(self.measurements.teff))
+                   self.sp_type = teff_to_sptype(self.teff)
            elif self.sp_type[:2] in EFFECTIVE_TEMPERATURES:
                self.teff = EFFECTIVE_TEMPERATURES[self.sp_type[:2]]
 
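
Each SIMBAD sub-query now fails independently instead of aborting the whole object: missing photometry yields NaN magnitudes, a missing identifier table yields an empty list, and an empty measurements table no longer crashes the spectral-type fallback. A sketch (the star name is an example):

    from arvi.simbad_wrapper import simbad
    s = simbad('HD 10180')
    s.B, s.V    # np.nan if the B/V query returned no rows
    s.ids       # [] if the identifier query returned no rows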
arvi/timeseries.py CHANGED
@@ -14,6 +14,7 @@ from .translations import translate
 from .dace_wrapper import do_download_filetype, do_symlink_filetype, get_observations, get_arrays
 from .simbad_wrapper import simbad
 from .gaia_wrapper import gaia
+from .exofop_wrapper import exofop
 from .extra_data import get_extra_data
 from .stats import wmean, wrms
 from .binning import bin_ccf_mask, binRV
@@ -68,23 +69,29 @@ class RV:
    _did_adjust_means: bool = field(init=False, repr=False, default=False)
    _did_simbad_query: bool = field(init=False, repr=False, default=False)
    _did_gaia_query: bool = field(init=False, repr=False, default=False)
+   _did_toi_query: bool = field(init=False, repr=False, default=False)
    _raise_on_error: bool = field(init=True, repr=False, default=True)
+   __masked_numbers: bool = field(init=False, repr=False, default=False)
    #
    _simbad = None
    _gaia = None
+   _toi = None
 
    def __repr__(self):
        ni = len(self.instruments)
        if self.N == 0:
            return f"RV(star='{self.star}', N=0)"
 
-       i = f'{ni} instrument' + ('s' if ni > 1 else '')
+       if self._child:
+           i = ''
+       else:
+           i = f', {ni} instrument' + ('s' if ni > 1 else '')
 
        if self.time.size == self.mtime.size:
-           return f"RV(star='{self.star}', N={self.N}, {i})"
+           return f"RV(star='{self.star}', N={self.N}{i})"
        else:
            nmasked = self.N - self.mtime.size
-           return f"RV(star='{self.star}', N={self.N}, masked={nmasked}, {i})"
+           return f"RV(star='{self.star}', N={self.N}, masked={nmasked}{i})"
 
    @property
    def simbad(self):
@@ -144,6 +151,26 @@ class RV:
            self._did_gaia_query = True
        return self._gaia
 
+   @property
+   def toi(self):
+       if self._toi is not None:
+           return self._toi
+
+       if 'TOI' not in self.__star__ or 'TIC' not in self.__star__ or self._child or self._did_toi_query:
+           return None
+
+       if self.verbose:
+           logger.info('querying ExoFOP...')
+
+       try:
+           self._toi = exofop(self.__star__)
+       except ValueError:
+           if self.verbose:
+               logger.error(f'ExoFOP query for {self.__star__} failed')
+
+       self._did_toi_query = True
+       return self._toi
+
    def __post_init_special_sun(self):
        import pickle
        from .extra_data import get_sun_data
@@ -168,7 +195,7 @@ class RV:
        import concurrent.futures
        with timer('simbad and gaia queries'):
            with concurrent.futures.ThreadPoolExecutor(max_workers=2) as executor:
-               executor.map(self.__getattribute__, ('simbad', 'gaia'))
+               executor.map(self.__getattribute__, ('simbad', 'gaia', 'toi'))
 
        # with timer('simbad query'):
        #     self.simbad
@@ -241,7 +268,6 @@ class RV:
        # all other quantities
        self._build_arrays()
 
-
        if self.load_extra_data:
            if isinstance(self.load_extra_data, str):
                path = self.load_extra_data
@@ -314,6 +340,16 @@ class RV:
        for inst in self.instruments:
            yield getattr(self, inst)
 
+   @property
+   def _masked_numbers(self):
+       return self.__masked_numbers
+
+   @_masked_numbers.setter
+   def _masked_numbers(self, value):
+       self.__masked_numbers = value
+       if not self._child:
+           for s in self:
+               s._masked_numbers = value
 
    def reload(self):
        self._did_secular_acceleration = False
@@ -335,19 +371,35 @@ class RV:
    @property
    def N(self) -> int:
        """Total number of observations"""
+       if self._masked_numbers:
+           return self.mtime.size
        return self.time.size
 
    @property
    def NN(self):
        """ Total number of observations per instrument """
+       if self._child:
+           return {self.instruments[0]: self.N}
        return {inst: getattr(self, inst).N for inst in self.instruments}
 
    @property
    def N_nights(self) -> int:
        """ Number of individual nights """
-       if self.mtime.size == 0:
-           return 0
-       return binRV(self.mtime, None, None, binning_bins=True).size - 1
+       def get_nights(t):
+           return binRV(t, None, None, binning_bins=True).size - 1
+
+       if self._masked_numbers:
+           if self._child:
+               return get_nights(self.mtime)
+           else:
+               return sum([get_nights(s.mtime) for s in self])
+       else:
+           if self._child:
+               return get_nights(self.time)
+           else:
+               return sum([get_nights(s.time) for s in self])
+       # return binRV(_t, None, None, binning_bins=True).size - 1
+       # return sum(list(self.NN.values()))
 
    @property
    def NN_nights(self):
@@ -516,8 +568,8 @@ class RV:
        return s
 
    @classmethod
-   def from_rdb(cls, files, star=None, instrument=None, units='ms',
-                header_skip=2, **kwargs):
+   def from_rdb(cls, files, star=None, instrument=None, instrument_suffix=None,
+                units='ms', header_skip=2, **kwargs):
        """ Create an RV object from an rdb file or a list of rdb files
 
        Args:
@@ -568,6 +620,9 @@ class RV:
        if instruments.size == 1 and len(files) > 1:
            instruments = np.repeat(instruments, len(files))
 
+       if instrument_suffix is not None:
+           instruments = [i + instrument_suffix for i in instruments]
+
        factor = 1e3 if units == 'kms' else 1.0
 
        s = cls(star, _child=True, **kwargs)
@@ -744,6 +799,8 @@ class RV:
 
        s._child = False
        s.instruments = list(map(str, instruments))
+       s.filenames = list(map(str, files))
+
        s._build_arrays()
 
        if kwargs.get('do_adjust_means', False):
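
A hedged sketch of the new timeseries features (file and star names are examples):

    from arvi import RV

    # append a suffix to instrument names read from rdb files
    s = RV.from_rdb(['HD10180_ESPRESSO.rdb'], instrument_suffix='_new')  # -> 'ESPRESSO_new'

    # make N, NN and N_nights count only unmasked observations
    s._masked_numbers = True

    # lazy ExoFOP lookup via the new toi property
    RV('TIC260647166').toi   # None if the lookup is skipped or fails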
arvi-0.2.1.dist-info/METADATA → arvi-0.2.2.dist-info/METADATA RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: arvi
-Version: 0.2.1
+Version: 0.2.2
 Summary: The Automated RV Inspector
 Author-email: João Faria <joao.faria@unige.ch>
 License: MIT
arvi-0.2.1.dist-info/RECORD → arvi-0.2.2.dist-info/RECORD RENAMED
@@ -1,26 +1,27 @@
 arvi/HZ.py,sha256=u7rguhlILRBW-LOczlY3dkIB4LM8p8W7Xfg4FnNaYG0,2850
-arvi/__init__.py,sha256=r3oaMVwQCjNTd6odg-ga9IRncn54i7tawZrIrvSRQOo,1012
+arvi/__init__.py,sha256=sgl66ujggmM6wUh1Og65MAXfL0YqcrjY6o7E6iQOSsI,1048
 arvi/ariadne_wrapper.py,sha256=YvilopJa9T4NwPcj3Nah_U8smSeSAU5-HYZMb_GJ-BQ,2232
 arvi/berv.py,sha256=eKnpuPC1w45UrUEyFRbs9F9j3bXz3kxYzNXbnRgvFQM,17596
 arvi/binning.py,sha256=jbemJ-bM3aqoOsqMo_OhWt_co-JAQ0nhdG_GpTsrRsw,15403
 arvi/config.py,sha256=W-v8NNhRd_PROu0wCMilXmOhYcju4xbUalugd5u7SRU,1881
-arvi/dace_wrapper.py,sha256=EKKBIqxgmbqvyhsLFK65aOP0BcEH3jnuHHNgCOvKYlk,23169
-arvi/extra_data.py,sha256=cpJGMle0ZqY_dtrmbbMQcyU48PkNjfzUgQ-qY-2XTj8,3249
-arvi/gaia_wrapper.py,sha256=2q_7bm6MGvTLlegfNUCY_EhnMKYv1CZmcbanOm_ot-k,4197
+arvi/dace_wrapper.py,sha256=b0uBw2Or8ElsRky25iYP2uFkOb9I3BUo7lC-UwajcFg,23341
+arvi/exofop_wrapper.py,sha256=ceBLff_8TWqUKWsYMsTESI2dPGE4upAHAJjwWzmRj4o,2380
+arvi/extra_data.py,sha256=bInAgiZNuw5vkxcCYf1Ns4w72PoxpqwV48RFBIZ3rGE,3364
+arvi/gaia_wrapper.py,sha256=jfBdK9N9ZOqHIzE5MRFmXNyN3PAOT_uzXM23MIy0POY,4371
 arvi/headers.py,sha256=uvdJebw1M5YkGjE3vJJwYBOnLikib75uuZE9FXB5JJM,1673
 arvi/instrument_specific.py,sha256=ORjlw79EumEiGugmGn_2WBOuEPhsfgDNryEMBDe9RgM,10733
 arvi/kima_wrapper.py,sha256=BvNTVqzM4lMNhLCyBFVh3T84hHfGKAFpgiYiOi4lh0g,2731
 arvi/lbl_wrapper.py,sha256=_ViGVkpakvuBR_xhu9XJRV5EKHpj5Go6jBZGJZMIS2Y,11850
 arvi/nasaexo_wrapper.py,sha256=mWt7eHgSZe4MBKCmUvMPTyUPGuiwGTqKugNBvmjOg9s,7306
-arvi/plots.py,sha256=Ut9_AOhhlp-fsYaOacZoYnKBVnbpgxKw1BXlEgTphOo,32152
+arvi/plots.py,sha256=EheFTUldVaslO5GDpgCKl9jgyc_5ipluDTfmah05-5w,32693
 arvi/programs.py,sha256=BW7xBNKLei7NVLLW3_lsVskwzkaIoNRiHK2jn9Tn2ZM,8879
 arvi/reports.py,sha256=ayPdZ4HZO9iCDdnADQ18gQPJh79o-1UYG7TYkvm9Lrc,4051
-arvi/setup_logger.py,sha256=pBzaRTn0hntozjbaRVx0JIbWGuENkvYUApa6uB-FsRo,279
-arvi/simbad_wrapper.py,sha256=9hH7VczHAjLnOiqkrd1a6mTd-3Y-o_3d3SYJ7uyF_1Y,8406
+arvi/setup_logger.py,sha256=26Z0uyzlOJCYOT_pJixJZWQfWEND_DZl5358RadMD8Y,431
+arvi/simbad_wrapper.py,sha256=vDSsxwCsqiasjxOPJRUw3lC870qbIixLGQOaQOM8tgI,8654
 arvi/spectra.py,sha256=ebF1ocodTastLx0CyqLSpE8EZNDXBF8riyfxMr3L6H0,7491
 arvi/stats.py,sha256=ilzzGL9ew-SyVa9eEdrYCpD3DliOAwhoNUg9LIlHjzU,2583
 arvi/stellar.py,sha256=GQ7yweuBRnfkJ0M5eWjvLd8uvGq_by81PbXfidBvWis,4918
-arvi/timeseries.py,sha256=jZXO_dW0hzE0EmeUwZclMMywFSeBvMYZktNi7h-_kpc,88893
+arvi/timeseries.py,sha256=_p4Ovv_xGQkLxCCCyaU-KkdtcoK21RdBq58bBEpNPd0,90751
 arvi/translations.py,sha256=PUSrn4zvYO2MqGzUxlFGwev_tBkgJaJrIYs6NKHzbWo,951
 arvi/utils.py,sha256=V4uSpr75YVjE0NP3T5PxnfVQQ06nd-O8X679BfVyD30,7068
 arvi/data/info.svg,sha256=0IMI6W-eFoTD8acnury79WJJakpBwLa4qKS4JWpsXiI,489
@@ -29,8 +30,8 @@ arvi/data/obs_affected_blue_cryostat_issues.dat,sha256=z4AK17xfz8tGTDv1FjRvQFnio
 arvi/data/extra/HD86226_PFS1.rdb,sha256=vfAozbrKHM_j8dYkCBJsuHyD01KEM1asghe2KInwVao,3475
 arvi/data/extra/HD86226_PFS2.rdb,sha256=F2P7dB6gVyzCglUjNheB0hIHVClC5RmARrGwbrY1cfo,4114
 arvi/data/extra/metadata.json,sha256=C69hIw6CohyES6BI9vDWjxwSz7N4VOYX0PCgjXtYFmU,178
-arvi-0.2.1.dist-info/licenses/LICENSE,sha256=6JfQgl7SpM55t0EHMFNMnNh-AdkpGW25MwMiTnhdWQg,1068
-arvi-0.2.1.dist-info/METADATA,sha256=VkJZ7gNuP1IB2IMYxgrU8FBubpRjzCU_av0ZgY5AkH4,1920
-arvi-0.2.1.dist-info/WHEEL,sha256=CmyFI0kx5cdEMTLiONQRbGQwjIoR1aIYB7eCAQ4KPJ0,91
-arvi-0.2.1.dist-info/top_level.txt,sha256=4EeiKDVLD45ztuflTGfQ3TU8GVjJg5Y95xS5XjI-utU,5
-arvi-0.2.1.dist-info/RECORD,,
+arvi-0.2.2.dist-info/licenses/LICENSE,sha256=6JfQgl7SpM55t0EHMFNMnNh-AdkpGW25MwMiTnhdWQg,1068
+arvi-0.2.2.dist-info/METADATA,sha256=29i7G2YPkMftx3Mhk9UvitLbiWoiIqjWDPUshqrOK4Y,1920
+arvi-0.2.2.dist-info/WHEEL,sha256=SmOxYU7pzNKBqASvQJ7DjX3XGUF92lrGhMb3R6_iiqI,91
+arvi-0.2.2.dist-info/top_level.txt,sha256=4EeiKDVLD45ztuflTGfQ3TU8GVjJg5Y95xS5XjI-utU,5
+arvi-0.2.2.dist-info/RECORD,,
arvi-0.2.1.dist-info/WHEEL → arvi-0.2.2.dist-info/WHEEL RENAMED
@@ -1,5 +1,5 @@
 Wheel-Version: 1.0
-Generator: setuptools (78.1.0)
+Generator: setuptools (79.0.1)
 Root-Is-Purelib: true
 Tag: py3-none-any
 