arvi 0.1.29__py3-none-any.whl → 0.2.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of arvi has been flagged as potentially problematic; consult the registry's advisory page for this release for more details.

arvi/dace_wrapper.py CHANGED
@@ -15,6 +15,7 @@ def load_spectroscopy(user=None) -> SpectroscopyClass:
15
15
  from .config import config
16
16
  # requesting as public
17
17
  if config.request_as_public:
18
+ logger.warning('requesting DACE data as public')
18
19
  with all_logging_disabled():
19
20
  dace = DaceClass(dace_rc_config_path='none')
20
21
  return SpectroscopyClass(dace_instance=dace)
@@ -300,26 +301,53 @@ def get_observations(star, instrument=None, user=None, main_id=None, verbose=Tru
300
301
  msg = f'no {instrument} observations for {star}'
301
302
  raise ValueError(msg)
302
303
 
303
- # sort pipelines, being extra careful with HARPS pipeline names
304
- # (i.e. ensure that 3.x.x > 3.5)
304
+ # # sort pipelines, being extra careful with HARPS pipeline names
305
+ # # (i.e. ensure that 3.x.x > 3.5)
306
+ # from re import match
307
+ # def cmp(a, b):
308
+ # if a[0] in ('3.5', '3.5 EGGS') or 'EGGS' in a[0] and match(r'3.\d.\d', b[0]):
309
+ # return -1
310
+ # if b[0] in ('3.5', '3.5 EGGS') or 'EGGS' in b[0] and match(r'3.\d.\d', a[0]):
311
+ # return 1
312
+
313
+ # if a[0] == b[0]:
314
+ # return 0
315
+ # elif a[0] > b[0]:
316
+ # return 1
317
+ # else:
318
+ # return -1
319
+
320
+ # sort pipelines, must be extra careful with HARPS/HARPN pipeline version numbers
321
+ # got here with the help of DeepSeek
305
322
  from re import match
306
- def cmp(a, b):
307
- if a[0] in ('3.5', '3.5 EGGS') or 'EGGS' in a[0] and match(r'3.\d.\d', b[0]):
308
- return -1
309
- if b[0] in ('3.5', '3.5 EGGS') or 'EGGS' in b[0] and match(r'3.\d.\d', a[0]):
310
- return 1
311
-
312
- if a[0] == b[0]:
313
- return 0
314
- elif a[0] > b[0]:
315
- return 1
316
- else:
317
- return -1
318
-
319
- from functools import cmp_to_key
323
+ def custom_sort_key(s):
324
+ s = s[0]
325
+ # Check for version number pattern (e.g., 3.2.5 or 3.2.5-EGGS)
326
+ version_match = match(r'^(\d+(?:\.\d+)*)(?:-(.*))?$', s)
327
+ if version_match:
328
+ version_parts = tuple(map(int, version_match.group(1).split('.')))
329
+ suffix = version_match.group(2)
330
+
331
+ if suffix is not None:
332
+ # Suffixed versions: sort in ascending order (3.2.5-HR11 < 3.3.1-HR11)
333
+ return (0, 0, version_parts, suffix)
334
+ else:
335
+ # Unsuffixed versions: sort in descending order (3.5 > 3.2.5)
336
+ return (0, 1, tuple(-x for x in version_parts))
337
+
338
+ # Check for scientific reference pattern (e.g., 2004A&A...)
339
+ year_match = match(r'^(\d{4})', s)
340
+ if year_match:
341
+ year = int(year_match.group(1))
342
+ return (1, year)
343
+
344
+ # For all other strings, sort alphabetically
345
+ return (2, s)
346
+
347
+ # from functools import cmp_to_key
320
348
  new_result = {}
321
349
  for inst in instruments:
322
- new_result[inst] = dict(sorted(result[inst].items(), key=cmp_to_key(cmp), reverse=True))
350
+ new_result[inst] = dict(sorted(result[inst].items(), key=custom_sort_key, reverse=True))
323
351
 
324
352
  if verbose:
325
353
  logger.info('RVs available from')
@@ -2,12 +2,25 @@ import os, sys
2
2
  import numpy as np
3
3
 
4
4
  from .setup_logger import logger
5
+ from .utils import ESPRESSO_ADC_issues, ESPRESSO_cryostat_issues
5
6
 
6
7
 
8
+ # HARPS started operations in October 1st, 2003
9
+ # https://www.eso.org/sci/facilities/lasilla/instruments/harps/news.html
10
+ HARPS_start = 52913
11
+
7
12
  # HARPS fiber upgrade (28 May 2015)
8
13
  # https://www.eso.org/sci/facilities/lasilla/instruments/harps/news/harps_upgrade_2015.html
9
14
  HARPS_technical_intervention = 57170
10
15
 
16
+ # From Lo Curto et al. (2015), The Messenger, vol. 162, p. 9-15
17
+ # On **19 May 2015** HARPS stopped operations and the instrument was opened.
18
+ # Installation and alignment of the fibre link lasted roughly one week. On 29
19
+ # May, the vacuum vessel was closed and evacuated for the last time. Finally, a
20
+ # formal commissioning of the new fibre took place, finishing on **3 June**,
21
+ # when the instrument was handed back to Science Operations.
22
+ HARPS_technical_intervention_range = (57161, 57176)
23
+
11
24
  # ESPRESSO fiber link upgrade (1 July 2019)
12
25
  ESPRESSO_technical_intervention = 58665
13
26
 
@@ -86,10 +99,78 @@ def divide_HARPS(self):
86
99
  logger.info(f'divided HARPS into {self.instruments}')
87
100
 
88
101
 
102
+ def check(self, instrument):
103
+ instruments = self._check_instrument(instrument)
104
+ if instruments is None:
105
+ if self.verbose:
106
+ logger.error(f"HARPS_fiber_commissioning: no data from {instrument}")
107
+ return None
108
+ return instruments
109
+
110
+
111
+ # HARPS commissioning
112
+ def HARPS_commissioning(self, mask=True, plot=True):
113
+ """ Identify and optionally mask points during HARPS commissioning (HARPS).
114
+
115
+ Args:
116
+ mask (bool, optional):
117
+ Whether to mask out the points.
118
+ plot (bool, optional):
119
+ Whether to plot the masked points.
120
+ """
121
+ if check(self, 'HARPS') is None:
122
+ return
123
+
124
+ affected = self.time < HARPS_start
125
+ total_affected = affected.sum()
126
+
127
+ if self.verbose:
128
+ n = total_affected
129
+ logger.info(f"there {'are'[:n^1]}{'is'[n^1:]} {n} frame{'s'[:n^1]} "
130
+ "during HARPS commissioning")
131
+
132
+ if mask:
133
+ self.mask[affected] = False
134
+ self._propagate_mask_changes()
135
+
136
+ if plot:
137
+ self.plot(show_masked=True)
138
+
139
+ return affected
89
140
 
90
- # ESPRESSO ADC issues
91
- from .utils import ESPRESSO_ADC_issues
92
141
 
142
+ # HARPS fiber commissioning
143
+ def HARPS_fiber_commissioning(self, mask=True, plot=True):
144
+ """ Identify and optionally mask points affected by fiber commissioning (HARPS).
145
+
146
+ Args:
147
+ mask (bool, optional):
148
+ Whether to mask out the points.
149
+ plot (bool, optional):
150
+ Whether to plot the masked points.
151
+ """
152
+ if check(self, 'HARPS') is None:
153
+ return
154
+
155
+ affected = (self.time >= HARPS_technical_intervention_range[0]) & (self.time <= HARPS_technical_intervention_range[1])
156
+ total_affected = affected.sum()
157
+
158
+ if self.verbose:
159
+ n = total_affected
160
+ logger.info(f"there {'are'[:n^1]}{'is'[n^1:]} {n} frame{'s'[:n^1]} "
161
+ "during the HARPS fiber commissioning period")
162
+
163
+ if mask:
164
+ self.mask[affected] = False
165
+ self._propagate_mask_changes()
166
+
167
+ if plot:
168
+ self.plot(show_masked=True)
169
+
170
+ return affected
171
+
172
+
173
+ # ESPRESSO ADC issues
93
174
  def ADC_issues(self, mask=True, plot=True, check_headers=False):
94
175
  """ Identify and optionally mask points affected by ADC issues (ESPRESSO).
95
176
 
@@ -137,8 +218,6 @@ def ADC_issues(self, mask=True, plot=True, check_headers=False):
137
218
  return intersect
138
219
 
139
220
  # ESPRESSO cryostat issues
140
- from .utils import ESPRESSO_cryostat_issues
141
-
142
221
  def blue_cryostat_issues(self, mask=True, plot=True):
143
222
  """ Identify and mask points affected by blue cryostat issues (ESPRESSO).
144
223
 
@@ -221,7 +300,7 @@ def qc_scired_issues(self, plot=False, **kwargs):
221
300
 
222
301
 
223
302
  def known_issues(self, mask=True, plot=False, **kwargs):
224
- """ Identify and optionally mask known instrumental issues (ADC and blue cryostat for ESPRESSO)
303
+ """ Identify and optionally mask known instrumental issues.
225
304
 
226
305
  Args:
227
306
  mask (bool, optional): Whether to mask out the points.
@@ -230,18 +309,28 @@ def known_issues(self, mask=True, plot=False, **kwargs):
230
309
  try:
231
310
  adc = ADC_issues(self, mask, plot, **kwargs)
232
311
  except IndexError:
233
- # logger.error(e)
234
312
  logger.error('are the data binned? cannot proceed to mask these points...')
235
313
 
236
314
  try:
237
315
  cryostat = blue_cryostat_issues(self, mask, plot)
238
316
  except IndexError:
239
- # logger.error(e)
240
317
  logger.error('are the data binned? cannot proceed to mask these points...')
241
318
 
242
- if adc is None and cryostat is None:
243
- return
244
319
  try:
245
- return adc | cryostat
320
+ harps_comm = HARPS_commissioning(self, mask, plot)
321
+ except IndexError:
322
+ logger.error('are the data binned? cannot proceed to mask these points...')
323
+
324
+ try:
325
+ harps_fibers = HARPS_fiber_commissioning(self, mask, plot)
326
+ except IndexError:
327
+ logger.error('are the data binned? cannot proceed to mask these points...')
328
+
329
+ # if None in (adc, cryostat, harps_comm, harps_fibers):
330
+ # return
331
+
332
+ try:
333
+ # return adc | cryostat
334
+ return np.logical_or.reduce((adc, cryostat, harps_comm, harps_fibers))
246
335
  except UnboundLocalError:
247
336
  return
arvi/plots.py CHANGED
@@ -137,8 +137,8 @@ def clickable_legend(fig, ax, leg):
137
137
 
138
138
  # @plot_fast
139
139
  def plot(self, ax=None, show_masked=False, instrument=None, time_offset=0,
140
- remove_50000=False, tooltips=False, show_legend=True, label=None,
141
- N_in_label=False, versus_n=False, show_histogram=False, bw=False, **kwargs):
140
+ remove_50000=False, tooltips=True, show_title=False, show_legend=True, label=None,
141
+ jitter=None, N_in_label=False, versus_n=False, show_histogram=False, bw=False, **kwargs):
142
142
  """ Plot the RVs
143
143
 
144
144
  Args:
@@ -154,6 +154,8 @@ def plot(self, ax=None, show_masked=False, instrument=None, time_offset=0,
154
154
  Whether to subtract 50000 from time. Defaults to False.
155
155
  tooltips (bool, optional):
156
156
  Show information upon clicking a point. Defaults to True.
157
+ show_title (bool, optional):
158
+ Show the star name in the plot title. Defaults to False.
157
159
  show_legend (bool, optional):
158
160
  Show legend. Defaults to True.
159
161
  N_in_label (bool, optional):
@@ -371,6 +373,9 @@ def plot(self, ax=None, show_masked=False, instrument=None, time_offset=0,
371
373
  else:
372
374
  ax.set_xlabel('BJD - 2400000 [days]')
373
375
 
376
+ if show_title:
377
+ ax.set_title(self.star, loc='right')
378
+
374
379
  # from matplotlib.backend_tools import ToolBase, ToolToggleBase
375
380
  # tm = fig.canvas.manager.toolmanager
376
381
  # class InfoTool(ToolToggleBase):
arvi/simbad_wrapper.py CHANGED
@@ -10,7 +10,9 @@ try:
10
10
  except ImportError:
11
11
  ufloat = lambda x, y: x
12
12
 
13
+ from .stellar import EFFECTIVE_TEMPERATURES, teff_to_sptype
13
14
  from .translations import translate
15
+ from .setup_logger import logger
14
16
 
15
17
  DATA_PATH = os.path.dirname(__file__)
16
18
  DATA_PATH = os.path.join(DATA_PATH, 'data')
@@ -71,15 +73,15 @@ class Measurements:
71
73
  bibcode: list
72
74
 
73
75
 
74
- def run_query(query):
75
- url = 'http://simbad.u-strasbg.fr/simbad/sim-tap/sync'
76
+ def run_query(query, SIMBAD_URL='http://simbad.u-strasbg.fr'):
77
+ url = f'{SIMBAD_URL}/simbad/sim-tap/sync'
76
78
  data = dict(query=query, request='doQuery', lang='ADQL', format='text/plain', phase='run')
77
79
  try:
78
80
  response = requests.post(url, data=data, timeout=2)
79
81
  except requests.ReadTimeout as err:
80
- raise IndexError(err)
82
+ raise IndexError(err) from None
81
83
  except requests.ConnectionError as err:
82
- raise IndexError(err)
84
+ raise IndexError(err) from None
83
85
  return response.content.decode()
84
86
 
85
87
  def parse_table1(table, cols=None, values=None):
@@ -120,14 +122,6 @@ def parse_value(value, err=None, prec=None):
120
122
  return v
121
123
 
122
124
 
123
- effective_temperatures = {
124
- 'F0': 7350, 'F2': 7050, 'F3': 6850, 'F5': 6700, 'F6': 6550, 'F7': 6400, 'F8': 6300,
125
- 'G0': 6050, 'G1': 5930, 'G2': 5800, 'G5': 5660, 'G8': 5440,
126
- 'K0': 5240, 'K1': 5110, 'K2': 4960, 'K3': 4800, 'K4': 4600, 'K5': 4400, 'K7': 4000,
127
- 'M0': 3750, 'M1': 3700, 'M2': 3600, 'M3': 3500, 'M4': 3400, 'M5': 3200, 'M6': 3100, 'M7': 2900, 'M8': 2700,
128
- }
129
-
130
-
131
125
  class simbad:
132
126
  """
133
127
  A very simple wrapper around a TAP query to simbad for a given target. This
@@ -146,7 +140,7 @@ class simbad:
146
140
  V (float): V magnitude
147
141
  ids (list): list of identifiers
148
142
  """
149
- def __init__(self, star:str):
143
+ def __init__(self, star:str, _debug=False):
150
144
  """
151
145
  Args:
152
146
  star (str): The name of the star to query simbad
@@ -173,12 +167,18 @@ class simbad:
173
167
 
174
168
  try:
175
169
  table1 = run_query(query=QUERY.format(star=self.star))
170
+ if _debug:
171
+ print('table1:', table1)
176
172
  cols, values = parse_table1(table1)
177
173
 
178
174
  table2 = run_query(query=BV_QUERY.format(star=self.star))
175
+ if _debug:
176
+ print('table2:', table2)
179
177
  cols, values = parse_table1(table2, cols, values)
180
178
 
181
179
  table3 = run_query(query=IDS_QUERY.format(star=self.star))
180
+ if _debug:
181
+ print('table3:', table3)
182
182
  line = table3.splitlines()[2]
183
183
  self.ids = line.replace('"', '').replace(' ', ' ').replace(' ', ' ').replace(' ', ' ').split('|')
184
184
 
@@ -203,9 +203,8 @@ class simbad:
203
203
 
204
204
  self.measurements = Measurements(_teff, _logg, _feh, _bibcode)
205
205
 
206
-
207
206
  except IndexError:
208
- raise ValueError(f'simbad query for {star} failed')
207
+ raise ValueError(f'simbad query for {star} failed') from None
209
208
 
210
209
  try:
211
210
  self.gaia_id = int([i for i in self.ids if 'Gaia DR3' in i][0]
@@ -242,10 +241,14 @@ class simbad:
242
241
  raise IndexError
243
242
  else:
244
243
  self.teff = data['teff']
244
+ self.sweetcat = data
245
245
 
246
246
  except IndexError:
247
- if self.sp_type[:2] in effective_temperatures:
248
- self.teff = effective_temperatures[self.sp_type[:2]]
247
+ if self.sp_type == '':
248
+ self.teff = int(np.mean(self.measurements.teff))
249
+ self.sp_type = teff_to_sptype(self.teff)
250
+ elif self.sp_type[:2] in EFFECTIVE_TEMPERATURES:
251
+ self.teff = EFFECTIVE_TEMPERATURES[self.sp_type[:2]]
249
252
 
250
253
  def __repr__(self):
251
254
  V = self.V
arvi/stellar.py CHANGED
@@ -1,6 +1,31 @@
1
-
2
1
  import numpy as np
3
2
 
3
+ # from Table 5 of Pecaut & Mamajek (2013, ApJS, 208, 9; http://adsabs.harvard.edu/abs/2013ApJS..208....9P)
4
+ # https://www.pas.rochester.edu/~emamajek/EEM_dwarf_UBVIJHK_colors_Teff.txt
5
+ EFFECTIVE_TEMPERATURES = {
6
+ 'F0': 7220, 'F1': 7020, 'F2': 6820, 'F3': 6750, 'F4': 6670, 'F5': 6550, 'F6': 6350, 'F7': 6280, 'F8': 6180, 'F9': 6050,
7
+ 'G0': 5930, 'G1': 5860, 'G2': 5770, 'G3': 5720, 'G4': 5680, 'G5': 5660, 'G6': 5600, 'G7': 5550, 'G8': 5480, 'G9': 5380,
8
+ 'K0': 5270, 'K1': 5170, 'K2': 5100, 'K3': 4830, 'K4': 4600, 'K5': 4440, 'K6': 4300, 'K7': 4100, 'K8': 3990, 'K9': 3930,
9
+ 'M0': 3850, 'M1': 3660, 'M2': 3560, 'M3': 3430, 'M4': 3210, 'M5': 3060, 'M6': 2810, 'M7': 2680, 'M8': 2570, 'M9': 2380,
10
+ }
11
+
12
+ def teff_to_sptype(teff):
13
+ """
14
+ Estimate the spectral type from the effective temperature, using the
15
+ Pecaut & Mamajek (2013) table.
16
+
17
+ Args:
18
+ teff (float): Effective temperature
19
+
20
+ Returns:
21
+ str: Spectral type
22
+ """
23
+ teffs = list(EFFECTIVE_TEMPERATURES.values())
24
+ sptypes = list(EFFECTIVE_TEMPERATURES.keys())
25
+ i = np.argmin(np.abs(np.array(teffs) - teff))
26
+ return sptypes[i]
27
+
28
+
4
29
  class prot_age_result:
5
30
  prot_n84 = None #: float | np.ndarray
6
31
  prot_n84_err = None #: float | np.ndarray
arvi/timeseries.py CHANGED
@@ -414,9 +414,9 @@ class RV:
414
414
  ind = np.where(self.instrument_array == instrument)[0]
415
415
  return ind[getattr(self, instrument).mask][index]
416
416
 
417
- @property
418
- def _tt(self) -> np.ndarray:
419
- return np.linspace(self.mtime.min(), self.mtime.max(), 20*self.N)
417
+ # @property
418
+ def _tt(self, f=20) -> np.ndarray:
419
+ return np.linspace(self.mtime.min(), self.mtime.max(), f*self.N)
420
420
 
421
421
  @classmethod
422
422
  def from_dace_data(cls, star, inst, pipe, mode, data, **kwargs):
@@ -765,13 +765,19 @@ class RV:
765
765
  if isinstance(files, str):
766
766
  files = [files]
767
767
 
768
- CCFs = iCCF.from_file(files)
768
+ hdu_number = kwargs.pop('hdu_number', 1)
769
+ data_index = kwargs.pop('data_index', -1)
770
+ CCFs = iCCF.from_file(files, hdu_number=hdu_number, data_index=data_index)
769
771
 
770
772
  if not isinstance(CCFs, list):
771
773
  CCFs = [CCFs]
772
774
 
773
- objects = np.unique([i.HDU[0].header['OBJECT'].replace(' ', '') for i in CCFs])
774
- if objects.size != 1:
775
+ try:
776
+ objects = np.unique([i.OBJECT for i in CCFs])
777
+ except AttributeError:
778
+ objects = np.unique([i.HDU[0].header['OBJECT'].replace(' ', '') for i in CCFs])
779
+
780
+ if len(objects) != 1:
775
781
  logger.warning(f'found {objects.size} different stars in the CCF files, '
776
782
  'choosing the first one')
777
783
  star = objects[0]
@@ -808,7 +814,7 @@ class RV:
808
814
 
809
815
  _s.mask = np.full_like(_s.time, True, dtype=bool)
810
816
 
811
- _s.drs_qc = np.array([i.HDU[0].header['HIERARCH ESO QC SCIRED CHECK'] for i in CCFs], dtype=bool)
817
+ _s.drs_qc = np.array([i.HDU[0].header['*QC SCIRED CHECK'][0] for i in CCFs], dtype=bool)
812
818
  # mask out drs_qc = False
813
819
  if not _s.drs_qc.all():
814
820
  n = (~ _s.drs_qc).sum()
@@ -859,7 +865,7 @@ class RV:
859
865
  fits_file = f'{star}_RVs.fits'
860
866
 
861
867
  local_exists = os.path.exists(local_targz_file)
862
- local_recent = os.path.getmtime(local_targz_file) > pytime() - 60*60*2
868
+ local_recent = local_exists and os.path.getmtime(local_targz_file) > pytime() - 60*60*2
863
869
 
864
870
  if os.path.exists(os.path.join(directory, fits_file)):
865
871
  logger.info(f'found file "{fits_file}" in "{directory}"')
@@ -877,7 +883,6 @@ class RV:
877
883
  else:
878
884
  resp = requests.get(f'https://kobe.caha.es/internal/fitsfiles/{fits_file}',
879
885
  auth=HTTPBasicAuth('kobeteam', config.kobe_password))
880
- logger.info(f'found file "{fits_file}" on server')
881
886
 
882
887
  if resp.status_code != 200:
883
888
  # something went wrong, try to extract the file by downloading the
@@ -904,6 +909,7 @@ class RV:
904
909
  hdul = fits.open(tar.extractfile(fits_file))
905
910
 
906
911
  else:
912
+ logger.info(f'found file "{fits_file}" on server')
907
913
  # found the file on the server, read it directly
908
914
  hdul = fits.open(BytesIO(resp.content))
909
915
 
@@ -1050,7 +1056,7 @@ class RV:
1050
1056
  self._download_directory = value
1051
1057
 
1052
1058
  def download_ccf(self, instrument=None, index=None, limit=None,
1053
- directory=None, symlink=False, load=True, **kwargs):
1059
+ directory=None, clobber=False, symlink=False, load=True, **kwargs):
1054
1060
  """ Download CCFs from DACE
1055
1061
 
1056
1062
  Args:
@@ -1058,6 +1064,7 @@ class RV:
1058
1064
  index (int): Specific index of point for which to download data (0-based)
1059
1065
  limit (int): Maximum number of files to download.
1060
1066
  directory (str): Directory where to store data.
1067
+ clobber (bool): Whether to overwrite existing files.
1061
1068
  """
1062
1069
  directory = directory or self.download_directory
1063
1070
 
@@ -1079,7 +1086,7 @@ class RV:
1079
1086
  logger.warning('may need to provide `top_level` in kwargs to find file')
1080
1087
  do_symlink_filetype('CCF', files[:limit], directory, **kwargs)
1081
1088
  else:
1082
- do_download_filetype('CCF', files[:limit], directory,
1089
+ do_download_filetype('CCF', files[:limit], directory, clobber=clobber,
1083
1090
  verbose=self.verbose, user=self.user, **kwargs)
1084
1091
 
1085
1092
  if load:
@@ -1105,7 +1112,7 @@ class RV:
1105
1112
  pass
1106
1113
 
1107
1114
  def download_s1d(self, instrument=None, index=None, limit=None,
1108
- directory=None, symlink=False, **kwargs):
1115
+ directory=None, clobber=False, apply_mask=True, symlink=False, **kwargs):
1109
1116
  """ Download S1Ds from DACE
1110
1117
 
1111
1118
  Args:
@@ -1113,6 +1120,8 @@ class RV:
1113
1120
  index (int): Specific index of point for which to download data (0-based)
1114
1121
  limit (int): Maximum number of files to download.
1115
1122
  directory (str): Directory where to store data.
1123
+ clobber (bool): Whether to overwrite existing files.
1124
+ apply_mask (bool): Apply mask to the observations before downloading.
1116
1125
  """
1117
1126
  directory = directory or self.download_directory
1118
1127
 
@@ -1120,7 +1129,11 @@ class RV:
1120
1129
  instrument = self._check_instrument(instrument, strict=strict)
1121
1130
  files = []
1122
1131
  for inst in instrument:
1123
- files += list(getattr(self, inst).raw_file)
1132
+ _s = getattr(self, inst)
1133
+ if apply_mask:
1134
+ files += list(_s.raw_file[_s.mask])
1135
+ else:
1136
+ files += list(_s.raw_file)
1124
1137
 
1125
1138
  if index is not None:
1126
1139
  index = np.atleast_1d(index)
@@ -1134,11 +1147,11 @@ class RV:
1134
1147
  logger.warning('may need to provide `top_level` in kwargs to find file')
1135
1148
  do_symlink_filetype('S1D', files[:limit], directory, **kwargs)
1136
1149
  else:
1137
- do_download_filetype('S1D', files[:limit], directory,
1150
+ do_download_filetype('S1D', files[:limit], directory, clobber=clobber,
1138
1151
  verbose=self.verbose, user=self.user, **kwargs)
1139
1152
 
1140
1153
  def download_s2d(self, instrument=None, index=None, limit=None,
1141
- directory=None, symlink=False, **kwargs):
1154
+ directory=None, clobber=False, symlink=False, **kwargs):
1142
1155
  """ Download S2Ds from DACE
1143
1156
 
1144
1157
  Args:
@@ -1146,6 +1159,7 @@ class RV:
1146
1159
  index (int): Specific index of point for which to download data (0-based)
1147
1160
  limit (int): Maximum number of files to download.
1148
1161
  directory (str): Directory where to store data.
1162
+ clobber (bool): Whether to overwrite existing files.
1149
1163
  """
1150
1164
  directory = directory or self.download_directory
1151
1165
 
@@ -1722,7 +1736,7 @@ class RV:
1722
1736
 
1723
1737
  if snew.verbose and len(bad_quantities) > 0:
1724
1738
  logger.warning(f"{inst}, skipping non-float quantities in binning:")
1725
- logger.warning(' ' + str(bad_quantities))
1739
+ logger.warning(' ' + str(list(map(str, bad_quantities))))
1726
1740
  for bq in bad_quantities:
1727
1741
  s._quantities = np.delete(s._quantities, s._quantities==bq)
1728
1742
  delattr(s, bq) #! careful here
@@ -1731,7 +1745,7 @@ class RV:
1731
1745
  s.mask = np.full(tb.shape, True)
1732
1746
 
1733
1747
  if snew.verbose and len(all_bad_quantities) > 0:
1734
- logger.warning('\nnew object will not have these non-float quantities')
1748
+ logger.warning('\nnew object will not have these quantities')
1735
1749
 
1736
1750
  for q in np.unique(all_bad_quantities):
1737
1751
  delattr(snew, q)
@@ -1771,14 +1785,18 @@ class RV:
1771
1785
  s.vrad += self._meanRV
1772
1786
  self._build_arrays()
1773
1787
 
1774
- def adjust_means(self, just_rv=False, instrument=None, **kwargs):
1788
+ def adjust_means(self, just_rv=False, exclude_rv=False, instrument=None, **kwargs):
1775
1789
  """
1776
- Subtract individual mean RVs from each instrument or from specific
1777
- instruments
1790
+ Subtract individual weighted mean RV from each instrument or from
1791
+ specific instruments
1778
1792
  """
1779
1793
  if self._child or self._did_adjust_means:
1780
1794
  return
1781
1795
 
1796
+ if just_rv and exclude_rv:
1797
+ logger.error('cannot use `just_rv` and `exclude_rv` at the same time')
1798
+ return
1799
+
1782
1800
  # if self.verbose:
1783
1801
  # print_as_table = len(self.instruments) > 2 and len(self.instruments) < 7
1784
1802
  # rows = [self.instruments]
@@ -1807,14 +1825,12 @@ class RV:
1807
1825
  s.vrad = np.zeros_like(s.time)
1808
1826
  continue
1809
1827
 
1810
- s.rv_mean = wmean(s.mvrad, s.msvrad)
1811
- s.vrad -= s.rv_mean
1828
+ if not exclude_rv:
1829
+ s.rv_mean = wmean(s.mvrad, s.msvrad)
1830
+ s.vrad -= s.rv_mean
1812
1831
 
1813
- if self.verbose:
1814
- # if print_as_table:
1815
- # row.append(f'{s.rv_mean:.3f}')
1816
- # else:
1817
- logger.info(f'subtracted weighted average from {inst:10s}: ({s.rv_mean:.3f} {self.units})')
1832
+ if self.verbose:
1833
+ logger.info(f'subtracted weighted average from {inst:10s}: ({s.rv_mean:.3f} {self.units})')
1818
1834
 
1819
1835
  if just_rv:
1820
1836
  continue
@@ -1828,6 +1844,9 @@ class RV:
1828
1844
  setattr(s, f'{other}_mean', m)
1829
1845
  setattr(s, other, getattr(s, other) - m)
1830
1846
 
1847
+ if self.verbose:
1848
+ logger.info(f'subtracted weighted averages from {others}')
1849
+
1831
1850
  # if print_as_table:
1832
1851
  # from .utils import pretty_print_table
1833
1852
  # rows.append(row)
@@ -2142,13 +2161,13 @@ class RV:
2142
2161
  # if self.verbose:
2143
2162
  # logger.warning(f'masking {nan_mask.sum()} observations with NaN in indicators')
2144
2163
 
2145
- header = '\t'.join(['bjd', 'vrad', 'svrad',
2164
+ header = '\t'.join(['rjd', 'vrad', 'svrad',
2146
2165
  'fwhm', 'sig_fwhm',
2147
2166
  'bispan', 'sig_bispan',
2148
2167
  'contrast', 'sig_contrast',
2149
2168
  'rhk', 'sig_rhk',
2150
2169
  'berv',
2151
- ])
2170
+ ])
2152
2171
  header += '\n'
2153
2172
  header += '\t'.join(['-' * len(c) for c in header.strip().split('\t')])
2154
2173
 
@@ -2159,7 +2178,7 @@ class RV:
2159
2178
  arrays = [_s.mtime, _s.mvrad, _s.msvrad]
2160
2179
 
2161
2180
  # d = np.stack(arrays, axis=1)
2162
- header = 'bjd\tvrad\tsvrad\n---\t----\t-----'
2181
+ header = 'rjd\tvrad\tsvrad\n---\t----\t-----'
2163
2182
 
2164
2183
  file = f'{star_name}_{inst}.rdb'
2165
2184
  if postfix is not None:
@@ -2292,7 +2311,11 @@ class RV:
2292
2311
  self.star_mass = float(input('stellar mass (Msun): '))
2293
2312
  if not hasattr(self, 'lum'):
2294
2313
  self.lum = float(input('luminosity (Lsun): '))
2295
- return getHZ_period(self.simbad.teff, self.star_mass, 1.0, self.lum)
2314
+ if hasattr(self, 'teff'):
2315
+ teff = self.teff
2316
+ else:
2317
+ teff = self.simbad.teff
2318
+ return getHZ_period(teff, self.star_mass, 1.0, self.lum)
2296
2319
 
2297
2320
 
2298
2321
  @property
arvi/utils.py CHANGED
@@ -197,6 +197,18 @@ def ESPRESSO_cryostat_issues():
197
197
  return np.array(file_roots)
198
198
 
199
199
 
200
+ def get_ra_sexagesimal(ra):
201
+ """ Convert RA in degrees to sexagesimal string representation. """
202
+ from astropy.coordinates import Angle
203
+ from astropy import units as u
204
+ return Angle(ra, unit=u.deg).to(u.hourangle).to_string(sep=':', pad=True)
205
+
206
+ def get_dec_sexagesimal(dec):
207
+ """ Convert DEC in degrees to sexagesimal string representation. """
208
+ from astropy.coordinates import Angle
209
+ from astropy import units as u
210
+ return Angle(dec, unit=u.deg).to_string(sep=':', pad=True)
211
+
200
212
  def get_max_berv_span(self, n=None):
201
213
  """
202
214
  Return the indices of the n observations which maximize the BERV span.
@@ -1,6 +1,6 @@
1
- Metadata-Version: 2.2
1
+ Metadata-Version: 2.4
2
2
  Name: arvi
3
- Version: 0.1.29
3
+ Version: 0.2.0
4
4
  Summary: The Automated RV Inspector
5
5
  Author-email: João Faria <joao.faria@unige.ch>
6
6
  License: MIT
@@ -20,6 +20,7 @@ Requires-Dist: loguru
20
20
  Requires-Dist: tqdm
21
21
  Requires-Dist: pySWEETCat
22
22
  Requires-Dist: kepmodel
23
+ Dynamic: license-file
23
24
 
24
25
  <p align="center">
25
26
  <img width = "140" src="https://github.com/j-faria/arvi/blob/main/docs/logo/logo.png?raw=true"/>
@@ -58,7 +59,7 @@ s = RV('HD1234', instrument='ESPRESSO')
58
59
 
59
60
  #### Current version
60
61
 
61
- ![PyPI - Version](https://img.shields.io/pypi/v/arvi)
62
+ [![PyPI - Version](https://img.shields.io/pypi/v/arvi?color=32c854)](https://pypi.org/project/arvi/)
62
63
 
63
64
  #### Actions
64
65
 
@@ -4,33 +4,33 @@ arvi/ariadne_wrapper.py,sha256=YvilopJa9T4NwPcj3Nah_U8smSeSAU5-HYZMb_GJ-BQ,2232
4
4
  arvi/berv.py,sha256=eKnpuPC1w45UrUEyFRbs9F9j3bXz3kxYzNXbnRgvFQM,17596
5
5
  arvi/binning.py,sha256=jbemJ-bM3aqoOsqMo_OhWt_co-JAQ0nhdG_GpTsrRsw,15403
6
6
  arvi/config.py,sha256=W-v8NNhRd_PROu0wCMilXmOhYcju4xbUalugd5u7SRU,1881
7
- arvi/dace_wrapper.py,sha256=dwGj_XuN8J5An9I8ioeK7saj2TNLwwcobOu6oRo_HmM,22228
7
+ arvi/dace_wrapper.py,sha256=G1b2O7JNhYbfSXorRqqs8oZJ6rtJ1t3KMCE4wjjgJmQ,23446
8
8
  arvi/extra_data.py,sha256=cpJGMle0ZqY_dtrmbbMQcyU48PkNjfzUgQ-qY-2XTj8,3249
9
9
  arvi/gaia_wrapper.py,sha256=2q_7bm6MGvTLlegfNUCY_EhnMKYv1CZmcbanOm_ot-k,4197
10
10
  arvi/headers.py,sha256=uvdJebw1M5YkGjE3vJJwYBOnLikib75uuZE9FXB5JJM,1673
11
- arvi/instrument_specific.py,sha256=-pbm2Vk3iK_1K7nDa1avlJOKHBcXllwILI4lQn-Ze-A,7761
11
+ arvi/instrument_specific.py,sha256=ORjlw79EumEiGugmGn_2WBOuEPhsfgDNryEMBDe9RgM,10733
12
12
  arvi/kima_wrapper.py,sha256=BvNTVqzM4lMNhLCyBFVh3T84hHfGKAFpgiYiOi4lh0g,2731
13
13
  arvi/lbl_wrapper.py,sha256=_ViGVkpakvuBR_xhu9XJRV5EKHpj5Go6jBZGJZMIS2Y,11850
14
14
  arvi/nasaexo_wrapper.py,sha256=mWt7eHgSZe4MBKCmUvMPTyUPGuiwGTqKugNBvmjOg9s,7306
15
- arvi/plots.py,sha256=gZzwv1VbWMml3tF0ET8Z56ekA-VHd0nZw11XR-Qo0GA,31950
15
+ arvi/plots.py,sha256=Ut9_AOhhlp-fsYaOacZoYnKBVnbpgxKw1BXlEgTphOo,32152
16
16
  arvi/programs.py,sha256=BW7xBNKLei7NVLLW3_lsVskwzkaIoNRiHK2jn9Tn2ZM,8879
17
17
  arvi/reports.py,sha256=ayPdZ4HZO9iCDdnADQ18gQPJh79o-1UYG7TYkvm9Lrc,4051
18
18
  arvi/setup_logger.py,sha256=pBzaRTn0hntozjbaRVx0JIbWGuENkvYUApa6uB-FsRo,279
19
- arvi/simbad_wrapper.py,sha256=hyMnTeZ4DpnTzyEopkdUfNtJ_roSgdvYPXwYcmXVX2U,8238
19
+ arvi/simbad_wrapper.py,sha256=9hH7VczHAjLnOiqkrd1a6mTd-3Y-o_3d3SYJ7uyF_1Y,8406
20
20
  arvi/spectra.py,sha256=ebF1ocodTastLx0CyqLSpE8EZNDXBF8riyfxMr3L6H0,7491
21
21
  arvi/stats.py,sha256=ilzzGL9ew-SyVa9eEdrYCpD3DliOAwhoNUg9LIlHjzU,2583
22
- arvi/stellar.py,sha256=veuL_y9kJvvApU_jqYQqP3EkcRnQffTc8Us6iT5UrFI,3790
23
- arvi/timeseries.py,sha256=ZHk0SgkzlWELyklcnVi586qAGvHIH7PwTqIE-ScX6u0,87830
22
+ arvi/stellar.py,sha256=GQ7yweuBRnfkJ0M5eWjvLd8uvGq_by81PbXfidBvWis,4918
23
+ arvi/timeseries.py,sha256=jZXO_dW0hzE0EmeUwZclMMywFSeBvMYZktNi7h-_kpc,88893
24
24
  arvi/translations.py,sha256=PUSrn4zvYO2MqGzUxlFGwev_tBkgJaJrIYs6NKHzbWo,951
25
- arvi/utils.py,sha256=LImV8iPjG8ZKjPCT9lp25_pDb-51ZZk42Hc8bzZt7M0,6568
25
+ arvi/utils.py,sha256=V4uSpr75YVjE0NP3T5PxnfVQQ06nd-O8X679BfVyD30,7068
26
26
  arvi/data/info.svg,sha256=0IMI6W-eFoTD8acnury79WJJakpBwLa4qKS4JWpsXiI,489
27
27
  arvi/data/obs_affected_ADC_issues.dat,sha256=tn93uOL0eCTYhireqp1wG-_c3CbxPA7C-Rf-pejVY8M,10853
28
28
  arvi/data/obs_affected_blue_cryostat_issues.dat,sha256=z4AK17xfz8tGTDv1FjRvQFnio4XA6PNNfDXuicewHk4,1771
29
29
  arvi/data/extra/HD86226_PFS1.rdb,sha256=vfAozbrKHM_j8dYkCBJsuHyD01KEM1asghe2KInwVao,3475
30
30
  arvi/data/extra/HD86226_PFS2.rdb,sha256=F2P7dB6gVyzCglUjNheB0hIHVClC5RmARrGwbrY1cfo,4114
31
31
  arvi/data/extra/metadata.json,sha256=C69hIw6CohyES6BI9vDWjxwSz7N4VOYX0PCgjXtYFmU,178
32
- arvi-0.1.29.dist-info/LICENSE,sha256=6JfQgl7SpM55t0EHMFNMnNh-AdkpGW25MwMiTnhdWQg,1068
33
- arvi-0.1.29.dist-info/METADATA,sha256=aRUdDo4XitNY1xqg7OpHKw1nDGT_mGIyNl4FoEJquYg,1852
34
- arvi-0.1.29.dist-info/WHEEL,sha256=In9FTNxeP60KnTkGw7wk6mJPYd_dQSjEZmXdBdMCI-8,91
35
- arvi-0.1.29.dist-info/top_level.txt,sha256=4EeiKDVLD45ztuflTGfQ3TU8GVjJg5Y95xS5XjI-utU,5
36
- arvi-0.1.29.dist-info/RECORD,,
32
+ arvi-0.2.0.dist-info/licenses/LICENSE,sha256=6JfQgl7SpM55t0EHMFNMnNh-AdkpGW25MwMiTnhdWQg,1068
33
+ arvi-0.2.0.dist-info/METADATA,sha256=ZBpnSY92f4u9TgpzpM1PxIqPY8u191X2HKhUq8ILEzw,1920
34
+ arvi-0.2.0.dist-info/WHEEL,sha256=CmyFI0kx5cdEMTLiONQRbGQwjIoR1aIYB7eCAQ4KPJ0,91
35
+ arvi-0.2.0.dist-info/top_level.txt,sha256=4EeiKDVLD45ztuflTGfQ3TU8GVjJg5Y95xS5XjI-utU,5
36
+ arvi-0.2.0.dist-info/RECORD,,
@@ -1,5 +1,5 @@
1
1
  Wheel-Version: 1.0
2
- Generator: setuptools (75.8.0)
2
+ Generator: setuptools (78.1.0)
3
3
  Root-Is-Purelib: true
4
4
  Tag: py3-none-any
5
5