arvi 0.1.30__py3-none-any.whl → 0.2.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of arvi might be problematic.

arvi/dace_wrapper.py CHANGED
@@ -47,7 +47,7 @@ def get_dace_id(star, verbose=True):
     try:
         with all_logging_disabled():
             r = load_spectroscopy().query_database(filters=filters, limit=1)
-        return r['obj_id_daceid'][0]
+        return str(r['obj_id_daceid'][0])
     except KeyError:
         if verbose:
             logger.error(f"Could not find DACE ID for {star}")
@@ -322,26 +322,18 @@ def get_observations(star, instrument=None, user=None, main_id=None, verbose=Tru
     from re import match
     def custom_sort_key(s):
         s = s[0]
-        print(s)
         # Check for version number pattern (e.g., 3.2.5 or 3.2.5-EGGS)
-        version_match = match(r'^(\d+(?:\.\d+)*)(?:-(.*))?$', s)
+        version_match = match(r'^(\d+(?:\.\d+)*)(?:[-\s](.*))?$', s)
         if version_match:
-            version_parts = tuple(map(int, version_match.group(1).split('.')))
-            suffix = version_match.group(2)
-
-            if suffix is not None:
-                # Suffixed versions: sort in ascending order (3.2.5-HR11 < 3.3.1-HR11)
-                return (0, 0, version_parts, suffix)
-            else:
-                # Unsuffixed versions: sort in descending order (3.5 > 3.2.5)
-                return (0, 1, tuple(-x for x in version_parts))
-
+            version_parts = list(map(int, version_match.group(1).split('.')))
+            if len(version_parts) == 2:
+                version_parts.insert(1, -1)
+            return (0, 1, version_parts)
         # Check for scientific reference pattern (e.g., 2004A&A...)
         year_match = match(r'^(\d{4})', s)
         if year_match:
            year = int(year_match.group(1))
            return (1, year)
-
        # For all other strings, sort alphabetically
        return (2, s)
 
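The new key pads two-part versions so they compare directly against three-part ones, replacing the previous ascending/descending split. A minimal sketch of the ordering it produces; the sample strings are illustrative, and the real key receives tuples and takes `s[0]` first:

```python
from re import match

def custom_sort_key(s):
    # version numbers sort first, as lists of ints; "3.5" becomes [3, -1, 5],
    # ordering it before any "3.x.y" with x >= 0
    version_match = match(r'^(\d+(?:\.\d+)*)(?:[-\s](.*))?$', s)
    if version_match:
        version_parts = list(map(int, version_match.group(1).split('.')))
        if len(version_parts) == 2:
            version_parts.insert(1, -1)
        return (0, 1, version_parts)
    # scientific references sort next, by year
    year_match = match(r'^(\d{4})', s)
    if year_match:
        return (1, int(year_match.group(1)))
    # everything else sorts alphabetically at the end
    return (2, s)

print(sorted(['2004A&A...', 'HR11', '3.2.5-EGGS', '3.5'], key=custom_sort_key))
# ['3.5', '3.2.5-EGGS', '2004A&A...', 'HR11']
```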
arvi/instrument_specific.py CHANGED
@@ -2,12 +2,25 @@ import os, sys
 import numpy as np
 
 from .setup_logger import logger
+from .utils import ESPRESSO_ADC_issues, ESPRESSO_cryostat_issues
 
 
+# HARPS started operations on October 1st, 2003
+# https://www.eso.org/sci/facilities/lasilla/instruments/harps/news.html
+HARPS_start = 52913
+
 # HARPS fiber upgrade (28 May 2015)
 # https://www.eso.org/sci/facilities/lasilla/instruments/harps/news/harps_upgrade_2015.html
 HARPS_technical_intervention = 57170
 
+# From Lo Curto et al. (2015), The Messenger, vol. 162, p. 9-15
+# On **19 May 2015** HARPS stopped operations and the instrument was opened.
+# Installation and alignment of the fibre link lasted roughly one week. On 29
+# May, the vacuum vessel was closed and evacuated for the last time. Finally, a
+# formal commissioning of the new fibre took place, finishing on **3 June**,
+# when the instrument was handed back to Science Operations.
+HARPS_technical_intervention_range = (57161, 57176)
+
 # ESPRESSO fiber link upgrade (1 July 2019)
 ESPRESSO_technical_intervention = 58665
 
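The new constants are MJDs. As a quick cross-check (not part of the package), the dates quoted in the comments convert as expected with astropy:

```python
# sanity check: the MJD constants match the quoted dates
from astropy.time import Time

print(Time('2003-10-01').mjd)  # 52913.0 -> HARPS_start
print(Time('2015-05-28').mjd)  # 57170.0 -> HARPS_technical_intervention
print(Time('2015-05-19').mjd,  # 57161.0 \ HARPS_technical_intervention_range
      Time('2015-06-03').mjd)  # 57176.0 /
```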
@@ -86,10 +99,78 @@ def divide_HARPS(self):
     logger.info(f'divided HARPS into {self.instruments}')
 
 
+def check(self, instrument):
+    instruments = self._check_instrument(instrument)
+    if instruments is None:
+        if self.verbose:
+            logger.error(f"HARPS_fiber_commissioning: no data from {instrument}")
+        return None
+    return instruments
+
+
+# HARPS commissioning
+def HARPS_commissioning(self, mask=True, plot=True):
+    """ Identify and optionally mask points during HARPS commissioning (HARPS).
+
+    Args:
+        mask (bool, optional):
+            Whether to mask out the points.
+        plot (bool, optional):
+            Whether to plot the masked points.
+    """
+    if check(self, 'HARPS') is None:
+        return
+
+    affected = self.time < HARPS_start
+    total_affected = affected.sum()
+
+    if self.verbose:
+        n = total_affected
+        logger.info(f"there {'are'[:n^1]}{'is'[n^1:]} {n} frame{'s'[:n^1]} "
+                    "during HARPS commissioning")
+
+    if mask:
+        self.mask[affected] = False
+        self._propagate_mask_changes()
+
+    if plot:
+        self.plot(show_masked=True)
+
+    return affected
 
-# ESPRESSO ADC issues
-from .utils import ESPRESSO_ADC_issues
 
+# HARPS fiber commissioning
+def HARPS_fiber_commissioning(self, mask=True, plot=True):
+    """ Identify and optionally mask points affected by fiber commissioning (HARPS).
+
+    Args:
+        mask (bool, optional):
+            Whether to mask out the points.
+        plot (bool, optional):
+            Whether to plot the masked points.
+    """
+    if check(self, 'HARPS') is None:
+        return
+
+    affected = (self.time >= HARPS_technical_intervention_range[0]) & (self.time <= HARPS_technical_intervention_range[1])
+    total_affected = affected.sum()
+
+    if self.verbose:
+        n = total_affected
+        logger.info(f"there {'are'[:n^1]}{'is'[n^1:]} {n} frame{'s'[:n^1]} "
+                    "during the HARPS fiber commissioning period")
+
+    if mask:
+        self.mask[affected] = False
+        self._propagate_mask_changes()
+
+    if plot:
+        self.plot(show_masked=True)
+
+    return affected
+
+
+# ESPRESSO ADC issues
 def ADC_issues(self, mask=True, plot=True, check_headers=False):
     """ Identify and optionally mask points affected by ADC issues (ESPRESSO).
 
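Both new functions log their count with a string-slicing trick: `n^1` flips the lowest bit of `n`, so it is 0 exactly when `n == 1`. A short demonstration of how the message renders (note the 'is'/'are' half only works for n ≤ 2; for n == 3, `n^1 == 2` and `'are'[:2]` yields `'ar'`):

```python
for n in (1, 2):
    print(f"there {'are'[:n^1]}{'is'[n^1:]} {n} frame{'s'[:n^1]}")
# n == 1: n^1 == 0 -> 'are'[:0] == '',    'is'[0:] == 'is' -> "there is 1 frame"
# n == 2: n^1 == 3 -> 'are'[:3] == 'are', 'is'[3:] == ''   -> "there are 2 frames"
```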
@@ -137,8 +218,6 @@ def ADC_issues(self, mask=True, plot=True, check_headers=False):
     return intersect
 
 # ESPRESSO cryostat issues
-from .utils import ESPRESSO_cryostat_issues
-
 def blue_cryostat_issues(self, mask=True, plot=True):
     """ Identify and mask points affected by blue cryostat issues (ESPRESSO).
 
@@ -221,7 +300,7 @@ def qc_scired_issues(self, plot=False, **kwargs):
 
 
 def known_issues(self, mask=True, plot=False, **kwargs):
-    """ Identify and optionally mask known instrumental issues (ADC and blue cryostat for ESPRESSO)
+    """ Identify and optionally mask known instrumental issues.
 
     Args:
         mask (bool, optional): Whether to mask out the points.
@@ -230,18 +309,28 @@ def known_issues(self, mask=True, plot=False, **kwargs):
     try:
         adc = ADC_issues(self, mask, plot, **kwargs)
     except IndexError:
-        # logger.error(e)
         logger.error('are the data binned? cannot proceed to mask these points...')
 
     try:
         cryostat = blue_cryostat_issues(self, mask, plot)
     except IndexError:
-        # logger.error(e)
         logger.error('are the data binned? cannot proceed to mask these points...')
 
-    if adc is None and cryostat is None:
-        return
     try:
-        return adc | cryostat
+        harps_comm = HARPS_commissioning(self, mask, plot)
+    except IndexError:
+        logger.error('are the data binned? cannot proceed to mask these points...')
+
+    try:
+        harps_fibers = HARPS_fiber_commissioning(self, mask, plot)
+    except IndexError:
+        logger.error('are the data binned? cannot proceed to mask these points...')
+
+    # if None in (adc, cryostat, harps_comm, harps_fibers):
+    #     return
+
+    try:
+        # return adc | cryostat
+        return np.logical_or.reduce((adc, cryostat, harps_comm, harps_fibers))
     except UnboundLocalError:
         return
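known_issues now OR-combines the four boolean masks with np.logical_or.reduce; the `except UnboundLocalError` fallback catches the case where one of the four locals was never assigned because its try block raised. A minimal illustration of the reduction:

```python
import numpy as np

adc      = np.array([True,  False, False])
cryostat = np.array([False, False, True])
harps    = np.array([False, True,  False])

# element-wise OR across any number of masks
print(np.logical_or.reduce((adc, cryostat, harps)))  # [ True  True  True]
```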
arvi/plots.py CHANGED
@@ -137,8 +137,8 @@ def clickable_legend(fig, ax, leg):
 
 # @plot_fast
 def plot(self, ax=None, show_masked=False, instrument=None, time_offset=0,
-         remove_50000=False, tooltips=False, show_legend=True, label=None,
-         N_in_label=False, versus_n=False, show_histogram=False, bw=False, **kwargs):
+         remove_50000=False, tooltips=True, show_title=False, show_legend=True, label=None,
+         jitter=None, N_in_label=False, versus_n=False, show_histogram=False, bw=False, **kwargs):
     """ Plot the RVs
 
     Args:
@@ -154,6 +154,8 @@ def plot(self, ax=None, show_masked=False, instrument=None, time_offset=0,
             Whether to subtract 50000 from time. Defaults to False.
         tooltips (bool, optional):
             Show information upon clicking a point. Defaults to True.
+        show_title (bool, optional):
+            Show the star name in the plot title. Defaults to False.
         show_legend (bool, optional):
             Show legend. Defaults to True.
         N_in_label (bool, optional):
@@ -371,6 +373,9 @@ def plot(self, ax=None, show_masked=False, instrument=None, time_offset=0,
     else:
         ax.set_xlabel('BJD - 2400000 [days]')
 
+    if show_title:
+        ax.set_title(self.star, loc='right')
+
     # from matplotlib.backend_tools import ToolBase, ToolToggleBase
     # tm = fig.canvas.manager.toolmanager
     # class InfoTool(ToolToggleBase):
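Hypothetical usage of the new `show_title` keyword; the `RV('HD10180')` entry point follows arvi's documented usage, and the star name is only an example:

```python
from arvi import RV

s = RV('HD10180')
s.plot(show_title=True)   # star name as a right-aligned axes title
```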
arvi/simbad_wrapper.py CHANGED
@@ -10,7 +10,9 @@ try:
 except ImportError:
     ufloat = lambda x, y: x
 
+from .stellar import EFFECTIVE_TEMPERATURES, teff_to_sptype
 from .translations import translate
+from .setup_logger import logger
 
 DATA_PATH = os.path.dirname(__file__)
 DATA_PATH = os.path.join(DATA_PATH, 'data')
@@ -71,15 +73,15 @@ class Measurements:
     bibcode: list
 
 
-def run_query(query):
-    url = 'http://simbad.u-strasbg.fr/simbad/sim-tap/sync'
+def run_query(query, SIMBAD_URL='http://simbad.u-strasbg.fr'):
+    url = f'{SIMBAD_URL}/simbad/sim-tap/sync'
     data = dict(query=query, request='doQuery', lang='ADQL', format='text/plain', phase='run')
     try:
         response = requests.post(url, data=data, timeout=2)
     except requests.ReadTimeout as err:
-        raise IndexError(err)
+        raise IndexError(err) from None
     except requests.ConnectionError as err:
-        raise IndexError(err)
+        raise IndexError(err) from None
     return response.content.decode()
 
 def parse_table1(table, cols=None, values=None):
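run_query now takes the SIMBAD mirror as a parameter, and `raise ... from None` suppresses exception chaining so only the IndexError surfaces, without the requests traceback. A sketch of calling the helper directly; the ADQL string is illustrative, not one of the package's built-in queries:

```python
from arvi.simbad_wrapper import run_query

# synchronous TAP query against SIMBAD's `basic` table, returned as plain text
print(run_query("SELECT TOP 3 main_id, ra, dec FROM basic"))
```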
@@ -120,14 +122,6 @@ def parse_value(value, err=None, prec=None):
     return v
 
 
-effective_temperatures = {
-    'F0': 7350, 'F2': 7050, 'F3': 6850, 'F5': 6700, 'F6': 6550, 'F7': 6400, 'F8': 6300,
-    'G0': 6050, 'G1': 5930, 'G2': 5800, 'G5': 5660, 'G8': 5440,
-    'K0': 5240, 'K1': 5110, 'K2': 4960, 'K3': 4800, 'K4': 4600, 'K5': 4400, 'K7': 4000,
-    'M0': 3750, 'M1': 3700, 'M2': 3600, 'M3': 3500, 'M4': 3400, 'M5': 3200, 'M6': 3100, 'M7': 2900, 'M8': 2700,
-}
-
-
 class simbad:
     """
     A very simple wrapper around a TAP query to simbad for a given target. This
@@ -146,7 +140,7 @@ class simbad:
         V (float): V magnitude
         ids (list): list of identifiers
     """
-    def __init__(self, star:str):
+    def __init__(self, star:str, _debug=False):
        """
        Args:
            star (str): The name of the star to query simbad
@@ -173,12 +167,18 @@ class simbad:
 
         try:
             table1 = run_query(query=QUERY.format(star=self.star))
+            if _debug:
+                print('table1:', table1)
             cols, values = parse_table1(table1)
 
             table2 = run_query(query=BV_QUERY.format(star=self.star))
+            if _debug:
+                print('table2:', table2)
             cols, values = parse_table1(table2, cols, values)
 
             table3 = run_query(query=IDS_QUERY.format(star=self.star))
+            if _debug:
+                print('table3:', table3)
             line = table3.splitlines()[2]
             self.ids = line.replace('"', '').replace('  ', ' ').replace('  ', ' ').replace('  ', ' ').split('|')
 
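A hypothetical call using the new `_debug` flag to dump the three raw TAP tables before they are parsed:

```python
from arvi.simbad_wrapper import simbad

s = simbad('HD 10180', _debug=True)  # prints table1, table2, table3
```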
@@ -203,9 +203,8 @@ class simbad:
 
             self.measurements = Measurements(_teff, _logg, _feh, _bibcode)
 
-
         except IndexError:
-            raise ValueError(f'simbad query for {star} failed')
+            raise ValueError(f'simbad query for {star} failed') from None
 
         try:
             self.gaia_id = int([i for i in self.ids if 'Gaia DR3' in i][0]
@@ -242,10 +241,14 @@ class simbad:
                 raise IndexError
             else:
                 self.teff = data['teff']
+                self.sweetcat = data
 
         except IndexError:
-            if self.sp_type[:2] in effective_temperatures:
-                self.teff = effective_temperatures[self.sp_type[:2]]
+            if self.sp_type == '':
+                self.teff = int(np.mean(self.measurements.teff))
+                self.sp_type = teff_to_sptype(self.teff)
+            elif self.sp_type[:2] in EFFECTIVE_TEMPERATURES:
+                self.teff = EFFECTIVE_TEMPERATURES[self.sp_type[:2]]
 
     def __repr__(self):
         V = self.V
arvi/stellar.py CHANGED
@@ -1,6 +1,31 @@
-
 import numpy as np
 
+# from Table 5 of Pecaut & Mamajek (2013, ApJS, 208, 9; http://adsabs.harvard.edu/abs/2013ApJS..208....9P)
+# https://www.pas.rochester.edu/~emamajek/EEM_dwarf_UBVIJHK_colors_Teff.txt
+EFFECTIVE_TEMPERATURES = {
+    'F0': 7220, 'F1': 7020, 'F2': 6820, 'F3': 6750, 'F4': 6670, 'F5': 6550, 'F6': 6350, 'F7': 6280, 'F8': 6180, 'F9': 6050,
+    'G0': 5930, 'G1': 5860, 'G2': 5770, 'G3': 5720, 'G4': 5680, 'G5': 5660, 'G6': 5600, 'G7': 5550, 'G8': 5480, 'G9': 5380,
+    'K0': 5270, 'K1': 5170, 'K2': 5100, 'K3': 4830, 'K4': 4600, 'K5': 4440, 'K6': 4300, 'K7': 4100, 'K8': 3990, 'K9': 3930,
+    'M0': 3850, 'M1': 3660, 'M2': 3560, 'M3': 3430, 'M4': 3210, 'M5': 3060, 'M6': 2810, 'M7': 2680, 'M8': 2570, 'M9': 2380,
+}
+
+def teff_to_sptype(teff):
+    """
+    Estimate the spectral type from the effective temperature, using the
+    Pecaut & Mamajek (2013) table.
+
+    Args:
+        teff (float): Effective temperature
+
+    Returns:
+        str: Spectral type
+    """
+    teffs = list(EFFECTIVE_TEMPERATURES.values())
+    sptypes = list(EFFECTIVE_TEMPERATURES.keys())
+    i = np.argmin(np.abs(np.array(teffs) - teff))
+    return sptypes[i]
+
+
 class prot_age_result:
     prot_n84 = None #: float | np.ndarray
     prot_n84_err = None #: float | np.ndarray
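Example of the nearest-neighbour lookup (values from the table above; out-of-range inputs snap to the closest entry, and np.argmin returns the first minimum on ties):

```python
from arvi.stellar import teff_to_sptype

print(teff_to_sptype(5770))  # 'G2' (exact match)
print(teff_to_sptype(3800))  # 'M0' (3850 is closer than K9's 3930)
print(teff_to_sptype(9000))  # 'F0' (clamped to the hottest entry)
```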
arvi/timeseries.py CHANGED
@@ -414,9 +414,9 @@ class RV:
         ind = np.where(self.instrument_array == instrument)[0]
         return ind[getattr(self, instrument).mask][index]
 
-    @property
-    def _tt(self) -> np.ndarray:
-        return np.linspace(self.mtime.min(), self.mtime.max(), 20*self.N)
+    # @property
+    def _tt(self, f=20) -> np.ndarray:
+        return np.linspace(self.mtime.min(), self.mtime.max(), f*self.N)
 
     @classmethod
     def from_dace_data(cls, star, inst, pipe, mode, data, **kwargs):
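Note this is a small API break: `_tt` was a property and is now a method with a tunable oversampling factor. A sketch, with `s` an RV instance:

```python
tt = s._tt()      # 20 * s.N points spanning the masked times (old behaviour)
tt = s._tt(f=50)  # denser evaluation grid
```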
@@ -773,7 +773,7 @@ class RV:
             CCFs = [CCFs]
 
         try:
-            objects = [i.OBJECT for i in CCFs]
+            objects = np.unique([i.OBJECT for i in CCFs])
         except AttributeError:
             objects = np.unique([i.HDU[0].header['OBJECT'].replace(' ', '') for i in CCFs])
 
@@ -883,7 +883,6 @@ class RV:
         else:
             resp = requests.get(f'https://kobe.caha.es/internal/fitsfiles/{fits_file}',
                                 auth=HTTPBasicAuth('kobeteam', config.kobe_password))
-        logger.info(f'found file "{fits_file}" on server')
 
         if resp.status_code != 200:
             # something went wrong, try to extract the file by downloading the
@@ -910,6 +909,7 @@ class RV:
                 hdul = fits.open(tar.extractfile(fits_file))
 
         else:
+            logger.info(f'found file "{fits_file}" on server')
             # found the file on the server, read it directly
             hdul = fits.open(BytesIO(resp.content))
 
@@ -1056,7 +1056,7 @@ class RV:
         self._download_directory = value
 
     def download_ccf(self, instrument=None, index=None, limit=None,
-                     directory=None, symlink=False, load=True, **kwargs):
+                     directory=None, clobber=False, symlink=False, load=True, **kwargs):
         """ Download CCFs from DACE
 
         Args:
@@ -1064,6 +1064,7 @@ class RV:
             index (int): Specific index of point for which to download data (0-based)
             limit (int): Maximum number of files to download.
             directory (str): Directory where to store data.
+            clobber (bool): Whether to overwrite existing files.
         """
         directory = directory or self.download_directory
 
@@ -1085,7 +1086,7 @@ class RV:
             logger.warning('may need to provide `top_level` in kwargs to find file')
             do_symlink_filetype('CCF', files[:limit], directory, **kwargs)
         else:
-            do_download_filetype('CCF', files[:limit], directory,
+            do_download_filetype('CCF', files[:limit], directory, clobber=clobber,
                                  verbose=self.verbose, user=self.user, **kwargs)
 
         if load:
@@ -1111,7 +1112,7 @@ class RV:
         pass
 
     def download_s1d(self, instrument=None, index=None, limit=None,
-                     directory=None, symlink=False, **kwargs):
+                     directory=None, clobber=False, apply_mask=True, symlink=False, **kwargs):
         """ Download S1Ds from DACE
 
         Args:
1119
1120
  index (int): Specific index of point for which to download data (0-based)
1120
1121
  limit (int): Maximum number of files to download.
1121
1122
  directory (str): Directory where to store data.
1123
+ clobber (bool): Whether to overwrite existing files.
1124
+ apply_mask (bool): Apply mask to the observations before downloading.
1122
1125
  """
1123
1126
  directory = directory or self.download_directory
1124
1127
 
@@ -1126,7 +1129,11 @@ class RV:
         instrument = self._check_instrument(instrument, strict=strict)
         files = []
         for inst in instrument:
-            files += list(getattr(self, inst).raw_file)
+            _s = getattr(self, inst)
+            if apply_mask:
+                files += list(_s.raw_file[_s.mask])
+            else:
+                files += list(_s.raw_file)
 
         if index is not None:
             index = np.atleast_1d(index)
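A hypothetical call combining the new download keywords (the instrument name is illustrative, and `s` is an RV instance):

```python
s.download_s1d(instrument='ESPRESSO',
               apply_mask=True,   # new default: skip masked observations
               clobber=False)     # keep files already on disk
```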
@@ -1140,11 +1147,11 @@ class RV:
             logger.warning('may need to provide `top_level` in kwargs to find file')
             do_symlink_filetype('S1D', files[:limit], directory, **kwargs)
         else:
-            do_download_filetype('S1D', files[:limit], directory,
+            do_download_filetype('S1D', files[:limit], directory, clobber=clobber,
                                  verbose=self.verbose, user=self.user, **kwargs)
 
     def download_s2d(self, instrument=None, index=None, limit=None,
-                     directory=None, symlink=False, **kwargs):
+                     directory=None, clobber=False, symlink=False, **kwargs):
         """ Download S2Ds from DACE
 
         Args:
@@ -1152,6 +1159,7 @@ class RV:
             index (int): Specific index of point for which to download data (0-based)
             limit (int): Maximum number of files to download.
             directory (str): Directory where to store data.
+            clobber (bool): Whether to overwrite existing files.
         """
         directory = directory or self.download_directory
 
@@ -1728,7 +1736,7 @@ class RV:
 
         if snew.verbose and len(bad_quantities) > 0:
             logger.warning(f"{inst}, skipping non-float quantities in binning:")
-            logger.warning(' ' + str(bad_quantities))
+            logger.warning(' ' + str(list(map(str, bad_quantities))))
         for bq in bad_quantities:
             s._quantities = np.delete(s._quantities, s._quantities==bq)
             delattr(s, bq) #! careful here
@@ -1737,7 +1745,7 @@ class RV:
         s.mask = np.full(tb.shape, True)
 
         if snew.verbose and len(all_bad_quantities) > 0:
-            logger.warning('\nnew object will not have these non-float quantities')
+            logger.warning('\nnew object will not have these quantities')
 
         for q in np.unique(all_bad_quantities):
             delattr(snew, q)
@@ -1777,14 +1785,18 @@ class RV:
             s.vrad += self._meanRV
         self._build_arrays()
 
-    def adjust_means(self, just_rv=False, instrument=None, **kwargs):
+    def adjust_means(self, just_rv=False, exclude_rv=False, instrument=None, **kwargs):
         """
-        Subtract individual mean RVs from each instrument or from specific
-        instruments
+        Subtract individual weighted mean RV from each instrument or from
+        specific instruments
         """
         if self._child or self._did_adjust_means:
             return
 
+        if just_rv and exclude_rv:
+            logger.error('cannot use `just_rv` and `exclude_rv` at the same time')
+            return
+
         # if self.verbose:
         #     print_as_table = len(self.instruments) > 2 and len(self.instruments) < 7
         #     rows = [self.instruments]
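The docstring now says weighted mean explicitly. The wmean helper itself is not shown in this diff; assuming it is the usual inverse-variance weighted average (an assumption, not confirmed by the source), it behaves like:

```python
import numpy as np

def wmean(a, e):
    # inverse-variance weighted mean of values `a` with uncertainties `e`
    return np.average(a, weights=1.0 / e**2)
```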
@@ -1813,14 +1825,12 @@ class RV:
                 s.vrad = np.zeros_like(s.time)
                 continue
 
-            s.rv_mean = wmean(s.mvrad, s.msvrad)
-            s.vrad -= s.rv_mean
+            if not exclude_rv:
+                s.rv_mean = wmean(s.mvrad, s.msvrad)
+                s.vrad -= s.rv_mean
 
-            if self.verbose:
-                # if print_as_table:
-                #     row.append(f'{s.rv_mean:.3f}')
-                # else:
-                logger.info(f'subtracted weighted average from {inst:10s}: ({s.rv_mean:.3f} {self.units})')
+                if self.verbose:
+                    logger.info(f'subtracted weighted average from {inst:10s}: ({s.rv_mean:.3f} {self.units})')
 
             if just_rv:
                 continue
@@ -1834,6 +1844,9 @@ class RV:
                 setattr(s, f'{other}_mean', m)
                 setattr(s, other, getattr(s, other) - m)
 
+            if self.verbose:
+                logger.info(f'subtracted weighted averages from {others}')
+
         # if print_as_table:
         #     from .utils import pretty_print_table
         #     rows.append(row)
@@ -2148,13 +2161,13 @@ class RV:
         # if self.verbose:
         #     logger.warning(f'masking {nan_mask.sum()} observations with NaN in indicators')
 
-        header = '\t'.join(['bjd', 'vrad', 'svrad',
+        header = '\t'.join(['rjd', 'vrad', 'svrad',
                             'fwhm', 'sig_fwhm',
                             'bispan', 'sig_bispan',
                             'contrast', 'sig_contrast',
                             'rhk', 'sig_rhk',
                             'berv',
-                            ])
+                            ])
         header += '\n'
         header += '\t'.join(['-' * len(c) for c in header.strip().split('\t')])
@@ -2165,7 +2178,7 @@ class RV:
         arrays = [_s.mtime, _s.mvrad, _s.msvrad]
 
         # d = np.stack(arrays, axis=1)
-        header = 'bjd\tvrad\tsvrad\n---\t----\t-----'
+        header = 'rjd\tvrad\tsvrad\n---\t----\t-----'
 
         file = f'{star_name}_{inst}.rdb'
         if postfix is not None:
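Both writers now label the time column rjd rather than bjd. Reconstructing the two header lines the code above produces:

```python
header = 'rjd\tvrad\tsvrad'
header += '\n' + '\t'.join('-' * len(c) for c in header.split('\t'))
print(header)
# rjd	vrad	svrad
# ---	----	-----
```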
@@ -2298,7 +2311,11 @@ class RV:
             self.star_mass = float(input('stellar mass (Msun): '))
         if not hasattr(self, 'lum'):
             self.lum = float(input('luminosity (Lsun): '))
-        return getHZ_period(self.simbad.teff, self.star_mass, 1.0, self.lum)
+        if hasattr(self, 'teff'):
+            teff = self.teff
+        else:
+            teff = self.simbad.teff
+        return getHZ_period(teff, self.star_mass, 1.0, self.lum)
 
 
     @property
arvi/utils.py CHANGED
@@ -197,6 +197,18 @@ def ESPRESSO_cryostat_issues():
     return np.array(file_roots)
 
 
+def get_ra_sexagesimal(ra):
+    """ Convert RA in degrees to sexagesimal string representation. """
+    from astropy.coordinates import Angle
+    from astropy import units as u
+    return Angle(ra, unit=u.deg).to(u.hourangle).to_string(sep=':', pad=True)
+
+def get_dec_sexagesimal(dec):
+    """ Convert DEC in degrees to sexagesimal string representation. """
+    from astropy.coordinates import Angle
+    from astropy import units as u
+    return Angle(dec, unit=u.deg).to_string(sep=':', pad=True)
+
 def get_max_berv_span(self, n=None):
     """
     Return the indices of the n observations which maximize the BERV span.
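Example conversions for the two new helpers (astropy required; the inputs are roughly Proxima Cen's coordinates, and the exact decimal places depend on astropy's default precision):

```python
from arvi.utils import get_ra_sexagesimal, get_dec_sexagesimal

print(get_ra_sexagesimal(217.42895))   # ~ '14:29:42.948'
print(get_dec_sexagesimal(-62.67948))  # ~ '-62:40:46.128'
```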
arvi-0.1.30.dist-info/METADATA → arvi-0.2.1.dist-info/METADATA RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: arvi
-Version: 0.1.30
+Version: 0.2.1
 Summary: The Automated RV Inspector
 Author-email: João Faria <joao.faria@unige.ch>
 License: MIT
arvi-0.1.30.dist-info/RECORD → arvi-0.2.1.dist-info/RECORD RENAMED
@@ -4,33 +4,33 @@ arvi/ariadne_wrapper.py,sha256=YvilopJa9T4NwPcj3Nah_U8smSeSAU5-HYZMb_GJ-BQ,2232
 arvi/berv.py,sha256=eKnpuPC1w45UrUEyFRbs9F9j3bXz3kxYzNXbnRgvFQM,17596
 arvi/binning.py,sha256=jbemJ-bM3aqoOsqMo_OhWt_co-JAQ0nhdG_GpTsrRsw,15403
 arvi/config.py,sha256=W-v8NNhRd_PROu0wCMilXmOhYcju4xbUalugd5u7SRU,1881
-arvi/dace_wrapper.py,sha256=C2TZwskbODMVRGi-xdCW9sCDQrUx5Z2ZM7yVWkeKde4,23463
+arvi/dace_wrapper.py,sha256=EKKBIqxgmbqvyhsLFK65aOP0BcEH3jnuHHNgCOvKYlk,23169
 arvi/extra_data.py,sha256=cpJGMle0ZqY_dtrmbbMQcyU48PkNjfzUgQ-qY-2XTj8,3249
 arvi/gaia_wrapper.py,sha256=2q_7bm6MGvTLlegfNUCY_EhnMKYv1CZmcbanOm_ot-k,4197
 arvi/headers.py,sha256=uvdJebw1M5YkGjE3vJJwYBOnLikib75uuZE9FXB5JJM,1673
-arvi/instrument_specific.py,sha256=-pbm2Vk3iK_1K7nDa1avlJOKHBcXllwILI4lQn-Ze-A,7761
+arvi/instrument_specific.py,sha256=ORjlw79EumEiGugmGn_2WBOuEPhsfgDNryEMBDe9RgM,10733
 arvi/kima_wrapper.py,sha256=BvNTVqzM4lMNhLCyBFVh3T84hHfGKAFpgiYiOi4lh0g,2731
 arvi/lbl_wrapper.py,sha256=_ViGVkpakvuBR_xhu9XJRV5EKHpj5Go6jBZGJZMIS2Y,11850
 arvi/nasaexo_wrapper.py,sha256=mWt7eHgSZe4MBKCmUvMPTyUPGuiwGTqKugNBvmjOg9s,7306
-arvi/plots.py,sha256=gZzwv1VbWMml3tF0ET8Z56ekA-VHd0nZw11XR-Qo0GA,31950
+arvi/plots.py,sha256=Ut9_AOhhlp-fsYaOacZoYnKBVnbpgxKw1BXlEgTphOo,32152
 arvi/programs.py,sha256=BW7xBNKLei7NVLLW3_lsVskwzkaIoNRiHK2jn9Tn2ZM,8879
 arvi/reports.py,sha256=ayPdZ4HZO9iCDdnADQ18gQPJh79o-1UYG7TYkvm9Lrc,4051
 arvi/setup_logger.py,sha256=pBzaRTn0hntozjbaRVx0JIbWGuENkvYUApa6uB-FsRo,279
-arvi/simbad_wrapper.py,sha256=hyMnTeZ4DpnTzyEopkdUfNtJ_roSgdvYPXwYcmXVX2U,8238
+arvi/simbad_wrapper.py,sha256=9hH7VczHAjLnOiqkrd1a6mTd-3Y-o_3d3SYJ7uyF_1Y,8406
 arvi/spectra.py,sha256=ebF1ocodTastLx0CyqLSpE8EZNDXBF8riyfxMr3L6H0,7491
 arvi/stats.py,sha256=ilzzGL9ew-SyVa9eEdrYCpD3DliOAwhoNUg9LIlHjzU,2583
-arvi/stellar.py,sha256=veuL_y9kJvvApU_jqYQqP3EkcRnQffTc8Us6iT5UrFI,3790
-arvi/timeseries.py,sha256=YJ5s3TvQthIdVrZWT3S23DagM4BzT2ol5ENcnwsc_zk,88079
+arvi/stellar.py,sha256=GQ7yweuBRnfkJ0M5eWjvLd8uvGq_by81PbXfidBvWis,4918
+arvi/timeseries.py,sha256=jZXO_dW0hzE0EmeUwZclMMywFSeBvMYZktNi7h-_kpc,88893
 arvi/translations.py,sha256=PUSrn4zvYO2MqGzUxlFGwev_tBkgJaJrIYs6NKHzbWo,951
-arvi/utils.py,sha256=LImV8iPjG8ZKjPCT9lp25_pDb-51ZZk42Hc8bzZt7M0,6568
+arvi/utils.py,sha256=V4uSpr75YVjE0NP3T5PxnfVQQ06nd-O8X679BfVyD30,7068
 arvi/data/info.svg,sha256=0IMI6W-eFoTD8acnury79WJJakpBwLa4qKS4JWpsXiI,489
 arvi/data/obs_affected_ADC_issues.dat,sha256=tn93uOL0eCTYhireqp1wG-_c3CbxPA7C-Rf-pejVY8M,10853
 arvi/data/obs_affected_blue_cryostat_issues.dat,sha256=z4AK17xfz8tGTDv1FjRvQFnio4XA6PNNfDXuicewHk4,1771
 arvi/data/extra/HD86226_PFS1.rdb,sha256=vfAozbrKHM_j8dYkCBJsuHyD01KEM1asghe2KInwVao,3475
 arvi/data/extra/HD86226_PFS2.rdb,sha256=F2P7dB6gVyzCglUjNheB0hIHVClC5RmARrGwbrY1cfo,4114
 arvi/data/extra/metadata.json,sha256=C69hIw6CohyES6BI9vDWjxwSz7N4VOYX0PCgjXtYFmU,178
-arvi-0.1.30.dist-info/licenses/LICENSE,sha256=6JfQgl7SpM55t0EHMFNMnNh-AdkpGW25MwMiTnhdWQg,1068
-arvi-0.1.30.dist-info/METADATA,sha256=tNPN97-xtW27OkE_vRHNtN4_VtZvJJzy2gITgy0Cupw,1921
-arvi-0.1.30.dist-info/WHEEL,sha256=CmyFI0kx5cdEMTLiONQRbGQwjIoR1aIYB7eCAQ4KPJ0,91
-arvi-0.1.30.dist-info/top_level.txt,sha256=4EeiKDVLD45ztuflTGfQ3TU8GVjJg5Y95xS5XjI-utU,5
-arvi-0.1.30.dist-info/RECORD,,
+arvi-0.2.1.dist-info/licenses/LICENSE,sha256=6JfQgl7SpM55t0EHMFNMnNh-AdkpGW25MwMiTnhdWQg,1068
+arvi-0.2.1.dist-info/METADATA,sha256=VkJZ7gNuP1IB2IMYxgrU8FBubpRjzCU_av0ZgY5AkH4,1920
+arvi-0.2.1.dist-info/WHEEL,sha256=CmyFI0kx5cdEMTLiONQRbGQwjIoR1aIYB7eCAQ4KPJ0,91
+arvi-0.2.1.dist-info/top_level.txt,sha256=4EeiKDVLD45ztuflTGfQ3TU8GVjJg5Y95xS5XjI-utU,5
+arvi-0.2.1.dist-info/RECORD,,