arvi 0.2.4__tar.gz → 0.2.5__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.



Files changed (63)
  1. {arvi-0.2.4/arvi.egg-info → arvi-0.2.5}/PKG-INFO +1 -1
  2. {arvi-0.2.4 → arvi-0.2.5}/arvi/dace_wrapper.py +80 -45
  3. {arvi-0.2.4 → arvi-0.2.5}/arvi/instrument_specific.py +56 -25
  4. {arvi-0.2.4 → arvi-0.2.5}/arvi/timeseries.py +1 -1
  5. {arvi-0.2.4 → arvi-0.2.5/arvi.egg-info}/PKG-INFO +1 -1
  6. {arvi-0.2.4 → arvi-0.2.5}/tests/test_create_RV.py +7 -0
  7. {arvi-0.2.4 → arvi-0.2.5}/.github/dependabot.yml +0 -0
  8. {arvi-0.2.4 → arvi-0.2.5}/.github/workflows/docs-gh-pages.yml +0 -0
  9. {arvi-0.2.4 → arvi-0.2.5}/.github/workflows/install.yml +0 -0
  10. {arvi-0.2.4 → arvi-0.2.5}/.github/workflows/python-publish.yml +0 -0
  11. {arvi-0.2.4 → arvi-0.2.5}/.gitignore +0 -0
  12. {arvi-0.2.4 → arvi-0.2.5}/LICENSE +0 -0
  13. {arvi-0.2.4 → arvi-0.2.5}/README.md +0 -0
  14. {arvi-0.2.4 → arvi-0.2.5}/arvi/HZ.py +0 -0
  15. {arvi-0.2.4 → arvi-0.2.5}/arvi/__init__.py +0 -0
  16. {arvi-0.2.4 → arvi-0.2.5}/arvi/ariadne_wrapper.py +0 -0
  17. {arvi-0.2.4 → arvi-0.2.5}/arvi/berv.py +0 -0
  18. {arvi-0.2.4 → arvi-0.2.5}/arvi/binning.py +0 -0
  19. {arvi-0.2.4 → arvi-0.2.5}/arvi/config.py +0 -0
  20. {arvi-0.2.4 → arvi-0.2.5}/arvi/data/extra/HD86226_PFS1.rdb +0 -0
  21. {arvi-0.2.4 → arvi-0.2.5}/arvi/data/extra/HD86226_PFS2.rdb +0 -0
  22. {arvi-0.2.4 → arvi-0.2.5}/arvi/data/extra/metadata.json +0 -0
  23. {arvi-0.2.4 → arvi-0.2.5}/arvi/data/info.svg +0 -0
  24. {arvi-0.2.4 → arvi-0.2.5}/arvi/data/obs_affected_ADC_issues.dat +0 -0
  25. {arvi-0.2.4 → arvi-0.2.5}/arvi/data/obs_affected_blue_cryostat_issues.dat +0 -0
  26. {arvi-0.2.4 → arvi-0.2.5}/arvi/exofop_wrapper.py +0 -0
  27. {arvi-0.2.4 → arvi-0.2.5}/arvi/extra_data.py +0 -0
  28. {arvi-0.2.4 → arvi-0.2.5}/arvi/gaia_wrapper.py +0 -0
  29. {arvi-0.2.4 → arvi-0.2.5}/arvi/headers.py +0 -0
  30. {arvi-0.2.4 → arvi-0.2.5}/arvi/kima_wrapper.py +0 -0
  31. {arvi-0.2.4 → arvi-0.2.5}/arvi/lbl_wrapper.py +0 -0
  32. {arvi-0.2.4 → arvi-0.2.5}/arvi/nasaexo_wrapper.py +0 -0
  33. {arvi-0.2.4 → arvi-0.2.5}/arvi/plots.py +0 -0
  34. {arvi-0.2.4 → arvi-0.2.5}/arvi/programs.py +0 -0
  35. {arvi-0.2.4 → arvi-0.2.5}/arvi/reports.py +0 -0
  36. {arvi-0.2.4 → arvi-0.2.5}/arvi/setup_logger.py +0 -0
  37. {arvi-0.2.4 → arvi-0.2.5}/arvi/simbad_wrapper.py +0 -0
  38. {arvi-0.2.4 → arvi-0.2.5}/arvi/spectra.py +0 -0
  39. {arvi-0.2.4 → arvi-0.2.5}/arvi/stats.py +0 -0
  40. {arvi-0.2.4 → arvi-0.2.5}/arvi/stellar.py +0 -0
  41. {arvi-0.2.4 → arvi-0.2.5}/arvi/translations.py +0 -0
  42. {arvi-0.2.4 → arvi-0.2.5}/arvi/utils.py +0 -0
  43. {arvi-0.2.4 → arvi-0.2.5}/arvi.egg-info/SOURCES.txt +0 -0
  44. {arvi-0.2.4 → arvi-0.2.5}/arvi.egg-info/dependency_links.txt +0 -0
  45. {arvi-0.2.4 → arvi-0.2.5}/arvi.egg-info/requires.txt +0 -0
  46. {arvi-0.2.4 → arvi-0.2.5}/arvi.egg-info/top_level.txt +0 -0
  47. {arvi-0.2.4 → arvi-0.2.5}/docs/API.md +0 -0
  48. {arvi-0.2.4 → arvi-0.2.5}/docs/detailed.ipynb +0 -0
  49. {arvi-0.2.4 → arvi-0.2.5}/docs/downloading_data.md +0 -0
  50. {arvi-0.2.4 → arvi-0.2.5}/docs/index.md +0 -0
  51. {arvi-0.2.4 → arvi-0.2.5}/docs/logo/detective.png +0 -0
  52. {arvi-0.2.4 → arvi-0.2.5}/docs/logo/logo.png +0 -0
  53. {arvi-0.2.4 → arvi-0.2.5}/docs/stylesheets/extra.css +0 -0
  54. {arvi-0.2.4 → arvi-0.2.5}/mkdocs.yml +0 -0
  55. {arvi-0.2.4 → arvi-0.2.5}/pyproject.toml +0 -0
  56. {arvi-0.2.4 → arvi-0.2.5}/setup.cfg +0 -0
  57. {arvi-0.2.4 → arvi-0.2.5}/setup.py +0 -0
  58. {arvi-0.2.4 → arvi-0.2.5}/tests/HD10700-Bcor_ESPRESSO18.rdb +0 -0
  59. {arvi-0.2.4 → arvi-0.2.5}/tests/test_binning.py +0 -0
  60. {arvi-0.2.4 → arvi-0.2.5}/tests/test_config.py +0 -0
  61. {arvi-0.2.4 → arvi-0.2.5}/tests/test_import_object.py +0 -0
  62. {arvi-0.2.4 → arvi-0.2.5}/tests/test_simbad.py +0 -0
  63. {arvi-0.2.4 → arvi-0.2.5}/tests/test_stats.py +0 -0
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: arvi
-Version: 0.2.4
+Version: 0.2.5
 Summary: The Automated RV Inspector
 Author-email: João Faria <joao.faria@unige.ch>
 License: MIT
@@ -7,7 +7,7 @@ from itertools import islice
 import numpy as np
 
 from .setup_logger import setup_logger
-from .utils import create_directory, all_logging_disabled, stdout_disabled, tqdm
+from .utils import create_directory, all_logging_disabled, stdout_disabled, timer, tqdm
 
 
 def load_spectroscopy(user=None):
@@ -86,6 +86,11 @@ def get_arrays(result, latest_pipeline=True, ESPRESSO_mode='HR11', NIRPS_mode='H
         i = [i for i, pipe in enumerate(pipelines) if ESPRESSO_mode in pipe][0]
         pipelines = [pipelines[i]]
 
+    # select NIRPS mode
+    if 'NIRPS' in inst:
+        if any(this_mode := [p for p in pipelines if NIRPS_mode in p]):
+            pipelines = this_mode
+
     if latest_pipeline:
         npipe = len(pipelines)
         if 'NIRPS' in inst and any(['LBL' in p for p in pipelines]):
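Note: the added NIRPS mode selection uses an assignment expression inside any(), so the filtered list is only kept when it is non-empty. A minimal standalone sketch of the same pattern, with made-up pipeline names:

    pipelines = ['3.0.0 HE', '3.0.0 HA']
    NIRPS_mode = 'HE'
    if any(this_mode := [p for p in pipelines if NIRPS_mode in p]):
        pipelines = this_mode
    print(pipelines)  # ['3.0.0 HE'] -- left unchanged if no pipeline matches the requested mode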
@@ -164,19 +169,19 @@ def get_observations_from_instrument(star, instrument, user=None, main_id=None,
         dictionary with data from DACE
     """
     Spectroscopy = load_spectroscopy(user)
-
     found_dace_id = False
-    try:
-        dace_id = get_dace_id(star, verbose=verbose, raise_error=True)
-        found_dace_id = True
-    except ValueError as e:
-        if main_id is not None:
-            try:
-                dace_id = get_dace_id(main_id, verbose=verbose, raise_error=True)
-                found_dace_id = True
-            except ValueError:
-                pass
-
+    with timer('simbad query'):
+        try:
+            dace_id = get_dace_id(star, verbose=verbose, raise_error=True)
+            found_dace_id = True
+        except ValueError as e:
+            if main_id is not None:
+                try:
+                    dace_id = get_dace_id(main_id, verbose=verbose, raise_error=True)
+                    found_dace_id = True
+                except ValueError:
+                    pass
+
     if not found_dace_id:
         try:
             with all_logging_disabled():
@@ -187,11 +192,16 @@ def get_observations_from_instrument(star, instrument, user=None, main_id=None,
         except TypeError:
             msg = f'no {instrument} observations for {star}'
             raise ValueError(msg) from None
-
-    filters = {
-        "ins_name": {"contains": [instrument]},
-        "obj_id_daceid": {"contains": [dace_id]}
-    }
+    if (isinstance(instrument, str)):
+        filters = {
+            "ins_name": {"contains": [instrument]},
+            "obj_id_daceid": {"contains": [dace_id]}
+        }
+    elif (isinstance(instrument, list)):
+        filters = {
+            "ins_name": {"contains": instrument},
+            "obj_id_daceid": {"contains": [dace_id]}
+        }
     with all_logging_disabled():
         result = Spectroscopy.query_database(filters=filters)
 
@@ -302,12 +312,14 @@ def get_observations(star, instrument=None, user=None, main_id=None, verbose=Tru
         result[inst] = dict(result[inst])
     #
 
-    instruments = list(result.keys())
+    instruments = list(map(str, result.keys()))
 
     if instrument is not None:
         # select only the provided instrument (if it's there)
-        instruments = [inst for inst in instruments if instrument in inst]
-
+        if (isinstance(instrument, str)):
+            instruments = [inst for inst in instruments if instrument in inst]
+        elif (isinstance(instrument, list)):
+            instruments = [inst for inst in instruments if any(i in inst for i in instrument)]
     if len(instruments) == 0:
         if instrument is None:
             msg = f'no observations for {star}'
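Note: with the two changes above, the instrument argument may be a single name or a list of names, and selection is by substring match against the instrument keys returned by DACE. A small sketch of both behaviours, with made-up keys and a placeholder DACE id:

    # filter sent to the DACE query
    filters = {"ins_name": {"contains": ["CORALIE"]},           # instrument='CORALIE'
               "obj_id_daceid": {"contains": ["<dace-id>"]}}
    filters = {"ins_name": {"contains": ["CORALIE", "HRS"]},    # instrument=['CORALIE', 'HRS']
               "obj_id_daceid": {"contains": ["<dace-id>"]}}

    # substring selection of the returned instrument keys
    instruments = ['ESPRESSO19', 'HARPS03']                     # made-up keys
    instrument = ['HARPS', 'NIRPS']
    print([inst for inst in instruments if any(i in inst for i in instrument)])
    # ['HARPS03'] -- 'HARPS' matches 'HARPS03' by substring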
@@ -331,30 +343,53 @@ def get_observations(star, instrument=None, user=None, main_id=None, verbose=Tru
     # else:
     #     return -1
 
-    # sort pipelines, must be extra careful with HARPS/HARPN pipeline version numbers
-    # got here with the help of DeepSeek
-    from re import match
-    def custom_sort_key(s):
-        s = s[0]
-        # Check for version number pattern (e.g., 3.2.5 or 3.2.5-EGGS)
-        version_match = match(r'^(\d+(?:\.\d+)*)(?:[-\s](.*))?$', s)
-        if version_match:
-            version_parts = list(map(int, version_match.group(1).split('.')))
-            if len(version_parts) == 2:
-                version_parts.insert(1, -1)
-            return (0, 1, version_parts)
-        # Check for scientific reference pattern (e.g., 2004A&A...)
-        year_match = match(r'^(\d{4})', s)
-        if year_match:
-            year = int(year_match.group(1))
-            return (1, year)
-        # For all other strings, sort alphabetically
-        return (2, s)
-
-    # from functools import cmp_to_key
+    # # sort pipelines, must be extra careful with HARPS/HARPN pipeline version numbers
+    # # got here with the help of DeepSeek
+    # # from functools import cmp_to_key
+    # from re import match
+    # def custom_sort_key(s):
+    #     s = s[0]
+    #     # Check for version number pattern (e.g., 3.2.5 or 3.2.5-EGGS)
+    #     version_match = match(r'^(\d+(?:\.\d+)*)(?:[-\s](.*))?$', s)
+    #     if version_match:
+    #         version_parts = list(map(int, version_match.group(1).split('.')))
+    #         if len(version_parts) == 2:
+    #             version_parts.insert(1, -1)
+    #         # if version_match.group(2) and 'LBL' in version_match.group(2):
+    #         #     version_parts.append(-1)
+    #         # else:
+    #         #     version_parts.append(0)
+    #         if version_match.group(2) is None:
+    #             version_parts.append('')
+    #         else:
+    #             version_parts.append(version_match.group(2))
+    #         return (0, 1, version_parts)
+    #     # Check for scientific reference pattern (e.g., 2004A&A...)
+    #     year_match = match(r'^(\d{4})', s)
+    #     if year_match:
+    #         year = int(year_match.group(1))
+    #         return (1, year)
+    #     # For all other strings, sort alphabetically
+    #     return (2, s)
+
+    def custom_key(val):
+        key = 0
+        key -= 2 if val == '3.5' else 0
+        key -= 1 if 'EGGS' in val else 0
+        key -= 1 if ('UHR' in val or 'MR' in val) else 0
+        key -= 1 if 'LBL' in val else 0
+        return str(key) if key != 0 else val
+
     new_result = {}
     for inst in instruments:
-        new_result[inst] = dict(sorted(result[inst].items(), key=custom_sort_key, reverse=True))
+        # new_result[inst] = dict(
+        #     sorted(result[inst].items(), key=custom_sort_key, reverse=True)
+        # )
+        # WARNING: not the same as reverse=True (not sure why)
+        sorted_keys = sorted(result[inst].keys(), key=custom_key)[::-1]
+        new_result[inst] = {}
+        for key in sorted_keys:
+            new_result[inst][key] = result[inst][key]
 
     if verbose:
         logger.info('RVs available from')
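Note on the WARNING comment above ("not the same as reverse=True"): Python's sort is stable, so sorted(..., reverse=True) keeps the original relative order of items whose keys compare equal, whereas reversing an ascending sort also reverses the order of those ties, which plausibly explains the difference seen here. A minimal illustration with made-up names that map to the same key:

    items = ['pipe-A', 'pipe-B']
    same_key = lambda v: 0
    print(sorted(items, key=same_key, reverse=True))   # ['pipe-A', 'pipe-B'] (stable, order kept)
    print(sorted(items, key=same_key)[::-1])           # ['pipe-B', 'pipe-A'] (whole result reversed)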
@@ -388,7 +423,7 @@ def check_existing(output_directory, files, type):
     ]
 
     if type == 'S2D':
-        existing = [
+        existing += [
            f.partition('.fits')[0] for f in os.listdir(output_directory)
            if 'e2ds' in f
        ]
@@ -506,7 +541,7 @@ def do_download_filetype(type, raw_files, output_directory, clobber=False, user=
     # check existing files to avoid re-downloading
     if not clobber:
         raw_files = check_existing(output_directory, raw_files, type)
-
+    
     n = raw_files.size
 
     # any file left to download?
@@ -5,7 +5,7 @@ from .setup_logger import setup_logger
 from .utils import ESPRESSO_ADC_issues, ESPRESSO_cryostat_issues
 
 
-# HARPS started operations in October 1st, 2003
+# HARPS started operations on October 1st, 2003
 # https://www.eso.org/sci/facilities/lasilla/instruments/harps/news.html
 HARPS_start = 52913
 
@@ -21,6 +21,11 @@ HARPS_technical_intervention = 57170
 # when the instrument was handed back to Science Operations.
 HARPS_technical_intervention_range = (57161, 57176)
 
+
+# ESPRESSO started operations on October 1st, 2018
+# see Pepe et al. (2021, A&A 645, A96)
+ESPRESSO_start = 58392
+
 # ESPRESSO fiber link upgrade (1 July 2019)
 ESPRESSO_technical_intervention = 58665
 
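Note: HARPS_start and ESPRESSO_start are Modified Julian Dates. A quick sanity check of the values, assuming astropy is available (it is not implied by this change):

    from astropy.time import Time
    print(Time(52913, format='mjd').iso)   # 2003-10-01 00:00:00.000
    print(Time(58392, format='mjd').iso)   # 2018-10-01 00:00:00.000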
@@ -113,7 +118,7 @@ def check(self, instrument):
 
 # HARPS commissioning
 def HARPS_commissioning(self, mask=True, plot=True):
-    """ Identify and optionally mask points during HARPS commissioning (HARPS).
+    """ Identify and optionally mask points during HARPS commissioning.
 
     Args:
         mask (bool, optional):
@@ -175,6 +180,38 @@ def HARPS_fiber_commissioning(self, mask=True, plot=True):
     return affected
 
 
+# ESPRESSO commissioning
+def ESPRESSO_commissioning(self, mask=True, plot=True):
+    """ Identify and optionally mask points during ESPRESSO commissioning.
+
+    Args:
+        mask (bool, optional):
+            Whether to mask out the points.
+        plot (bool, optional):
+            Whether to plot the masked points.
+    """
+    logger = setup_logger()
+    if check(self, 'ESPRESSO') is None:
+        return
+
+    affected = self.time < ESPRESSO_start
+    total_affected = affected.sum()
+
+    if self.verbose:
+        n = total_affected
+        logger.info(f"there {'are'[:n^1]}{'is'[n^1:]} {n} frame{'s'[:n^1]} "
+                    "during ESPRESSO commissioning")
+
+    if mask:
+        self.mask[affected] = False
+        self._propagate_mask_changes()
+
+    if plot:
+        self.plot(show_masked=True)
+
+    return affected
+
+
 # ESPRESSO ADC issues
 def ADC_issues(self, mask=True, plot=True, check_headers=False):
     """ Identify and optionally mask points affected by ADC issues (ESPRESSO).
@@ -316,31 +353,25 @@ class ISSUES:
             plot (bool, optional): Whether to plot the masked points.
         """
         logger = setup_logger()
-        try:
-            adc = ADC_issues(self, mask, plot, **kwargs)
-        except IndexError:
-            logger.error('are the data binned? cannot proceed to mask these points...')
-
-        try:
-            cryostat = blue_cryostat_issues(self, mask, plot)
-        except IndexError:
-            logger.error('are the data binned? cannot proceed to mask these points...')
-
-        try:
-            harps_comm = HARPS_commissioning(self, mask, plot)
-        except IndexError:
-            logger.error('are the data binned? cannot proceed to mask these points...')
-
-        try:
-            harps_fibers = HARPS_fiber_commissioning(self, mask, plot)
-        except IndexError:
-            logger.error('are the data binned? cannot proceed to mask these points...')
 
-        # if None in (adc, cryostat, harps_comm, harps_fibers):
-        #     return
+        functions = (
+            ESPRESSO_commissioning,
+            ADC_issues,
+            blue_cryostat_issues,
+            HARPS_commissioning,
+            HARPS_fiber_commissioning
+        )
+        results = []
+
+        for fun in functions:
+            try:
+                results.append(fun(self, mask, plot, **kwargs))
+            except IndexError:
+                logger.error('are the data binned? cannot proceed to mask these points...')
+
+        results = list(filter(lambda x: x is not None, results))
 
         try:
-            # return adc | cryostat
-            return np.logical_or.reduce((adc, cryostat, harps_comm, harps_fibers))
+            return np.logical_or.reduce(results)
         except UnboundLocalError:
             return
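Note: the boolean masks returned by the individual checks are combined element-wise with np.logical_or.reduce. A minimal sketch with made-up masks:

    import numpy as np
    masks = [np.array([True, False, False]),   # e.g. points during commissioning
             np.array([False, False, True])]   # e.g. points affected by ADC issues
    print(np.logical_or.reduce(masks))         # [ True False  True]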
@@ -55,7 +55,7 @@ class RV(ISSUES, REPORTS):
         Information on the target from Simbad
     """
     star: str
-    instrument: str = field(init=True, repr=False, default=None)
+    instrument: Union[str, list] = field(init=True, repr=False, default=None)
    verbose: bool = field(init=True, repr=False, default=True)
    do_maxerror: Union[bool, float] = field(init=True, repr=False, default=False)
    do_secular_acceleration: bool = field(init=True, repr=False, default=True)
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: arvi
-Version: 0.2.4
+Version: 0.2.5
 Summary: The Automated RV Inspector
 Author-email: João Faria <joao.faria@unige.ch>
 License: MIT
@@ -19,3 +19,10 @@ def test_from_rdb(change_test_dir):
     assert (s.bispan == 0).all()
     assert isnan(s.rhk).all()
 
+
+def test_list_instruments():
+    from arvi import RV, config
+    config.request_as_public = True
+    _ = RV('HD28185', instrument='CORALIE')
+    _ = RV('HD28185', instrument=['CORALIE'])
+    _ = RV('HD28185', instrument=['CORALIE', 'HRS'])