arvi 0.2.4.tar.gz → 0.2.6.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

Files changed (64)
  1. {arvi-0.2.4/arvi.egg-info → arvi-0.2.6}/PKG-INFO +1 -1
  2. {arvi-0.2.4 → arvi-0.2.6}/arvi/dace_wrapper.py +80 -45
  3. {arvi-0.2.4 → arvi-0.2.6}/arvi/gaia_wrapper.py +2 -2
  4. {arvi-0.2.4 → arvi-0.2.6}/arvi/instrument_specific.py +57 -33
  5. {arvi-0.2.4 → arvi-0.2.6}/arvi/simbad_wrapper.py +18 -0
  6. arvi-0.2.6/arvi/sophie_wrapper.py +111 -0
  7. {arvi-0.2.4 → arvi-0.2.6}/arvi/timeseries.py +31 -14
  8. {arvi-0.2.4 → arvi-0.2.6}/arvi/utils.py +5 -2
  9. {arvi-0.2.4 → arvi-0.2.6/arvi.egg-info}/PKG-INFO +1 -1
  10. {arvi-0.2.4 → arvi-0.2.6}/arvi.egg-info/SOURCES.txt +1 -0
  11. {arvi-0.2.4 → arvi-0.2.6}/tests/test_create_RV.py +7 -0
  12. {arvi-0.2.4 → arvi-0.2.6}/.github/dependabot.yml +0 -0
  13. {arvi-0.2.4 → arvi-0.2.6}/.github/workflows/docs-gh-pages.yml +0 -0
  14. {arvi-0.2.4 → arvi-0.2.6}/.github/workflows/install.yml +0 -0
  15. {arvi-0.2.4 → arvi-0.2.6}/.github/workflows/python-publish.yml +0 -0
  16. {arvi-0.2.4 → arvi-0.2.6}/.gitignore +0 -0
  17. {arvi-0.2.4 → arvi-0.2.6}/LICENSE +0 -0
  18. {arvi-0.2.4 → arvi-0.2.6}/README.md +0 -0
  19. {arvi-0.2.4 → arvi-0.2.6}/arvi/HZ.py +0 -0
  20. {arvi-0.2.4 → arvi-0.2.6}/arvi/__init__.py +0 -0
  21. {arvi-0.2.4 → arvi-0.2.6}/arvi/ariadne_wrapper.py +0 -0
  22. {arvi-0.2.4 → arvi-0.2.6}/arvi/berv.py +0 -0
  23. {arvi-0.2.4 → arvi-0.2.6}/arvi/binning.py +0 -0
  24. {arvi-0.2.4 → arvi-0.2.6}/arvi/config.py +0 -0
  25. {arvi-0.2.4 → arvi-0.2.6}/arvi/data/extra/HD86226_PFS1.rdb +0 -0
  26. {arvi-0.2.4 → arvi-0.2.6}/arvi/data/extra/HD86226_PFS2.rdb +0 -0
  27. {arvi-0.2.4 → arvi-0.2.6}/arvi/data/extra/metadata.json +0 -0
  28. {arvi-0.2.4 → arvi-0.2.6}/arvi/data/info.svg +0 -0
  29. {arvi-0.2.4 → arvi-0.2.6}/arvi/data/obs_affected_ADC_issues.dat +0 -0
  30. {arvi-0.2.4 → arvi-0.2.6}/arvi/data/obs_affected_blue_cryostat_issues.dat +0 -0
  31. {arvi-0.2.4 → arvi-0.2.6}/arvi/exofop_wrapper.py +0 -0
  32. {arvi-0.2.4 → arvi-0.2.6}/arvi/extra_data.py +0 -0
  33. {arvi-0.2.4 → arvi-0.2.6}/arvi/headers.py +0 -0
  34. {arvi-0.2.4 → arvi-0.2.6}/arvi/kima_wrapper.py +0 -0
  35. {arvi-0.2.4 → arvi-0.2.6}/arvi/lbl_wrapper.py +0 -0
  36. {arvi-0.2.4 → arvi-0.2.6}/arvi/nasaexo_wrapper.py +0 -0
  37. {arvi-0.2.4 → arvi-0.2.6}/arvi/plots.py +0 -0
  38. {arvi-0.2.4 → arvi-0.2.6}/arvi/programs.py +0 -0
  39. {arvi-0.2.4 → arvi-0.2.6}/arvi/reports.py +0 -0
  40. {arvi-0.2.4 → arvi-0.2.6}/arvi/setup_logger.py +0 -0
  41. {arvi-0.2.4 → arvi-0.2.6}/arvi/spectra.py +0 -0
  42. {arvi-0.2.4 → arvi-0.2.6}/arvi/stats.py +0 -0
  43. {arvi-0.2.4 → arvi-0.2.6}/arvi/stellar.py +0 -0
  44. {arvi-0.2.4 → arvi-0.2.6}/arvi/translations.py +0 -0
  45. {arvi-0.2.4 → arvi-0.2.6}/arvi.egg-info/dependency_links.txt +0 -0
  46. {arvi-0.2.4 → arvi-0.2.6}/arvi.egg-info/requires.txt +0 -0
  47. {arvi-0.2.4 → arvi-0.2.6}/arvi.egg-info/top_level.txt +0 -0
  48. {arvi-0.2.4 → arvi-0.2.6}/docs/API.md +0 -0
  49. {arvi-0.2.4 → arvi-0.2.6}/docs/detailed.ipynb +0 -0
  50. {arvi-0.2.4 → arvi-0.2.6}/docs/downloading_data.md +0 -0
  51. {arvi-0.2.4 → arvi-0.2.6}/docs/index.md +0 -0
  52. {arvi-0.2.4 → arvi-0.2.6}/docs/logo/detective.png +0 -0
  53. {arvi-0.2.4 → arvi-0.2.6}/docs/logo/logo.png +0 -0
  54. {arvi-0.2.4 → arvi-0.2.6}/docs/stylesheets/extra.css +0 -0
  55. {arvi-0.2.4 → arvi-0.2.6}/mkdocs.yml +0 -0
  56. {arvi-0.2.4 → arvi-0.2.6}/pyproject.toml +0 -0
  57. {arvi-0.2.4 → arvi-0.2.6}/setup.cfg +0 -0
  58. {arvi-0.2.4 → arvi-0.2.6}/setup.py +0 -0
  59. {arvi-0.2.4 → arvi-0.2.6}/tests/HD10700-Bcor_ESPRESSO18.rdb +0 -0
  60. {arvi-0.2.4 → arvi-0.2.6}/tests/test_binning.py +0 -0
  61. {arvi-0.2.4 → arvi-0.2.6}/tests/test_config.py +0 -0
  62. {arvi-0.2.4 → arvi-0.2.6}/tests/test_import_object.py +0 -0
  63. {arvi-0.2.4 → arvi-0.2.6}/tests/test_simbad.py +0 -0
  64. {arvi-0.2.4 → arvi-0.2.6}/tests/test_stats.py +0 -0
{arvi-0.2.4/arvi.egg-info → arvi-0.2.6}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: arvi
-Version: 0.2.4
+Version: 0.2.6
 Summary: The Automated RV Inspector
 Author-email: João Faria <joao.faria@unige.ch>
 License: MIT
{arvi-0.2.4 → arvi-0.2.6}/arvi/dace_wrapper.py
@@ -7,7 +7,7 @@ from itertools import islice
 import numpy as np
 
 from .setup_logger import setup_logger
-from .utils import create_directory, all_logging_disabled, stdout_disabled, tqdm
+from .utils import create_directory, all_logging_disabled, stdout_disabled, timer, tqdm
 
 
 def load_spectroscopy(user=None):
@@ -86,6 +86,11 @@ def get_arrays(result, latest_pipeline=True, ESPRESSO_mode='HR11', NIRPS_mode='H
             i = [i for i, pipe in enumerate(pipelines) if ESPRESSO_mode in pipe][0]
             pipelines = [pipelines[i]]
 
+        # select NIRPS mode
+        if 'NIRPS' in inst:
+            if any(this_mode := [p for p in pipelines if NIRPS_mode in p]):
+                pipelines = this_mode
+
         if latest_pipeline:
            npipe = len(pipelines)
            if 'NIRPS' in inst and any(['LBL' in p for p in pipelines]):
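The added NIRPS branch uses an assignment expression: the filtered list is bound to this_mode and tested for truthiness in one step. A standalone sketch of the idiom (pipeline names are invented):

pipelines = ['3.0.0 HE', '3.0.0 HA', 'LBL HE']
NIRPS_mode = 'HE'
if any(this_mode := [p for p in pipelines if NIRPS_mode in p]):
    pipelines = this_mode
print(pipelines)  # ['3.0.0 HE', 'LBL HE']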
@@ -164,19 +169,19 @@ def get_observations_from_instrument(star, instrument, user=None, main_id=None,
         dictionary with data from DACE
     """
     Spectroscopy = load_spectroscopy(user)
-
     found_dace_id = False
-    try:
-        dace_id = get_dace_id(star, verbose=verbose, raise_error=True)
-        found_dace_id = True
-    except ValueError as e:
-        if main_id is not None:
-            try:
-                dace_id = get_dace_id(main_id, verbose=verbose, raise_error=True)
-                found_dace_id = True
-            except ValueError:
-                pass
-
+    with timer('simbad query'):
+        try:
+            dace_id = get_dace_id(star, verbose=verbose, raise_error=True)
+            found_dace_id = True
+        except ValueError as e:
+            if main_id is not None:
+                try:
+                    dace_id = get_dace_id(main_id, verbose=verbose, raise_error=True)
+                    found_dace_id = True
+                except ValueError:
+                    pass
+
     if not found_dace_id:
         try:
             with all_logging_disabled():
@@ -187,11 +192,16 @@ def get_observations_from_instrument(star, instrument, user=None, main_id=None,
         except TypeError:
             msg = f'no {instrument} observations for {star}'
             raise ValueError(msg) from None
-
-    filters = {
-        "ins_name": {"contains": [instrument]},
-        "obj_id_daceid": {"contains": [dace_id]}
-    }
+    if (isinstance(instrument, str)):
+        filters = {
+            "ins_name": {"contains": [instrument]},
+            "obj_id_daceid": {"contains": [dace_id]}
+        }
+    elif (isinstance(instrument, list)):
+        filters = {
+            "ins_name": {"contains": instrument},
+            "obj_id_daceid": {"contains": [dace_id]}
+        }
     with all_logging_disabled():
         result = Spectroscopy.query_database(filters=filters)
 
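Both branches build the same filter shape; only the ins_name list differs. A sketch of the equivalent logic (build_filters and the dace_id value are illustrative, not arvi API):

def build_filters(instrument, dace_id):
    # wrap a single instrument name in a list; pass a list through unchanged
    names = [instrument] if isinstance(instrument, str) else list(instrument)
    return {"ins_name": {"contains": names},
            "obj_id_daceid": {"contains": [dace_id]}}

print(build_filters('HARPS', 'some-dace-id'))
print(build_filters(['CORALIE', 'HRS'], 'some-dace-id'))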
@@ -302,12 +312,14 @@ def get_observations(star, instrument=None, user=None, main_id=None, verbose=Tru
         result[inst] = dict(result[inst])
     #
 
-    instruments = list(result.keys())
+    instruments = list(map(str, result.keys()))
 
     if instrument is not None:
         # select only the provided instrument (if it's there)
-        instruments = [inst for inst in instruments if instrument in inst]
-
+        if (isinstance(instrument, str)):
+            instruments = [inst for inst in instruments if instrument in inst]
+        elif (isinstance(instrument, list)):
+            instruments = [inst for inst in instruments if any(i in inst for i in instrument)]
     if len(instruments) == 0:
         if instrument is None:
             msg = f'no observations for {star}'
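Note the matching is substring-based, so a request for 'HARPS' also selects versioned keys such as 'HARPS03' or 'HARPS15'. A small sketch (instrument keys are hypothetical):

instruments = ['HARPS03', 'HARPS15', 'ESPRESSO19']
wanted = ['HARPS', 'NIRPS']
print([inst for inst in instruments if any(i in inst for i in wanted)])
# ['HARPS03', 'HARPS15']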
@@ -331,30 +343,53 @@ def get_observations(star, instrument=None, user=None, main_id=None, verbose=Tru
     # else:
     #     return -1
 
-    # sort pipelines, must be extra careful with HARPS/HARPN pipeline version numbers
-    # got here with the help of DeepSeek
-    from re import match
-    def custom_sort_key(s):
-        s = s[0]
-        # Check for version number pattern (e.g., 3.2.5 or 3.2.5-EGGS)
-        version_match = match(r'^(\d+(?:\.\d+)*)(?:[-\s](.*))?$', s)
-        if version_match:
-            version_parts = list(map(int, version_match.group(1).split('.')))
-            if len(version_parts) == 2:
-                version_parts.insert(1, -1)
-            return (0, 1, version_parts)
-        # Check for scientific reference pattern (e.g., 2004A&A...)
-        year_match = match(r'^(\d{4})', s)
-        if year_match:
-            year = int(year_match.group(1))
-            return (1, year)
-        # For all other strings, sort alphabetically
-        return (2, s)
-
-    # from functools import cmp_to_key
+    # # sort pipelines, must be extra careful with HARPS/HARPN pipeline version numbers
+    # # got here with the help of DeepSeek
+    # # from functools import cmp_to_key
+    # from re import match
+    # def custom_sort_key(s):
+    #     s = s[0]
+    #     # Check for version number pattern (e.g., 3.2.5 or 3.2.5-EGGS)
+    #     version_match = match(r'^(\d+(?:\.\d+)*)(?:[-\s](.*))?$', s)
+    #     if version_match:
+    #         version_parts = list(map(int, version_match.group(1).split('.')))
+    #         if len(version_parts) == 2:
+    #             version_parts.insert(1, -1)
+    #         # if version_match.group(2) and 'LBL' in version_match.group(2):
+    #         #     version_parts.append(-1)
+    #         # else:
+    #         #     version_parts.append(0)
+    #         if version_match.group(2) is None:
+    #             version_parts.append('')
+    #         else:
+    #             version_parts.append(version_match.group(2))
+    #         return (0, 1, version_parts)
+    #     # Check for scientific reference pattern (e.g., 2004A&A...)
+    #     year_match = match(r'^(\d{4})', s)
+    #     if year_match:
+    #         year = int(year_match.group(1))
+    #         return (1, year)
+    #     # For all other strings, sort alphabetically
+    #     return (2, s)
+
+    def custom_key(val):
+        key = 0
+        key -= 2 if val == '3.5' else 0
+        key -= 1 if 'EGGS' in val else 0
+        key -= 1 if ('UHR' in val or 'MR' in val) else 0
+        key -= 1 if 'LBL' in val else 0
+        return str(key) if key != 0 else val
+
     new_result = {}
     for inst in instruments:
-        new_result[inst] = dict(sorted(result[inst].items(), key=custom_sort_key, reverse=True))
+        # new_result[inst] = dict(
+        #     sorted(result[inst].items(), key=custom_sort_key, reverse=True)
+        # )
+        # WARNING: not the same as reverse=True (not sure why)
+        sorted_keys = sorted(result[inst].keys(), key=custom_key)[::-1]
+        new_result[inst] = {}
+        for key in sorted_keys:
+            new_result[inst][key] = result[inst][key]
 
     if verbose:
         logger.info('RVs available from')
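On the "not the same as reverse=True" warning above: Python's sort is stable, so sorted(..., reverse=True) keeps the input order of keys that compare equal, while sorted(...)[::-1] reverses it. A sketch with the custom_key above (pipeline names invented; the first two map to the same key, '-1'):

def custom_key(val):
    key = 0
    key -= 2 if val == '3.5' else 0
    key -= 1 if 'EGGS' in val else 0
    key -= 1 if ('UHR' in val or 'MR' in val) else 0
    key -= 1 if 'LBL' in val else 0
    return str(key) if key != 0 else val

keys = ['3.5 EGGS', '3.0.0 LBL', '2.2.8']
print(sorted(keys, key=custom_key)[::-1])          # ['2.2.8', '3.0.0 LBL', '3.5 EGGS']
print(sorted(keys, key=custom_key, reverse=True))  # ['2.2.8', '3.5 EGGS', '3.0.0 LBL']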
@@ -388,7 +423,7 @@ def check_existing(output_directory, files, type):
     ]
 
     if type == 'S2D':
-        existing = [
+        existing += [
             f.partition('.fits')[0] for f in os.listdir(output_directory)
             if 'e2ds' in f
         ]
@@ -506,7 +541,7 @@ def do_download_filetype(type, raw_files, output_directory, clobber=False, user=
     # check existing files to avoid re-downloading
     if not clobber:
         raw_files = check_existing(output_directory, raw_files, type)
-
+    
     n = raw_files.size
 
     # any file left to download?
{arvi-0.2.4 → arvi-0.2.6}/arvi/gaia_wrapper.py
@@ -3,8 +3,6 @@ from io import StringIO
 from csv import DictReader
 import requests
 
-from astropy.coordinates import SkyCoord
-
 DATA_PATH = os.path.dirname(__file__)
 DATA_PATH = os.path.join(DATA_PATH, 'data')
 
@@ -78,6 +76,8 @@ class gaia:
         Args:
             star (str): The name of the star to query simbad
         """
+        from astropy.coordinates import SkyCoord
+
         self.star = star
 
         if simbad is None:
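Moving the SkyCoord import into the constructor is a lazy-import pattern: astropy is only loaded when a gaia object is actually built, which shortens a plain `import arvi`. The general idiom (function name hypothetical):

def make_coord(ra, dec):
    from astropy.coordinates import SkyCoord  # loaded on first call, then cached in sys.modules
    return SkyCoord(ra, dec, unit='deg')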
{arvi-0.2.4 → arvi-0.2.6}/arvi/instrument_specific.py
@@ -5,7 +5,7 @@ from .setup_logger import setup_logger
 from .utils import ESPRESSO_ADC_issues, ESPRESSO_cryostat_issues
 
 
-# HARPS started operations in October 1st, 2003
+# HARPS started operations on October 1st, 2003
 # https://www.eso.org/sci/facilities/lasilla/instruments/harps/news.html
 HARPS_start = 52913
 
@@ -21,6 +21,11 @@ HARPS_technical_intervention = 57170
 # when the instrument was handed back to Science Operations.
 HARPS_technical_intervention_range = (57161, 57176)
 
+
+# ESPRESSO started operations on October 1st, 2018
+# see Pepe et al. (2021, A&A 645, A96)
+ESPRESSO_start = 58392
+
 # ESPRESSO fiber link upgrade (1 July 2019)
 ESPRESSO_technical_intervention = 58665
 
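A quick check that MJD 58392 is indeed 2018-10-01, using astropy (already imported elsewhere in arvi):

from astropy.time import Time
print(Time(58392, format='mjd').iso)  # 2018-10-01 00:00:00.000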
@@ -102,18 +107,11 @@ def divide_HARPS(self):
 
 
 def check(self, instrument):
-    logger = setup_logger()
-    instruments = self._check_instrument(instrument)
-    if instruments is None:
-        if self.verbose:
-            logger.error(f"HARPS_fiber_commissioning: no data from {instrument}")
-        return None
-    return instruments
-
+    return self._check_instrument(instrument)
 
 # HARPS commissioning
 def HARPS_commissioning(self, mask=True, plot=True):
-    """ Identify and optionally mask points during HARPS commissioning (HARPS).
+    """ Identify and optionally mask points during HARPS commissioning.
 
     Args:
         mask (bool, optional):
@@ -175,6 +173,38 @@ def HARPS_fiber_commissioning(self, mask=True, plot=True):
     return affected
 
 
+# ESPRESSO commissioning
+def ESPRESSO_commissioning(self, mask=True, plot=True):
+    """ Identify and optionally mask points during ESPRESSO commissioning.
+
+    Args:
+        mask (bool, optional):
+            Whether to mask out the points.
+        plot (bool, optional):
+            Whether to plot the masked points.
+    """
+    logger = setup_logger()
+    if check(self, 'ESPRESSO') is None:
+        return
+
+    affected = self.time < ESPRESSO_start
+    total_affected = affected.sum()
+
+    if self.verbose:
+        n = total_affected
+        logger.info(f"there {'are'[:n^1]}{'is'[n^1:]} {n} frame{'s'[:n^1]} "
+                    "during ESPRESSO commissioning")
+
+    if mask:
+        self.mask[affected] = False
+        self._propagate_mask_changes()
+
+    if plot:
+        self.plot(show_masked=True)
+
+    return affected
+
+
 # ESPRESSO ADC issues
 def ADC_issues(self, mask=True, plot=True, check_headers=False):
     """ Identify and optionally mask points affected by ADC issues (ESPRESSO).
@@ -316,31 +346,25 @@ class ISSUES:
             plot (bool, optional): Whether to plot the masked points.
         """
         logger = setup_logger()
-        try:
-            adc = ADC_issues(self, mask, plot, **kwargs)
-        except IndexError:
-            logger.error('are the data binned? cannot proceed to mask these points...')
-
-        try:
-            cryostat = blue_cryostat_issues(self, mask, plot)
-        except IndexError:
-            logger.error('are the data binned? cannot proceed to mask these points...')
-
-        try:
-            harps_comm = HARPS_commissioning(self, mask, plot)
-        except IndexError:
-            logger.error('are the data binned? cannot proceed to mask these points...')
-
-        try:
-            harps_fibers = HARPS_fiber_commissioning(self, mask, plot)
-        except IndexError:
-            logger.error('are the data binned? cannot proceed to mask these points...')
 
-        # if None in (adc, cryostat, harps_comm, harps_fibers):
-        #     return
+        functions = (
+            ESPRESSO_commissioning,
+            ADC_issues,
+            blue_cryostat_issues,
+            HARPS_commissioning,
+            HARPS_fiber_commissioning
+        )
+        results = []
+
+        for fun in functions:
+            try:
+                results.append(fun(self, mask, plot, **kwargs))
+            except IndexError:
+                logger.error('are the data binned? cannot proceed to mask these points...')
+
+        results = list(filter(lambda x: x is not None, results))
 
         try:
-            # return adc | cryostat
-            return np.logical_or.reduce((adc, cryostat, harps_comm, harps_fibers))
+            return np.logical_or.reduce(results)
         except UnboundLocalError:
             return
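np.logical_or.reduce combines the per-issue boolean masks into a single mask of all affected points. Toy example:

import numpy as np
adc      = np.array([True, False, False, False])
cryostat = np.array([False, False, True, False])
print(np.logical_or.reduce([adc, cryostat]))  # [ True False  True False]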
{arvi-0.2.4 → arvi-0.2.6}/arvi/simbad_wrapper.py
@@ -1,6 +1,7 @@
 import os
 import requests
 from dataclasses import dataclass
+from functools import partial
 
 import numpy as np
 
@@ -63,6 +64,23 @@ JOIN ident ON oidref = oid
 WHERE id = '{star}';
 """
 
+HD_GJ_HIP_QUERY = """
+SELECT id2.id
+FROM ident AS id1 JOIN ident AS id2 USING(oidref)
+WHERE id1.id = '{star}' AND id2.id LIKE '{name}%';
+"""
+
+def find_identifier(identifier, star):
+    response = run_query(HD_GJ_HIP_QUERY.format(name=identifier, star=star))
+    if identifier in response:
+        return response.split('"')[1]
+    raise ValueError(f'no {identifier} identifier found for "{star}"')
+
+find_HD = partial(find_identifier, 'HD')
+find_GJ = partial(find_identifier, 'GJ')
+find_HIP = partial(find_identifier, 'HIP')
+
+
 @dataclass
 class Measurements:
     teff: list
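Usage sketch for the new helpers (star name illustrative; the actual result comes from the Simbad TAP service at runtime):

from arvi.simbad_wrapper import find_HD, find_GJ, find_HIP
print(find_HD('tau Cet'))  # e.g. 'HD 10700'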
arvi-0.2.6/arvi/sophie_wrapper.py
@@ -0,0 +1,111 @@
+from multiprocessing.pool import ThreadPool
+import re
+import requests
+from io import StringIO
+
+import numpy as np
+
+from .setup_logger import setup_logger
+
+URL_CCF = "http://atlas.obs-hp.fr/sophie/sophie.cgi?n=sophiecc&ob=date&a=t&o={target}"
+URL_HEADER = "http://atlas.obs-hp.fr/sophie/sophie.cgi?n=sophiecc&c=i&z=fd&a=t&o=sophie:[ccf,{seq},{mask},0]"
+
+def extract_keyword(keyword, text, raise_error=True):
+    for line in text.splitlines():
+        if keyword in line:
+            value = re.findall(fr'{keyword}\s+([\'\w\d.]+)', line)[0]
+            value = value.replace("'", "")
+            try:
+                return float(value)
+            except ValueError:
+                return value
+    if raise_error:
+        raise KeyError(f'Keyword {keyword} not found')
+
+def query_sophie_archive(star: str, verbose=True):
+    from .timeseries import RV
+    logger = setup_logger()
+
+    resp = requests.get(URL_CCF.format(target=star))
+    if 'leda did not return a position for the name' in resp.text:
+        raise ValueError(f'no SOPHIE observations for {star}')
+
+    data = np.genfromtxt(StringIO(resp.text), dtype=None, usecols=(0, 4),
+                         names=("seq", "mask"))
+
+    if verbose:
+        logger.info(f'found {len(data)} SOPHIE observations for {star}')
+
+    urls = [URL_HEADER.format(seq=seq, mask=mask) for seq, mask in data]
+    with ThreadPool(8) as pool:
+        responses = pool.map(requests.get, urls)
+
+    bjd, vrad, svrad = [], [], []
+    fwhm, contrast = [], []
+    ccf_mask = []
+    _quantities = []
+    errors = []
+
+    for i, resp in enumerate(responses):
+        if resp.text == '':
+            errors.append(i)
+            continue
+
+        try:
+            t, v = map(lambda k: extract_keyword(k, resp.text),
+                       ("OHP DRS BJD", "OHP DRS CCF RV"))
+        except KeyError:
+            errors.append(i)
+            continue
+        else:
+            bjd.append(t)
+            vrad.append(v)
+
+        try:
+            svrad.append(extract_keyword("OHP DRS CCF ERR", resp.text))
+        except KeyError:
+            try:
+                svrad.append(1e-3 * extract_keyword("OHP DRS DVRMS", resp.text))
+            except KeyError:
+                bjd.pop(-1)
+                vrad.pop(-1)
+                errors.append(i)
+                continue
+
+        fwhm.append(extract_keyword('OHP DRS CCF FWHM', resp.text))
+        _quantities.append('fwhm')
+
+        contrast.append(extract_keyword('OHP DRS CCF CONTRAST', resp.text))
+        _quantities.append('contrast')
+
+        ccf_mask.append(extract_keyword('OHP DRS CCF MASK', resp.text))
+        _quantities.append('ccf_mask')
+
+    if len(errors) > 0:
+        logger.warning(f'Could not retrieve {len(errors)} observation'
+                       f'{"s" if len(errors) > 1 else ""}')
+
+    bjd = np.array(bjd) - 2400000.5
+
+    s = RV.from_arrays(star, bjd, vrad, svrad, 'SOPHIE',
+                       fwhm=fwhm, fwhm_err=2*np.array(svrad),
+                       contrast=contrast,
+                       ccf_mask=ccf_mask)
+    s.units = 'km/s'
+
+    # strings
+    for q in ['date_night', 'prog_id', 'raw_file', 'pub_reference']:
+        setattr(s, q, np.full(bjd.size, ''))
+        _quantities.append(q)
+
+    s._quantities = np.array(_quantities)
+
+    setattr(s, 'SOPHIE', s)
+    s._child = False
+    s.verbose = False
+    s._build_arrays()
+    s.change_units('m/s')
+    s.verbose = verbose
+
+    return s
+
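Usage sketch for the new wrapper (requires network access to the OHP archive; target name illustrative):

from arvi.sophie_wrapper import query_sophie_archive
s = query_sophie_archive('HD1461')
print(s.units)  # 'm/s', after the change_units call above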
{arvi-0.2.4 → arvi-0.2.6}/arvi/timeseries.py
@@ -55,7 +55,7 @@ class RV(ISSUES, REPORTS):
         Information on the target from Simbad
     """
     star: str
-    instrument: str = field(init=True, repr=False, default=None)
+    instrument: Union[str, list] = field(init=True, repr=False, default=None)
     verbose: bool = field(init=True, repr=False, default=True)
     do_maxerror: Union[bool, float] = field(init=True, repr=False, default=False)
     do_secular_acceleration: bool = field(init=True, repr=False, default=True)
@@ -64,19 +64,21 @@ class RV(ISSUES, REPORTS):
     only_latest_pipeline: bool = field(init=True, repr=False, default=True)
     load_extra_data: Union[bool, str] = field(init=True, repr=False, default=False)
     check_drs_qc: bool = field(init=True, repr=False, default=True)
-    user: bool = field(init=True, repr=False, default=None)
+    check_sophie_archive: bool = field(init=True, repr=False, default=False)
+    user: Union[str, None] = field(init=True, repr=False, default=None)
     #
     units = 'm/s'
     _child: bool = field(init=True, repr=False, default=False)
-    _did_secular_acceleration: bool = field(init=False, repr=False, default=False)
-    _did_sigma_clip: bool = field(init=False, repr=False, default=False)
-    _did_adjust_means: bool = field(init=False, repr=False, default=False)
-    _did_simbad_query: bool = field(init=False, repr=False, default=False)
-    _did_gaia_query: bool = field(init=False, repr=False, default=False)
-    _did_toi_query: bool = field(init=False, repr=False, default=False)
-    _raise_on_error: bool = field(init=True, repr=False, default=True)
-    __masked_numbers: bool = field(init=False, repr=False, default=False)
-    #
+    #
+    _did_secular_acceleration : bool = field(init=False, repr=False, default=False)
+    _did_sigma_clip : bool = field(init=False, repr=False, default=False)
+    _did_adjust_means : bool = field(init=False, repr=False, default=False)
+    _did_simbad_query : bool = field(init=False, repr=False, default=False)
+    _did_gaia_query : bool = field(init=False, repr=False, default=False)
+    _did_toi_query : bool = field(init=False, repr=False, default=False)
+    _raise_on_error : bool = field(init=True, repr=False, default=True)
+    __masked_numbers : bool = field(init=False, repr=False, default=False)
+    #
     _simbad = None
     _gaia = None
     _toi = None
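With these fields, RV now accepts a list of instruments, and can optionally query the SOPHIE archive when no instrument filter is given (sketch; see also the new test at the end of this diff):

from arvi import RV
s = RV('HD28185', instrument=['CORALIE', 'HRS'])  # list of instruments
s2 = RV('HD28185', check_sophie_archive=True)     # instrument=None, so SOPHIE is also queried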
@@ -104,7 +106,7 @@ class RV(ISSUES, REPORTS):
 
         if self._child:
             return None
-
+        
         if self._did_simbad_query:
             return None
 
@@ -288,6 +290,21 @@ class RV(ISSUES, REPORTS):
         # all other quantities
         self._build_arrays()
 
+        # self.actin = get_actin_data(self, verbose=self.verbose)
+
+
+        # check for SOPHIE observations
+        cond = not self._child
+        cond = cond and self.instrument is None
+        cond = cond and self.check_sophie_archive
+        if cond:
+            try:
+                from arvi.sophie_wrapper import query_sophie_archive
+                self.__add__(query_sophie_archive(self.star, verbose=self.verbose),
+                             inplace=True)
+            except Exception as e:
+                print(e)
+
         # do clip_maxerror, secular_acceleration, sigmaclip, adjust_means
         if not self._child:
             if self.do_maxerror:
@@ -1376,7 +1393,7 @@ class RV(ISSUES, REPORTS):
             instrument_index = self.obs[index]
             np.array(self.instruments)[instrument_index - 1]
         except IndexError:
-            logger.errors(f'index {index} is out of bounds for N={self.N}')
+            logger.error(f'index {index} is out of bounds for N={self.N}')
             return
 
         if self.verbose:
@@ -1408,7 +1425,7 @@ class RV(ISSUES, REPORTS):
             instrument_index = self.obs[index]
             np.array(self.instruments)[instrument_index - 1]
         except IndexError:
-            logger.errors(f'index {index} is out of bounds for N={self.N}')
+            logger.error(f'index {index} is out of bounds for N={self.N}')
             return
 
         if self.verbose:
{arvi-0.2.4 → arvi-0.2.6}/arvi/utils.py
@@ -79,14 +79,17 @@ def timer(name=None):
     if name is None:
         logger.debug('starting timer')
     else:
-        logger.debug(f'starting timer: {name}')
+        logger.debug(f'{name}: starting timer')
 
     start = time.time()
     try:
         yield
     finally:
         end = time.time()
-        logger.debug(f'elapsed time: {end - start:.2f} seconds')
+        if name is None:
+            logger.debug(f'elapsed time {end - start:.2f} seconds')
+        else:
+            logger.debug(f'{name}: elapsed time {end - start:.2f} seconds')
 
 
 def sanitize_path(path):
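Usage sketch of the timer context manager with the new message format (messages go to the DEBUG log; timing value illustrative):

from arvi.utils import timer
with timer('simbad query'):
    ...  # timed work
# simbad query: starting timer
# simbad query: elapsed time 0.42 seconds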
{arvi-0.2.4 → arvi-0.2.6/arvi.egg-info}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: arvi
-Version: 0.2.4
+Version: 0.2.6
 Summary: The Automated RV Inspector
 Author-email: João Faria <joao.faria@unige.ch>
 License: MIT
{arvi-0.2.4 → arvi-0.2.6}/arvi.egg-info/SOURCES.txt
@@ -28,6 +28,7 @@ arvi/programs.py
 arvi/reports.py
 arvi/setup_logger.py
 arvi/simbad_wrapper.py
+arvi/sophie_wrapper.py
 arvi/spectra.py
 arvi/stats.py
 arvi/stellar.py
{arvi-0.2.4 → arvi-0.2.6}/tests/test_create_RV.py
@@ -19,3 +19,10 @@ def test_from_rdb(change_test_dir):
     assert (s.bispan == 0).all()
     assert isnan(s.rhk).all()
 
+
+def test_list_instruments():
+    from arvi import RV, config
+    config.request_as_public = True
+    _ = RV('HD28185', instrument='CORALIE')
+    _ = RV('HD28185', instrument=['CORALIE'])
+    _ = RV('HD28185', instrument=['CORALIE', 'HRS'])