arvi 0.2.4__tar.gz → 0.2.11__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (66)
  1. {arvi-0.2.4 → arvi-0.2.11}/.github/workflows/docs-gh-pages.yml +2 -2
  2. {arvi-0.2.4 → arvi-0.2.11}/.github/workflows/install.yml +2 -2
  3. {arvi-0.2.4 → arvi-0.2.11}/.github/workflows/python-publish.yml +3 -3
  4. {arvi-0.2.4/arvi.egg-info → arvi-0.2.11}/PKG-INFO +1 -1
  5. {arvi-0.2.4 → arvi-0.2.11}/arvi/dace_wrapper.py +110 -58
  6. {arvi-0.2.4 → arvi-0.2.11}/arvi/gaia_wrapper.py +2 -2
  7. {arvi-0.2.4 → arvi-0.2.11}/arvi/instrument_specific.py +93 -54
  8. arvi-0.2.11/arvi/kepmodel_wrapper.py +296 -0
  9. {arvi-0.2.4 → arvi-0.2.11}/arvi/kima_wrapper.py +42 -7
  10. {arvi-0.2.4 → arvi-0.2.11}/arvi/nasaexo_wrapper.py +7 -3
  11. {arvi-0.2.4 → arvi-0.2.11}/arvi/plots.py +1 -3
  12. {arvi-0.2.4 → arvi-0.2.11}/arvi/programs.py +8 -4
  13. {arvi-0.2.4 → arvi-0.2.11}/arvi/reports.py +108 -1
  14. {arvi-0.2.4 → arvi-0.2.11}/arvi/simbad_wrapper.py +38 -0
  15. arvi-0.2.11/arvi/sophie_wrapper.py +111 -0
  16. {arvi-0.2.4 → arvi-0.2.11}/arvi/stats.py +30 -5
  17. {arvi-0.2.4 → arvi-0.2.11}/arvi/timeseries.py +568 -189
  18. {arvi-0.2.4 → arvi-0.2.11}/arvi/utils.py +137 -10
  19. {arvi-0.2.4 → arvi-0.2.11/arvi.egg-info}/PKG-INFO +1 -1
  20. {arvi-0.2.4 → arvi-0.2.11}/arvi.egg-info/SOURCES.txt +2 -0
  21. arvi-0.2.11/tests/test_create_RV.py +43 -0
  22. arvi-0.2.4/tests/test_create_RV.py +0 -21
  23. {arvi-0.2.4 → arvi-0.2.11}/.github/dependabot.yml +0 -0
  24. {arvi-0.2.4 → arvi-0.2.11}/.gitignore +0 -0
  25. {arvi-0.2.4 → arvi-0.2.11}/LICENSE +0 -0
  26. {arvi-0.2.4 → arvi-0.2.11}/README.md +0 -0
  27. {arvi-0.2.4 → arvi-0.2.11}/arvi/HZ.py +0 -0
  28. {arvi-0.2.4 → arvi-0.2.11}/arvi/__init__.py +0 -0
  29. {arvi-0.2.4 → arvi-0.2.11}/arvi/ariadne_wrapper.py +0 -0
  30. {arvi-0.2.4 → arvi-0.2.11}/arvi/berv.py +0 -0
  31. {arvi-0.2.4 → arvi-0.2.11}/arvi/binning.py +0 -0
  32. {arvi-0.2.4 → arvi-0.2.11}/arvi/config.py +0 -0
  33. {arvi-0.2.4 → arvi-0.2.11}/arvi/data/extra/HD86226_PFS1.rdb +0 -0
  34. {arvi-0.2.4 → arvi-0.2.11}/arvi/data/extra/HD86226_PFS2.rdb +0 -0
  35. {arvi-0.2.4 → arvi-0.2.11}/arvi/data/extra/metadata.json +0 -0
  36. {arvi-0.2.4 → arvi-0.2.11}/arvi/data/info.svg +0 -0
  37. {arvi-0.2.4 → arvi-0.2.11}/arvi/data/obs_affected_ADC_issues.dat +0 -0
  38. {arvi-0.2.4 → arvi-0.2.11}/arvi/data/obs_affected_blue_cryostat_issues.dat +0 -0
  39. {arvi-0.2.4 → arvi-0.2.11}/arvi/exofop_wrapper.py +0 -0
  40. {arvi-0.2.4 → arvi-0.2.11}/arvi/extra_data.py +0 -0
  41. {arvi-0.2.4 → arvi-0.2.11}/arvi/headers.py +0 -0
  42. {arvi-0.2.4 → arvi-0.2.11}/arvi/lbl_wrapper.py +0 -0
  43. {arvi-0.2.4 → arvi-0.2.11}/arvi/setup_logger.py +0 -0
  44. {arvi-0.2.4 → arvi-0.2.11}/arvi/spectra.py +0 -0
  45. {arvi-0.2.4 → arvi-0.2.11}/arvi/stellar.py +0 -0
  46. {arvi-0.2.4 → arvi-0.2.11}/arvi/translations.py +0 -0
  47. {arvi-0.2.4 → arvi-0.2.11}/arvi.egg-info/dependency_links.txt +0 -0
  48. {arvi-0.2.4 → arvi-0.2.11}/arvi.egg-info/requires.txt +0 -0
  49. {arvi-0.2.4 → arvi-0.2.11}/arvi.egg-info/top_level.txt +0 -0
  50. {arvi-0.2.4 → arvi-0.2.11}/docs/API.md +0 -0
  51. {arvi-0.2.4 → arvi-0.2.11}/docs/detailed.ipynb +0 -0
  52. {arvi-0.2.4 → arvi-0.2.11}/docs/downloading_data.md +0 -0
  53. {arvi-0.2.4 → arvi-0.2.11}/docs/index.md +0 -0
  54. {arvi-0.2.4 → arvi-0.2.11}/docs/logo/detective.png +0 -0
  55. {arvi-0.2.4 → arvi-0.2.11}/docs/logo/logo.png +0 -0
  56. {arvi-0.2.4 → arvi-0.2.11}/docs/stylesheets/extra.css +0 -0
  57. {arvi-0.2.4 → arvi-0.2.11}/mkdocs.yml +0 -0
  58. {arvi-0.2.4 → arvi-0.2.11}/pyproject.toml +0 -0
  59. {arvi-0.2.4 → arvi-0.2.11}/setup.cfg +0 -0
  60. {arvi-0.2.4 → arvi-0.2.11}/setup.py +0 -0
  61. {arvi-0.2.4 → arvi-0.2.11}/tests/HD10700-Bcor_ESPRESSO18.rdb +0 -0
  62. {arvi-0.2.4 → arvi-0.2.11}/tests/test_binning.py +0 -0
  63. {arvi-0.2.4 → arvi-0.2.11}/tests/test_config.py +0 -0
  64. {arvi-0.2.4 → arvi-0.2.11}/tests/test_import_object.py +0 -0
  65. {arvi-0.2.4 → arvi-0.2.11}/tests/test_simbad.py +0 -0
  66. {arvi-0.2.4 → arvi-0.2.11}/tests/test_stats.py +0 -0
.github/workflows/docs-gh-pages.yml
@@ -26,13 +26,13 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Checkout
-        uses: actions/checkout@v4
+        uses: actions/checkout@v5

       - name: Setup Pages
         uses: actions/configure-pages@v5

       - name: Set up Python
-        uses: actions/setup-python@v5
+        uses: actions/setup-python@v6
         with:
           python-version: "3.10"

.github/workflows/install.yml
@@ -16,9 +16,9 @@ jobs:
         python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"]

     steps:
-    - uses: actions/checkout@v4
+    - uses: actions/checkout@v5
    - name: Set up Python ${{ matrix.python-version }}
-      uses: actions/setup-python@v5
+      uses: actions/setup-python@v6
       with:
         python-version: ${{ matrix.python-version }}

.github/workflows/python-publish.yml
@@ -22,9 +22,9 @@ jobs:
     runs-on: ubuntu-latest

     steps:
-    - uses: actions/checkout@v4
+    - uses: actions/checkout@v5
    - name: Set up Python
-      uses: actions/setup-python@v5
+      uses: actions/setup-python@v6
       with:
         python-version: '3.x'
    - name: Install dependencies
@@ -34,7 +34,7 @@ jobs:
    - name: Build package
      run: python -m build
    - name: Publish package
-      uses: pypa/gh-action-pypi-publish@27b31702a0e7fc50959f5ad993c78deac1bdfc29
+      uses: pypa/gh-action-pypi-publish@ed0c53931b1dc9bd32cbe73a98c7f6766f8a527e
      with:
        user: __token__
        password: ${{ secrets.PYPI_API_TOKEN }}
PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: arvi
-Version: 0.2.4
+Version: 0.2.11
 Summary: The Automated RV Inspector
 Author-email: João Faria <joao.faria@unige.ch>
 License: MIT
arvi/dace_wrapper.py
@@ -2,15 +2,15 @@ import os
 import sys
 import tarfile
 import collections
-from functools import lru_cache
+from functools import lru_cache, partial
 from itertools import islice
 import numpy as np

 from .setup_logger import setup_logger
-from .utils import create_directory, all_logging_disabled, stdout_disabled, tqdm
+from .utils import create_directory, all_logging_disabled, stdout_disabled, timer, tqdm


-def load_spectroscopy(user=None):
+def load_spectroscopy(user=None, verbose=True):
     logger = setup_logger()
     with all_logging_disabled():
         from dace_query.spectroscopy import SpectroscopyClass, Spectroscopy as default_Spectroscopy
@@ -19,7 +19,8 @@ def load_spectroscopy(user=None):
     from .config import config
     # requesting as public
     if config.request_as_public:
-        logger.warning('requesting DACE data as public')
+        if verbose:
+            logger.warning('requesting DACE data as public')
         with all_logging_disabled():
             dace = DaceClass(dace_rc_config_path='none')
         return SpectroscopyClass(dace_instance=dace)
@@ -86,11 +87,16 @@ def get_arrays(result, latest_pipeline=True, ESPRESSO_mode='HR11', NIRPS_mode='H
            i = [i for i, pipe in enumerate(pipelines) if ESPRESSO_mode in pipe][0]
            pipelines = [pipelines[i]]

+        # select NIRPS mode
+        if 'NIRPS' in inst:
+            if any(this_mode := [p for p in pipelines if NIRPS_mode in p]):
+                pipelines = this_mode
+
         if latest_pipeline:
             npipe = len(pipelines)
             if 'NIRPS' in inst and any(['LBL' in p for p in pipelines]):
                 # TODO: correctly load both CCF and LBL
-                pipelines = [pipelines[1]]
+                pipelines = [pipelines[0]]
             if 'HARPS' in inst and npipe > 1 and pipelines[1] == pipelines[0] + '-EGGS':
                 pipelines = pipelines[:2]
             else:
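The NIRPS mode selection above relies on an assignment expression (`:=`, Python 3.8+) to filter the pipeline list and test the result in one step: `this_mode` is bound inside the `any()` call and only used when the filter matched something. A minimal sketch of the pattern, with invented pipeline names:

```python
# Sketch of the walrus-operator filter used above; pipeline names are invented.
pipelines = ['3.0.0 HE', '3.0.0 HA', '3.0.0 HE LBL']
NIRPS_mode = 'HE'

# bind the filtered list and check that it is non-empty, in a single expression
if any(this_mode := [p for p in pipelines if NIRPS_mode in p]):
    pipelines = this_mode

print(pipelines)  # ['3.0.0 HE', '3.0.0 HE LBL']
```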
@@ -163,20 +169,20 @@ def get_observations_from_instrument(star, instrument, user=None, main_id=None,
         dict:
             dictionary with data from DACE
     """
-    Spectroscopy = load_spectroscopy(user)
-
+    Spectroscopy = load_spectroscopy(user, verbose)
     found_dace_id = False
-    try:
-        dace_id = get_dace_id(star, verbose=verbose, raise_error=True)
-        found_dace_id = True
-    except ValueError as e:
-        if main_id is not None:
-            try:
-                dace_id = get_dace_id(main_id, verbose=verbose, raise_error=True)
-                found_dace_id = True
-            except ValueError:
-                pass
-
+    with timer('dace_id query'):
+        try:
+            dace_id = get_dace_id(star, verbose=verbose, raise_error=True)
+            found_dace_id = True
+        except ValueError as e:
+            if main_id is not None:
+                try:
+                    dace_id = get_dace_id(main_id, verbose=verbose, raise_error=True)
+                    found_dace_id = True
+                except ValueError:
+                    pass
+
     if not found_dace_id:
         try:
             with all_logging_disabled():
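`timer` is newly imported from `arvi.utils`; its implementation is not part of this diff, but from the call site it behaves as a context manager that reports how long the wrapped block took. A minimal stand-in consistent with that usage (an assumption, not the package's actual code):

```python
import time
from contextlib import contextmanager

@contextmanager
def timer(label):
    # hypothetical stand-in for arvi.utils.timer: time the wrapped block
    start = time.perf_counter()
    try:
        yield
    finally:
        print(f'{label}: {time.perf_counter() - start:.3f} s')
```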
@@ -188,10 +194,16 @@ def get_observations_from_instrument(star, instrument, user=None, main_id=None,
             msg = f'no {instrument} observations for {star}'
             raise ValueError(msg) from None

-    filters = {
-        "ins_name": {"contains": [instrument]},
-        "obj_id_daceid": {"contains": [dace_id]}
-    }
+    if (isinstance(instrument, str)):
+        filters = {
+            "ins_name": {"contains": [instrument]},
+            "obj_id_daceid": {"contains": [dace_id]}
+        }
+    elif (isinstance(instrument, (list, tuple, np.ndarray))):
+        filters = {
+            "ins_name": {"contains": instrument},
+            "obj_id_daceid": {"contains": [dace_id]}
+        }
     with all_logging_disabled():
         result = Spectroscopy.query_database(filters=filters)
@@ -202,17 +214,22 @@ def get_observations_from_instrument(star, instrument, user=None, main_id=None,

     for inst in np.unique(result['ins_name']):
         mask1 = result['ins_name'] == inst
-        r[inst] = {}
+        r[str(inst)] = {}
+
+        key2 = 'ins_drs_version'
+        n_key2 = len(np.unique(result[key2][mask1]))
+        if len(np.unique(result['pub_bibcode'][mask1])) >= n_key2:
+            key2 = 'pub_bibcode'

-        for pipe in np.unique(result['ins_drs_version'][mask1]):
-            mask2 = mask1 & (result['ins_drs_version'] == pipe)
-            r[inst][pipe] = {}
+        for pipe in np.unique(result[key2][mask1]):
+            mask2 = mask1 & (result[key2] == pipe)
+            r[str(inst)][str(pipe)] = {}

             for ins_mode in np.unique(result['ins_mode'][mask2]):
                 mask3 = mask2 & (result['ins_mode'] == ins_mode)
                 _nan = np.full(mask3.sum(), np.nan)

-                r[inst][pipe][ins_mode] = {
+                r[str(inst)][str(pipe)][str(ins_mode)] = {
                     'texp': result['texp'][mask3],
                     'bispan': result['spectro_ccf_bispan'][mask3],
                     'bispan_err': result['spectro_ccf_bispan_err'][mask3],
@@ -224,7 +241,9 @@ def get_observations_from_instrument(star, instrument, user=None, main_id=None,
                     'rv': result['spectro_ccf_rv'][mask3],
                     'rv_err': result['spectro_ccf_rv_err'][mask3],
                     'berv': result['spectro_cal_berv'][mask3],
-                    'ccf_noise': _nan,
+                    'ccf_noise': np.sqrt(
+                        np.square(result['spectro_ccf_rv_err'][mask3]) - np.square(result['spectro_cal_drift_noise'][mask3])
+                    ),
                     'rhk': result['spectro_analysis_rhk'][mask3],
                     'rhk_err': result['spectro_analysis_rhk_err'][mask3],
                     'contrast': result['spectro_ccf_contrast'][mask3],
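The `ccf_noise` entry, previously filled with NaNs, is now recovered from the query result by subtracting the calibration drift noise from the total CCF RV uncertainty in quadrature, i.e. ccf_noise = sqrt(rv_err**2 - drift_noise**2). A standalone sketch of the computation:

```python
import numpy as np

# illustrative values (m/s); the real arrays come from the DACE query result
rv_err = np.array([1.0, 0.8, 1.2])       # spectro_ccf_rv_err
drift_noise = np.array([0.3, 0.2, 0.4])  # spectro_cal_drift_noise

# quadrature subtraction; yields NaN wherever drift_noise exceeds rv_err
ccf_noise = np.sqrt(np.square(rv_err) - np.square(drift_noise))
print(ccf_noise)  # [0.9539392  0.77459667 1.13137085]
```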
@@ -268,7 +287,7 @@ def get_observations(star, instrument=None, user=None, main_id=None, verbose=Tru
 def get_observations(star, instrument=None, user=None, main_id=None, verbose=True):
     logger = setup_logger()
     if instrument is None:
-        Spectroscopy = load_spectroscopy(user)
+        Spectroscopy = load_spectroscopy(user, verbose)

         try:
             with stdout_disabled(), all_logging_disabled():
@@ -302,12 +321,14 @@ def get_observations(star, instrument=None, user=None, main_id=None, verbose=Tru
         result[inst] = dict(result[inst])
     #

-    instruments = list(result.keys())
+    instruments = list(map(str, result.keys()))

     if instrument is not None:
         # select only the provided instrument (if it's there)
-        instruments = [inst for inst in instruments if instrument in inst]
-
+        if (isinstance(instrument, str)):
+            instruments = [inst for inst in instruments if instrument in inst]
+        elif (isinstance(instrument, list)):
+            instruments = [inst for inst in instruments if any(i in inst for i in instrument)]
     if len(instruments) == 0:
         if instrument is None:
             msg = f'no observations for {star}'
@@ -331,30 +352,57 @@ def get_observations(star, instrument=None, user=None, main_id=None, verbose=Tru
     # else:
     #     return -1

-    # sort pipelines, must be extra careful with HARPS/HARPN pipeline version numbers
-    # got here with the help of DeepSeek
-    from re import match
-    def custom_sort_key(s):
-        s = s[0]
-        # Check for version number pattern (e.g., 3.2.5 or 3.2.5-EGGS)
-        version_match = match(r'^(\d+(?:\.\d+)*)(?:[-\s](.*))?$', s)
-        if version_match:
-            version_parts = list(map(int, version_match.group(1).split('.')))
-            if len(version_parts) == 2:
-                version_parts.insert(1, -1)
-            return (0, 1, version_parts)
-        # Check for scientific reference pattern (e.g., 2004A&A...)
-        year_match = match(r'^(\d{4})', s)
-        if year_match:
-            year = int(year_match.group(1))
-            return (1, year)
-        # For all other strings, sort alphabetically
-        return (2, s)
-
-    # from functools import cmp_to_key
+    # # sort pipelines, must be extra careful with HARPS/HARPN pipeline version numbers
+    # # got here with the help of DeepSeek
+    # # from functools import cmp_to_key
+    # from re import match
+    # def custom_sort_key(s):
+    #     s = s[0]
+    #     # Check for version number pattern (e.g., 3.2.5 or 3.2.5-EGGS)
+    #     version_match = match(r'^(\d+(?:\.\d+)*)(?:[-\s](.*))?$', s)
+    #     if version_match:
+    #         version_parts = list(map(int, version_match.group(1).split('.')))
+    #         if len(version_parts) == 2:
+    #             version_parts.insert(1, -1)
+    #         # if version_match.group(2) and 'LBL' in version_match.group(2):
+    #         #     version_parts.append(-1)
+    #         # else:
+    #         #     version_parts.append(0)
+    #         if version_match.group(2) is None:
+    #             version_parts.append('')
+    #         else:
+    #             version_parts.append(version_match.group(2))
+    #         return (0, 1, version_parts)
+    #     # Check for scientific reference pattern (e.g., 2004A&A...)
+    #     year_match = match(r'^(\d{4})', s)
+    #     if year_match:
+    #         year = int(year_match.group(1))
+    #         return (1, year)
+    #     # For all other strings, sort alphabetically
+    #     return (2, s)
+
+    def custom_key(val, strip_EGGS=False):
+        if strip_EGGS:
+            val = val.replace('-EGGS', '').replace(' EGGS', '')
+        key = 0
+        key -= 1 if '3.5' in val else 0
+        key -= 1 if 'EGGS' in val else 0
+        key -= 1 if ('UHR' in val or 'MR' in val) else 0
+        key -= 1 if 'LBL' in val else 0
+        return str(key) if key != 0 else val
+
     new_result = {}
     for inst in instruments:
-        new_result[inst] = dict(sorted(result[inst].items(), key=custom_sort_key, reverse=True))
+        # new_result[inst] = dict(
+        #     sorted(result[inst].items(), key=custom_sort_key, reverse=True)
+        # )
+        if all(['EGGS' in k for k in result[inst].keys()]):
+            custom_key = partial(custom_key, strip_EGGS=True)
+        # WARNING: not the same as reverse=True (not sure why)
+        sorted_keys = sorted(result[inst].keys(), key=custom_key)[::-1]
+        new_result[inst] = {}
+        for key in sorted_keys:
+            new_result[inst][key] = result[inst][key]

     if verbose:
         logger.info('RVs available from')
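The replacement `custom_key` builds a negative score for "special" pipelines (EGGS reductions, UHR/MR modes, LBL, version 3.5) and falls back to the raw string otherwise; because '-' sorts before digits, those pipelines come first in ascending order and end up last after the `[::-1]` reversal, leaving the newest plain pipeline in front. A self-contained illustration with invented pipeline names:

```python
def custom_key(val, strip_EGGS=False):
    # copy of the key function introduced in the diff above
    if strip_EGGS:
        val = val.replace('-EGGS', '').replace(' EGGS', '')
    key = 0
    key -= 1 if '3.5' in val else 0
    key -= 1 if 'EGGS' in val else 0
    key -= 1 if ('UHR' in val or 'MR' in val) else 0
    key -= 1 if 'LBL' in val else 0
    return str(key) if key != 0 else val

keys = ['3.0.0', '3.2.5', '3.2.5-EGGS', '3.0.0 LBL']  # invented names
print(sorted(keys, key=custom_key)[::-1])
# ['3.2.5', '3.0.0', '3.0.0 LBL', '3.2.5-EGGS']
```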
@@ -388,7 +436,7 @@ def check_existing(output_directory, files, type):
     ]

     if type == 'S2D':
-        existing = [
+        existing += [
            f.partition('.fits')[0] for f in os.listdir(output_directory)
            if 'e2ds' in f
        ]
@@ -500,20 +548,21 @@ def do_download_filetype(type, raw_files, output_directory, clobber=False, user=
     """ Download CCFs / S1Ds / S2Ds from DACE """
     logger = setup_logger()
     raw_files = np.atleast_1d(raw_files)
+    raw_files_original = raw_files.copy()

     create_directory(output_directory)

     # check existing files to avoid re-downloading
     if not clobber:
         raw_files = check_existing(output_directory, raw_files, type)
-
+
     n = raw_files.size

     # any file left to download?
     if n == 0:
         if verbose:
             logger.info('no files to download')
-        return
+        return list(map(os.path.basename, raw_files_original))

     # avoid an empty chunk
     if chunk_size > n:
@@ -531,7 +580,9 @@ def do_download_filetype(type, raw_files, output_directory, clobber=False, user=

     if n < parallel_limit:
         iterator = [raw_files[i:i + chunk_size] for i in range(0, n, chunk_size)]
-        for files in tqdm(iterator, total=len(iterator)):
+        if len(iterator) > 1:
+            iterator = tqdm(iterator, total=len(iterator))
+        for files in iterator:
             download(files, type, output_directory, quiet=False, user=user)
         extract_fits(output_directory)

@@ -560,6 +611,7 @@ def do_download_filetype(type, raw_files, output_directory, clobber=False, user=

     sys.stdout.flush()
     logger.info('extracted .fits files')
+    return list(map(os.path.basename, raw_files_original))


 # def do_download_s1d(raw_files, output_directory, clobber=False, verbose=True):
arvi/gaia_wrapper.py
@@ -3,8 +3,6 @@ from io import StringIO
 from csv import DictReader
 import requests

-from astropy.coordinates import SkyCoord
-
 DATA_PATH = os.path.dirname(__file__)
 DATA_PATH = os.path.join(DATA_PATH, 'data')
@@ -78,6 +76,8 @@ class gaia:
         Args:
             star (str): The name of the star to query simbad
         """
+        from astropy.coordinates import SkyCoord
+
         self.star = star

         if simbad is None:
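Moving the `SkyCoord` import from module scope into the constructor defers loading astropy until a `gaia` object is actually created, keeping `import arvi` fast. Schematically (a sketch, not the package's full class):

```python
class gaia:
    def __init__(self, star, simbad=None):
        # heavy dependency imported lazily, only when first needed
        from astropy.coordinates import SkyCoord
        self.star = star
        ...
```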
arvi/instrument_specific.py
@@ -5,7 +5,7 @@ from .setup_logger import setup_logger
 from .utils import ESPRESSO_ADC_issues, ESPRESSO_cryostat_issues


-# HARPS started operations in October 1st, 2003
+# HARPS started operations on October 1st, 2003
 # https://www.eso.org/sci/facilities/lasilla/instruments/harps/news.html
 HARPS_start = 52913

@@ -21,6 +21,11 @@ HARPS_technical_intervention = 57170
 # when the instrument was handed back to Science Operations.
 HARPS_technical_intervention_range = (57161, 57176)

+
+# ESPRESSO started operations on October 1st, 2018
+# see Pepe et al. (2021, A&A 645, A96)
+ESPRESSO_start = 58392
+
 # ESPRESSO fiber link upgrade (1 July 2019)
 ESPRESSO_technical_intervention = 58665

@@ -102,18 +107,11 @@ def divide_HARPS(self):


 def check(self, instrument):
-    logger = setup_logger()
-    instruments = self._check_instrument(instrument)
-    if instruments is None:
-        if self.verbose:
-            logger.error(f"HARPS_fiber_commissioning: no data from {instrument}")
-        return None
-    return instruments
-
+    return self._check_instrument(instrument)

 # HARPS commissioning
 def HARPS_commissioning(self, mask=True, plot=True):
-    """ Identify and optionally mask points during HARPS commissioning (HARPS).
+    """ Identify and optionally mask points during HARPS commissioning.

     Args:
         mask (bool, optional):
@@ -125,17 +123,20 @@ def HARPS_commissioning(self, mask=True, plot=True):
     if check(self, 'HARPS') is None:
         return

-    affected = self.time < HARPS_start
+    affected = np.logical_and(
+        self.instrument_array == 'HARPS03',
+        self.time < HARPS_start
+    )
     total_affected = affected.sum()

     if self.verbose:
-        n = total_affected
-        logger.info(f"there {'are'[:n^1]}{'is'[n^1:]} {n} frame{'s'[:n^1]} "
-                    "during HARPS commissioning")
+        n, i = total_affected, int(total_affected != 1)
+        logger.info(f"there {['is', 'are'][i]} {n} frame{['', 's'][i]} "
+                    "during HARPS commissioning")

     if mask:
         self.mask[affected] = False
-        self._propagate_mask_changes()
+        self._propagate_mask_changes(_remove_instrument=False)

     if plot:
         self.plot(show_masked=True)
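Besides restricting the mask to HARPS03 frames, this hunk fixes the pluralized log message: the old XOR-slice trick handles the trailing 's' (`'s'[:n^1]`) but not the 'is'/'are' choice, printing 'as' for n = 0 and 'ar' for n = 3; the explicit `['is', 'are'][i]` indexing is correct for every count. A quick comparison of the two idioms:

```python
def old(n):
    return f"there {'are'[:n^1]}{'is'[n^1:]} {n} frame{'s'[:n^1]}"

def new(n):
    i = int(n != 1)
    return f"there {['is', 'are'][i]} {n} frame{['', 's'][i]}"

for n in (0, 1, 2, 3):
    print(old(n), '|', new(n))
# there as 0 frames | there are 0 frames
# there is 1 frame | there is 1 frame
# there are 2 frames | there are 2 frames
# there ar 3 frames | there are 3 frames
```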
@@ -157,17 +158,24 @@ def HARPS_fiber_commissioning(self, mask=True, plot=True):
     if check(self, 'HARPS') is None:
         return

-    affected = (self.time >= HARPS_technical_intervention_range[0]) & (self.time <= HARPS_technical_intervention_range[1])
+    affected = np.logical_and(
+        self.time >= HARPS_technical_intervention_range[0],
+        self.time <= HARPS_technical_intervention_range[1]
+    )
+    affected = np.logical_and(
+        affected,
+        np.char.find(self.instrument_array, 'HARPS') == 0
+    )
     total_affected = affected.sum()

     if self.verbose:
-        n = total_affected
-        logger.info(f"there {'are'[:n^1]}{'is'[n^1:]} {n} frame{'s'[:n^1]} "
-                    "during the HARPS fiber commissioning period")
+        n, i = total_affected, int(total_affected != 1)
+        logger.info(f"there {['is', 'are'][i]} {n} frame{['', 's'][i]} "
+                    "during the HARPS fiber commissioning period")

     if mask:
         self.mask[affected] = False
-        self._propagate_mask_changes()
+        self._propagate_mask_changes(_remove_instrument=False)

     if plot:
         self.plot(show_masked=True)
@@ -175,6 +183,41 @@ def HARPS_fiber_commissioning(self, mask=True, plot=True):
     return affected


+# ESPRESSO commissioning
+def ESPRESSO_commissioning(self, mask=True, plot=True):
+    """ Identify and optionally mask points during ESPRESSO commissioning.
+
+    Args:
+        mask (bool, optional):
+            Whether to mask out the points.
+        plot (bool, optional):
+            Whether to plot the masked points.
+    """
+    logger = setup_logger()
+    if check(self, 'ESPRESSO') is None:
+        return
+
+    affected = np.logical_and(
+        self.instrument_array == 'ESPRESSO18',
+        self.time < ESPRESSO_start
+    )
+    total_affected = affected.sum()
+
+    if self.verbose:
+        n, i = total_affected, int(total_affected != 1)
+        logger.info(f"there {['is', 'are'][i]} {n} frame{['', 's'][i]} "
+                    "during ESPRESSO commissioning")
+
+    if mask:
+        self.mask[affected] = False
+        self._propagate_mask_changes(_remove_instrument=False)
+
+    if plot and total_affected > 0:
+        self.plot(show_masked=True)
+
+    return affected
+
+
 # ESPRESSO ADC issues
 def ADC_issues(self, mask=True, plot=True, check_headers=False):
     """ Identify and optionally mask points affected by ADC issues (ESPRESSO).
@@ -210,13 +253,13 @@ def ADC_issues(self, mask=True, plot=True, check_headers=False):
     total_affected = intersect.sum()

     if self.verbose:
-        n = total_affected
-        logger.info(f"there {'are'[:n^1]}{'is'[n^1:]} {n} frame{'s'[:n^1]} "
-                    "affected by ADC issues")
+        n, i = total_affected, int(total_affected != 1)
+        logger.info(f"there {['is', 'are'][i]} {n} frame{['', 's'][i]} "
+                    "affected by ADC issues")

     if mask:
         self.mask[intersect] = False
-        self._propagate_mask_changes()
+        self._propagate_mask_changes(_remove_instrument=False)

     if plot:
         self.plot(show_masked=True)
@@ -246,13 +289,13 @@ def blue_cryostat_issues(self, mask=True, plot=True):
     total_affected = intersect.sum()

     if self.verbose:
-        n = total_affected
-        logger.info(f"there {'are'[:n^1]}{'is'[n^1:]} {n} frame{'s'[:n^1]} "
-                    "affected by blue cryostat issues")
+        n, i = total_affected, int(total_affected != 1)
+        logger.info(f"there {['is', 'are'][i]} {n} frame{['', 's'][i]} "
+                    "affected by blue cryostat issues")

     if mask:
         self.mask[intersect] = False
-        self._propagate_mask_changes()
+        self._propagate_mask_changes(_remove_instrument=False)

     if plot:
         self.plot(show_masked=True)
@@ -292,14 +335,15 @@ def qc_scired_issues(self, plot=False, **kwargs):
     n = affected.sum()

     if self.verbose:
-        logger.info(f"there {'are'[:n^1]}{'is'[n^1:]} {n} frame{'s'[:n^1]} "
-                    "where QC SCIRED CHECK is 0")
+        i = int(n != 1)
+        logger.info(f"there {['is', 'are'][i]} {n} frame{['', 's'][i]} "
+                    "where QC SCIRED CHECK is 0")

     if n == 0:
         return

     self.mask[affected] = False
-    self._propagate_mask_changes()
+    self._propagate_mask_changes(_remove_instrument=False)

     if plot:
         self.plot(show_masked=True)
@@ -316,31 +360,26 @@ class ISSUES:
         plot (bool, optional): Whether to plot the masked points.
         """
         logger = setup_logger()
-        try:
-            adc = ADC_issues(self, mask, plot, **kwargs)
-        except IndexError:
-            logger.error('are the data binned? cannot proceed to mask these points...')
-
-        try:
-            cryostat = blue_cryostat_issues(self, mask, plot)
-        except IndexError:
-            logger.error('are the data binned? cannot proceed to mask these points...')

-        try:
-            harps_comm = HARPS_commissioning(self, mask, plot)
-        except IndexError:
-            logger.error('are the data binned? cannot proceed to mask these points...')
-
-        try:
-            harps_fibers = HARPS_fiber_commissioning(self, mask, plot)
-        except IndexError:
-            logger.error('are the data binned? cannot proceed to mask these points...')
-
-        # if None in (adc, cryostat, harps_comm, harps_fibers):
-        #     return
+        functions = (
+            ESPRESSO_commissioning,
+            ADC_issues,
+            blue_cryostat_issues,
+            HARPS_commissioning,
+            HARPS_fiber_commissioning
+        )
+        results = []
+
+        for fun in functions:
+            try:
+                results.append(fun(self, mask, plot, **kwargs))
+            except IndexError:
+                logger.error('are the data binned? cannot proceed to mask these points...')
+
+        results = list(filter(lambda x: x is not None, results))
+        self._propagate_mask_changes()

         try:
-            # return adc | cryostat
-            return np.logical_or.reduce((adc, cryostat, harps_comm, harps_fibers))
+            return np.logical_or.reduce(results)
         except UnboundLocalError:
             return
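The rewritten method loops over the check functions, collects the mask each one returns, and combines them with `np.logical_or.reduce`, which ORs any number of equal-length boolean masks element-wise; mask propagation now happens once at the end, via `self._propagate_mask_changes()`, instead of inside every check. A toy example of the combining step:

```python
import numpy as np

# illustrative per-check masks for three observations
masks = [
    np.array([True, False, False]),   # e.g. points during commissioning
    np.array([False, False, True]),   # e.g. points with ADC issues
]
print(np.logical_or.reduce(masks))  # [ True False  True]
```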