arvi 0.2.3__tar.gz → 0.2.5__tar.gz
This diff shows the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the package versions as they appear in their public registries.
Potentially problematic release: this version of arvi might be problematic.
- {arvi-0.2.3/arvi.egg-info → arvi-0.2.5}/PKG-INFO +2 -2
- {arvi-0.2.3 → arvi-0.2.5}/README.md +1 -1
- {arvi-0.2.3 → arvi-0.2.5}/arvi/config.py +3 -0
- {arvi-0.2.3 → arvi-0.2.5}/arvi/dace_wrapper.py +86 -48
- {arvi-0.2.3 → arvi-0.2.5}/arvi/instrument_specific.py +56 -25
- {arvi-0.2.3 → arvi-0.2.5}/arvi/plots.py +88 -5
- {arvi-0.2.3 → arvi-0.2.5}/arvi/setup_logger.py +1 -1
- {arvi-0.2.3 → arvi-0.2.5}/arvi/timeseries.py +1 -1
- {arvi-0.2.3 → arvi-0.2.5/arvi.egg-info}/PKG-INFO +2 -2
- {arvi-0.2.3 → arvi-0.2.5}/tests/test_create_RV.py +7 -0
- {arvi-0.2.3 → arvi-0.2.5}/.github/dependabot.yml +0 -0
- {arvi-0.2.3 → arvi-0.2.5}/.github/workflows/docs-gh-pages.yml +0 -0
- {arvi-0.2.3 → arvi-0.2.5}/.github/workflows/install.yml +0 -0
- {arvi-0.2.3 → arvi-0.2.5}/.github/workflows/python-publish.yml +0 -0
- {arvi-0.2.3 → arvi-0.2.5}/.gitignore +0 -0
- {arvi-0.2.3 → arvi-0.2.5}/LICENSE +0 -0
- {arvi-0.2.3 → arvi-0.2.5}/arvi/HZ.py +0 -0
- {arvi-0.2.3 → arvi-0.2.5}/arvi/__init__.py +0 -0
- {arvi-0.2.3 → arvi-0.2.5}/arvi/ariadne_wrapper.py +0 -0
- {arvi-0.2.3 → arvi-0.2.5}/arvi/berv.py +0 -0
- {arvi-0.2.3 → arvi-0.2.5}/arvi/binning.py +0 -0
- {arvi-0.2.3 → arvi-0.2.5}/arvi/data/extra/HD86226_PFS1.rdb +0 -0
- {arvi-0.2.3 → arvi-0.2.5}/arvi/data/extra/HD86226_PFS2.rdb +0 -0
- {arvi-0.2.3 → arvi-0.2.5}/arvi/data/extra/metadata.json +0 -0
- {arvi-0.2.3 → arvi-0.2.5}/arvi/data/info.svg +0 -0
- {arvi-0.2.3 → arvi-0.2.5}/arvi/data/obs_affected_ADC_issues.dat +0 -0
- {arvi-0.2.3 → arvi-0.2.5}/arvi/data/obs_affected_blue_cryostat_issues.dat +0 -0
- {arvi-0.2.3 → arvi-0.2.5}/arvi/exofop_wrapper.py +0 -0
- {arvi-0.2.3 → arvi-0.2.5}/arvi/extra_data.py +0 -0
- {arvi-0.2.3 → arvi-0.2.5}/arvi/gaia_wrapper.py +0 -0
- {arvi-0.2.3 → arvi-0.2.5}/arvi/headers.py +0 -0
- {arvi-0.2.3 → arvi-0.2.5}/arvi/kima_wrapper.py +0 -0
- {arvi-0.2.3 → arvi-0.2.5}/arvi/lbl_wrapper.py +0 -0
- {arvi-0.2.3 → arvi-0.2.5}/arvi/nasaexo_wrapper.py +0 -0
- {arvi-0.2.3 → arvi-0.2.5}/arvi/programs.py +0 -0
- {arvi-0.2.3 → arvi-0.2.5}/arvi/reports.py +0 -0
- {arvi-0.2.3 → arvi-0.2.5}/arvi/simbad_wrapper.py +0 -0
- {arvi-0.2.3 → arvi-0.2.5}/arvi/spectra.py +0 -0
- {arvi-0.2.3 → arvi-0.2.5}/arvi/stats.py +0 -0
- {arvi-0.2.3 → arvi-0.2.5}/arvi/stellar.py +0 -0
- {arvi-0.2.3 → arvi-0.2.5}/arvi/translations.py +0 -0
- {arvi-0.2.3 → arvi-0.2.5}/arvi/utils.py +0 -0
- {arvi-0.2.3 → arvi-0.2.5}/arvi.egg-info/SOURCES.txt +0 -0
- {arvi-0.2.3 → arvi-0.2.5}/arvi.egg-info/dependency_links.txt +0 -0
- {arvi-0.2.3 → arvi-0.2.5}/arvi.egg-info/requires.txt +0 -0
- {arvi-0.2.3 → arvi-0.2.5}/arvi.egg-info/top_level.txt +0 -0
- {arvi-0.2.3 → arvi-0.2.5}/docs/API.md +0 -0
- {arvi-0.2.3 → arvi-0.2.5}/docs/detailed.ipynb +0 -0
- {arvi-0.2.3 → arvi-0.2.5}/docs/downloading_data.md +0 -0
- {arvi-0.2.3 → arvi-0.2.5}/docs/index.md +0 -0
- {arvi-0.2.3 → arvi-0.2.5}/docs/logo/detective.png +0 -0
- {arvi-0.2.3 → arvi-0.2.5}/docs/logo/logo.png +0 -0
- {arvi-0.2.3 → arvi-0.2.5}/docs/stylesheets/extra.css +0 -0
- {arvi-0.2.3 → arvi-0.2.5}/mkdocs.yml +0 -0
- {arvi-0.2.3 → arvi-0.2.5}/pyproject.toml +0 -0
- {arvi-0.2.3 → arvi-0.2.5}/setup.cfg +0 -0
- {arvi-0.2.3 → arvi-0.2.5}/setup.py +0 -0
- {arvi-0.2.3 → arvi-0.2.5}/tests/HD10700-Bcor_ESPRESSO18.rdb +0 -0
- {arvi-0.2.3 → arvi-0.2.5}/tests/test_binning.py +0 -0
- {arvi-0.2.3 → arvi-0.2.5}/tests/test_config.py +0 -0
- {arvi-0.2.3 → arvi-0.2.5}/tests/test_import_object.py +0 -0
- {arvi-0.2.3 → arvi-0.2.5}/tests/test_simbad.py +0 -0
- {arvi-0.2.3 → arvi-0.2.5}/tests/test_stats.py +0 -0
{arvi-0.2.3/arvi.egg-info → arvi-0.2.5}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: arvi
-Version: 0.2.3
+Version: 0.2.5
 Summary: The Automated RV Inspector
 Author-email: João Faria <joao.faria@unige.ch>
 License: MIT
@@ -23,7 +23,7 @@ Requires-Dist: kepmodel
 Dynamic: license-file
 
 <p align="center">
-<img width = "140" src="https://
+<img width = "140" src="https://raw.githubusercontent.com/j-faria/arvi/refs/heads/main/docs/logo/logo.png"/>
 </p>
 
 This package sits alongside [DACE](https://dace.unige.ch/) to help with the
{arvi-0.2.3 → arvi-0.2.5}/README.md

@@ -1,5 +1,5 @@
 <p align="center">
-<img width = "140" src="https://
+<img width = "140" src="https://raw.githubusercontent.com/j-faria/arvi/refs/heads/main/docs/logo/logo.png"/>
 </p>
 
 This package sits alongside [DACE](https://dace.unige.ch/) to help with the
{arvi-0.2.3 → arvi-0.2.5}/arvi/config.py

@@ -8,6 +8,7 @@ def get_config_path():
 
 def get_config():
     config = configparser.ConfigParser()
+    config.add_section('config')
     if (path := get_config_path()).exists():
         config.read(path)
     return config
@@ -33,6 +34,8 @@ class config:
        'request_as_public': False,
        # enable from arvi import star_name
        'fancy_import': True,
+       # use the 'dark_background' matplotlib theme
+       'dark_plots': False,
        # debug
        'debug': False,
    }
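The new dark_plots option can be flipped at runtime through the config object, in the same way the test at the bottom of this diff sets request_as_public. A minimal sketch (star and instrument names are only examples):

```python
from arvi import RV, config

# use matplotlib's 'dark_background' style for arvi plots
config.dark_plots = True

s = RV('HD28185', instrument='ESPRESSO')  # example target and instrument
s.plot()  # rendered with the dark theme via the plot_settings decorator
```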
{arvi-0.2.3 → arvi-0.2.5}/arvi/dace_wrapper.py

@@ -7,7 +7,7 @@ from itertools import islice
 import numpy as np
 
 from .setup_logger import setup_logger
-from .utils import create_directory, all_logging_disabled, stdout_disabled, tqdm
+from .utils import create_directory, all_logging_disabled, stdout_disabled, timer, tqdm
 
 
 def load_spectroscopy(user=None):
@@ -47,8 +47,9 @@ def load_spectroscopy(user=None):
        logger.warning('requesting DACE data as public (no .dacerc file found)')
    return default_Spectroscopy
 
-
-def get_dace_id(star, verbose=True):
+
+@lru_cache(maxsize=1024)
+def get_dace_id(star, verbose=True, raise_error=False):
    logger = setup_logger()
    filters = {"obj_id_catname": {"equal": [star]}}
    try:
@@ -58,8 +59,11 @@ def get_dace_id(star, verbose=True):
    except KeyError:
        if verbose:
            logger.error(f"Could not find DACE ID for {star}")
+       if not raise_error:
+           return None
        raise ValueError from None
 
+
 def get_arrays(result, latest_pipeline=True, ESPRESSO_mode='HR11', NIRPS_mode='HE', verbose=True):
    logger = setup_logger()
    arrays = []
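Because get_dace_id is now wrapped in lru_cache, repeated lookups of the same target reuse the first DACE query, and the new raise_error flag decides whether a failed lookup raises or returns None. A hedged sketch of the resulting behaviour (star names are placeholders, and the calls require DACE access):

```python
from arvi.dace_wrapper import get_dace_id

get_dace_id('HD28185')   # queries DACE once
get_dace_id('HD28185')   # same arguments: answered from the cache, no new query

get_dace_id('no such star')                    # logs an error and returns None
try:
    get_dace_id('no such star', raise_error=True)
except ValueError:
    pass                                       # with raise_error=True the failure is raised
```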
arvi/dace_wrapper.py (continued)

@@ -70,7 +74,6 @@ def get_arrays(result, latest_pipeline=True, ESPRESSO_mode='HR11', NIRPS_mode='HE', verbose=True):
 
    # select ESPRESSO mode, which is defined at the level of the pipeline
    if 'ESPRESSO' in inst:
-
        find_mode = [ESPRESSO_mode in pipe for pipe in pipelines]
        # the mode was not found
        if not any(find_mode):
@@ -83,6 +86,11 @@ def get_arrays(result, latest_pipeline=True, ESPRESSO_mode='HR11', NIRPS_mode='HE', verbose=True):
            i = [i for i, pipe in enumerate(pipelines) if ESPRESSO_mode in pipe][0]
            pipelines = [pipelines[i]]
 
+   # select NIRPS mode
+   if 'NIRPS' in inst:
+       if any(this_mode := [p for p in pipelines if NIRPS_mode in p]):
+           pipelines = this_mode
+
    if latest_pipeline:
        npipe = len(pipelines)
        if 'NIRPS' in inst and any(['LBL' in p for p in pipelines]):
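The NIRPS branch uses a walrus expression to capture the pipelines matching the requested mode, falling back to the full list when none match. A standalone illustration with made-up pipeline labels:

```python
pipelines = ['3.0.0 HE', '3.0.0 HA', '3.0.0 HE LBL']  # hypothetical pipeline labels
NIRPS_mode = 'HE'

# keep only the pipelines matching the mode, but only if at least one matches
if any(this_mode := [p for p in pipelines if NIRPS_mode in p]):
    pipelines = this_mode

print(pipelines)  # ['3.0.0 HE', '3.0.0 HE LBL']
```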
arvi/dace_wrapper.py (continued)

@@ -161,19 +169,19 @@ def get_observations_from_instrument(star, instrument, user=None, main_id=None,
        dictionary with data from DACE
    """
    Spectroscopy = load_spectroscopy(user)
-
    found_dace_id = False
- [11 removed lines not rendered in this view]
+   with timer('simbad query'):
+       try:
+           dace_id = get_dace_id(star, verbose=verbose, raise_error=True)
+           found_dace_id = True
+       except ValueError as e:
+           if main_id is not None:
+               try:
+                   dace_id = get_dace_id(main_id, verbose=verbose, raise_error=True)
+                   found_dace_id = True
+               except ValueError:
+                   pass
+
    if not found_dace_id:
        try:
            with all_logging_disabled():
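timer comes from arvi.utils and is used here as a context manager around the lookup; its actual implementation is not part of this diff. A minimal sketch of the general pattern it suggests (the name and output format are assumptions):

```python
import time
from contextlib import contextmanager

@contextmanager
def timer(label):
    # time the enclosed block and report how long it took
    start = time.perf_counter()
    try:
        yield
    finally:
        elapsed = time.perf_counter() - start
        print(f'{label}: {elapsed:.3f} s')

with timer('simbad query'):
    time.sleep(0.1)  # stand-in for the DACE / Simbad lookup
```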
arvi/dace_wrapper.py (continued)

@@ -184,11 +192,16 @@ def get_observations_from_instrument(star, instrument, user=None, main_id=None,
        except TypeError:
            msg = f'no {instrument} observations for {star}'
            raise ValueError(msg) from None
- [5 removed lines not rendered in this view]
+   if (isinstance(instrument, str)):
+       filters = {
+           "ins_name": {"contains": [instrument]},
+           "obj_id_daceid": {"contains": [dace_id]}
+       }
+   elif (isinstance(instrument, list)):
+       filters = {
+           "ins_name": {"contains": instrument},
+           "obj_id_daceid": {"contains": [dace_id]}
+       }
    with all_logging_disabled():
        result = Spectroscopy.query_database(filters=filters)
 
@@ -299,12 +312,14 @@ def get_observations(star, instrument=None, user=None, main_id=None, verbose=True
        result[inst] = dict(result[inst])
    #
 
-   instruments = list(result.keys())
+   instruments = list(map(str, result.keys()))
 
    if instrument is not None:
        # select only the provided instrument (if it's there)
- [2 removed lines not rendered in this view]
+       if (isinstance(instrument, str)):
+           instruments = [inst for inst in instruments if instrument in inst]
+       elif (isinstance(instrument, list)):
+           instruments = [inst for inst in instruments if any(i in inst for i in instrument)]
    if len(instruments) == 0:
        if instrument is None:
            msg = f'no observations for {star}'
@@ -328,30 +343,53 @@ def get_observations(star, instrument=None, user=None, main_id=None, verbose=True
    # else:
    #     return -1
 
-   # sort pipelines, must be extra careful with HARPS/HARPN pipeline version numbers
-   # got here with the help of DeepSeek
-   from
- [18 more removed lines not rendered in this view]
+   # # sort pipelines, must be extra careful with HARPS/HARPN pipeline version numbers
+   # # got here with the help of DeepSeek
+   # # from functools import cmp_to_key
+   # from re import match
+   # def custom_sort_key(s):
+   #     s = s[0]
+   #     # Check for version number pattern (e.g., 3.2.5 or 3.2.5-EGGS)
+   #     version_match = match(r'^(\d+(?:\.\d+)*)(?:[-\s](.*))?$', s)
+   #     if version_match:
+   #         version_parts = list(map(int, version_match.group(1).split('.')))
+   #         if len(version_parts) == 2:
+   #             version_parts.insert(1, -1)
+   #         # if version_match.group(2) and 'LBL' in version_match.group(2):
+   #         #     version_parts.append(-1)
+   #         # else:
+   #         #     version_parts.append(0)
+   #         if version_match.group(2) is None:
+   #             version_parts.append('')
+   #         else:
+   #             version_parts.append(version_match.group(2))
+   #         return (0, 1, version_parts)
+   #     # Check for scientific reference pattern (e.g., 2004A&A...)
+   #     year_match = match(r'^(\d{4})', s)
+   #     if year_match:
+   #         year = int(year_match.group(1))
+   #         return (1, year)
+   #     # For all other strings, sort alphabetically
+   #     return (2, s)
+
+   def custom_key(val):
+       key = 0
+       key -= 2 if val == '3.5' else 0
+       key -= 1 if 'EGGS' in val else 0
+       key -= 1 if ('UHR' in val or 'MR' in val) else 0
+       key -= 1 if 'LBL' in val else 0
+       return str(key) if key != 0 else val
+
    new_result = {}
    for inst in instruments:
-       new_result[inst] = dict(
+       # new_result[inst] = dict(
+       #     sorted(result[inst].items(), key=custom_sort_key, reverse=True)
+       # )
+       # WARNING: not the same as reverse=True (not sure why)
+       sorted_keys = sorted(result[inst].keys(), key=custom_key)[::-1]
+       new_result[inst] = {}
+       for key in sorted_keys:
+           new_result[inst][key] = result[inst][key]
 
    if verbose:
        logger.info('RVs available from')
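The custom_key function replaces the commented-out custom_sort_key: once the keys are sorted and reversed, EGGS, UHR/MR, LBL and the bare '3.5' pipelines end up after the default pipeline. A standalone check with made-up HARPS pipeline keys:

```python
def custom_key(val):
    # copy of the sorting key added in dace_wrapper.get_observations
    key = 0
    key -= 2 if val == '3.5' else 0
    key -= 1 if 'EGGS' in val else 0
    key -= 1 if ('UHR' in val or 'MR' in val) else 0
    key -= 1 if 'LBL' in val else 0
    return str(key) if key != 0 else val

# hypothetical pipeline keys as they might come back from DACE
pipelines = ['3.5 EGGS', '3.5', '3.2.5', '3.2.5 EGGS']
print(sorted(pipelines, key=custom_key)[::-1])
# ['3.2.5', '3.5', '3.2.5 EGGS', '3.5 EGGS']  -> the plain pipeline ends up first
```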
arvi/dace_wrapper.py (continued)

@@ -385,7 +423,7 @@ def check_existing(output_directory, files, type):
    ]
 
    if type == 'S2D':
-       existing = [
+       existing += [
            f.partition('.fits')[0] for f in os.listdir(output_directory)
            if 'e2ds' in f
        ]
@@ -503,7 +541,7 @@ def do_download_filetype(type, raw_files, output_directory, clobber=False, user=
    # check existing files to avoid re-downloading
    if not clobber:
        raw_files = check_existing(output_directory, raw_files, type)
-
+
    n = raw_files.size
 
    # any file left to download?
{arvi-0.2.3 → arvi-0.2.5}/arvi/instrument_specific.py

@@ -5,7 +5,7 @@ from .setup_logger import setup_logger
 from .utils import ESPRESSO_ADC_issues, ESPRESSO_cryostat_issues
 
 
-# HARPS started operations
+# HARPS started operations on October 1st, 2003
 # https://www.eso.org/sci/facilities/lasilla/instruments/harps/news.html
 HARPS_start = 52913
 
@@ -21,6 +21,11 @@ HARPS_technical_intervention = 57170
 # when the instrument was handed back to Science Operations.
 HARPS_technical_intervention_range = (57161, 57176)
 
+
+# ESPRESSO started operations on October 1st, 2018
+# see Pepe et al. (2021, A&A 645, A96)
+ESPRESSO_start = 58392
+
 # ESPRESSO fiber link upgrade (1 July 2019)
 ESPRESSO_technical_intervention = 58665
 
@@ -113,7 +118,7 @@ def check(self, instrument):
 
 # HARPS commissioning
 def HARPS_commissioning(self, mask=True, plot=True):
-    """ Identify and optionally mask points during HARPS commissioning
+    """ Identify and optionally mask points during HARPS commissioning.
 
     Args:
         mask (bool, optional):
@@ -175,6 +180,38 @@ def HARPS_fiber_commissioning(self, mask=True, plot=True):
    return affected
 
 
+# ESPRESSO commissioning
+def ESPRESSO_commissioning(self, mask=True, plot=True):
+    """ Identify and optionally mask points during ESPRESSO commissioning.
+
+    Args:
+        mask (bool, optional):
+            Whether to mask out the points.
+        plot (bool, optional):
+            Whether to plot the masked points.
+    """
+    logger = setup_logger()
+    if check(self, 'ESPRESSO') is None:
+        return
+
+    affected = self.time < ESPRESSO_start
+    total_affected = affected.sum()
+
+    if self.verbose:
+        n = total_affected
+        logger.info(f"there {'are'[:n^1]}{'is'[n^1:]} {n} frame{'s'[:n^1]} "
+                    "during ESPRESSO commissioning")
+
+    if mask:
+        self.mask[affected] = False
+        self._propagate_mask_changes()
+
+    if plot:
+        self.plot(show_masked=True)
+
+    return affected
+
+
 # ESPRESSO ADC issues
 def ADC_issues(self, mask=True, plot=True, check_headers=False):
    """ Identify and optionally mask points affected by ADC issues (ESPRESSO).
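The log message builds its singular/plural wording from the frame count with slicing: n ^ 1 is n XOR 1, which is 0 exactly when n == 1, so the slices keep or drop the plural forms. A quick check of the two common cases:

```python
for n in (1, 2):
    # same f-string pattern as in ESPRESSO_commissioning
    msg = f"there {'are'[:n^1]}{'is'[n^1:]} {n} frame{'s'[:n^1]} during ESPRESSO commissioning"
    print(msg)
# there is 1 frame during ESPRESSO commissioning
# there are 2 frames during ESPRESSO commissioning
```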
arvi/instrument_specific.py (continued)

@@ -316,31 +353,25 @@ class ISSUES:
        plot (bool, optional): Whether to plot the masked points.
        """
        logger = setup_logger()
-       try:
-           adc = ADC_issues(self, mask, plot, **kwargs)
-       except IndexError:
-           logger.error('are the data binned? cannot proceed to mask these points...')
-
-       try:
-           cryostat = blue_cryostat_issues(self, mask, plot)
-       except IndexError:
-           logger.error('are the data binned? cannot proceed to mask these points...')
-
-       try:
-           harps_comm = HARPS_commissioning(self, mask, plot)
-       except IndexError:
-           logger.error('are the data binned? cannot proceed to mask these points...')
-
-       try:
-           harps_fibers = HARPS_fiber_commissioning(self, mask, plot)
-       except IndexError:
-           logger.error('are the data binned? cannot proceed to mask these points...')
 
-
-
+       functions = (
+           ESPRESSO_commissioning,
+           ADC_issues,
+           blue_cryostat_issues,
+           HARPS_commissioning,
+           HARPS_fiber_commissioning
+       )
+       results = []
+
+       for fun in functions:
+           try:
+               results.append(fun(self, mask, plot, **kwargs))
+           except IndexError:
+               logger.error('are the data binned? cannot proceed to mask these points...')
+
+       results = list(filter(lambda x: x is not None, results))
 
        try:
-
-           return np.logical_or.reduce((adc, cryostat, harps_comm, harps_fibers))
+           return np.logical_or.reduce(results)
        except UnboundLocalError:
            return
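np.logical_or.reduce collapses however many boolean masks the loop collected into a single "affected by any issue" mask. A small standalone example:

```python
import numpy as np

adc      = np.array([True, False, False, False])
cryostat = np.array([False, False, True, False])
comm     = np.array([False, False, False, False])

# True wherever at least one of the individual masks flags a point
combined = np.logical_or.reduce([adc, cryostat, comm])
print(combined)  # [ True False  True False]
```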
{arvi-0.2.3 → arvi-0.2.5}/arvi/plots.py

@@ -13,10 +13,12 @@ from .utils import lazy_import
 plt = lazy_import('matplotlib.pyplot')
 
 
-def plot_fast(func):
+def plot_settings(func):
     @wraps(func)
     def wrapper(*args, **kwargs):
-        with plt.style.context('fast'):
+        # with plt.style.context('fast'):
+        theme = 'dark_background' if config.dark_plots else 'fast'
+        with plt.style.context(theme):
             return func(*args, **kwargs)
     return wrapper
 
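A self-contained sketch of the renamed decorator, with a stand-in for arvi's config object, shows how the theme switch works ('fast' and 'dark_background' are standard matplotlib styles):

```python
import matplotlib.pyplot as plt
from functools import wraps

class config:          # stand-in for arvi's config object
    dark_plots = False

def plot_settings(func):
    # run the wrapped plotting function inside the chosen matplotlib style
    @wraps(func)
    def wrapper(*args, **kwargs):
        theme = 'dark_background' if config.dark_plots else 'fast'
        with plt.style.context(theme):
            return func(*args, **kwargs)
    return wrapper

@plot_settings
def demo():
    fig, ax = plt.subplots()
    ax.plot([0, 1], [0, 1])
    return fig, ax
```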
arvi/plots.py (continued)

@@ -135,7 +137,7 @@ def clickable_legend(fig, ax, leg):
            pass
    return on_pick_legend
 
-@plot_fast
+@plot_settings
 def plot(self, ax=None, show_masked=False, instrument=None, time_offset=0,
         remove_50000=False, tooltips=True, show_title=False, show_legend=True, label=None,
         jitter=None, N_in_label=False, versus_n=False, show_histogram=False, bw=False, **kwargs):
@@ -403,7 +405,7 @@ def plot(self, ax=None, show_masked=False, instrument=None, time_offset=0,
        return fig, ax
 
 
-@plot_fast
+# @plot_fast
 def plot_quantity(self, quantity, ax=None, show_masked=False, instrument=None,
                  time_offset=0, remove_50000=False, tooltips=False, show_legend=True,
                  N_in_label=False, **kwargs):
@@ -506,7 +508,88 @@ plot_rhk = partialmethod(plot_quantity, quantity='rhk')
 plot_berv = partialmethod(plot_quantity, quantity='berv')
 
 
-@plot_fast
+def plot_xy(self, x, y, ax=None, instrument=None, show_legend=True, **kwargs):
+    logger = setup_logger()
+    if self.N == 0:
+        if self.verbose:
+            logger.error('no data to plot')
+        return
+
+    if ax is None:
+        fig, ax = plt.subplots(1, 1, constrained_layout=True)
+    else:
+        fig = ax.figure
+
+    kwargs.setdefault('marker', 'o')
+    kwargs.setdefault('ls', '')
+    kwargs.setdefault('capsize', 0)
+    kwargs.setdefault('ms', 4)
+
+    instruments = self._check_instrument(instrument)
+
+    for inst in instruments:
+        s = self if self._child else getattr(self, inst)
+        label = inst
+
+        missing = False
+        try:
+            xdata = getattr(s, x).copy()
+        except AttributeError:
+            missing = True
+        try:
+            e_xdata = getattr(s, x + '_err').copy()
+        except AttributeError:
+            e_xdata = np.zeros_like(xdata)
+
+        try:
+            ydata = getattr(s, y).copy()
+        except AttributeError:
+            missing = True
+        try:
+            e_ydata = getattr(s, y + '_err').copy()
+        except AttributeError:
+            e_ydata = np.zeros_like(ydata)
+
+        if missing:
+            lines, *_ = ax.errorbar([], [], [],
+                                    label=label, picker=True, **kwargs)
+            continue
+
+        ax.errorbar(xdata[s.mask], ydata[s.mask], e_xdata[s.mask], e_ydata[s.mask],
+                    label=label, **kwargs)
+
+        # if show_masked:
+        #     ax.errorbar(self.time[~self.mask] - time_offset,
+        #                 getattr(self, quantity)[~self.mask],
+        #                 getattr(self, quantity + '_err')[~self.mask],
+        #                 label='masked', fmt='x', ms=10, color='k', zorder=-2)
+
+    if show_legend:
+        leg = ax.legend()
+        on_pick_legend = clickable_legend(fig, ax, leg)
+        plt.connect('pick_event', on_pick_legend)
+
+    ax.minorticks_on()
+
+    delta = 'Δ' if self._did_adjust_means else ''
+
+    # ylabel = {
+    #     quantity.lower(): quantity,
+    #     'fwhm': f'{delta}FWHM [{self.units}]',
+    #     'bispan': f'{delta}BIS [{self.units}]',
+    #     'rhk': r"$\log$ R'$_{HK}$",
+    #     'berv': 'BERV [km/s]',
+    # }
+
+    # ax.set_ylabel(ylabel[quantity.lower()])
+
+    if config.return_self:
+        return self
+    else:
+        return fig, ax
+
+
+# @plot_fast
 def gls(self, ax=None, label=None, instrument=None,
        fap=True, fap_method='baluev', adjust_means=config.adjust_means_gls,
        picker=True, **kwargs):
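The new plot_xy helper plots any two per-observation quantities against each other, adding error bars when the matching *_err arrays exist. A hedged usage sketch, assuming plot_xy is attached to the RV class like the other plotting helpers and that the attribute names below exist on the object:

```python
from arvi import RV

s = RV('HD28185', instrument='ESPRESSO')   # example target and instrument

# FWHM versus RV, one errorbar series per instrument;
# 'vrad' and 'fwhm' are assumed attribute names on the RV object
s.plot_xy('vrad', 'fwhm')
```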
{arvi-0.2.3 → arvi-0.2.5}/arvi/timeseries.py

@@ -55,7 +55,7 @@ class RV(ISSUES, REPORTS):
        Information on the target from Simbad
    """
    star: str
-   instrument: str = field(init=True, repr=False, default=None)
+   instrument: Union[str, list] = field(init=True, repr=False, default=None)
    verbose: bool = field(init=True, repr=False, default=True)
    do_maxerror: Union[bool, float] = field(init=True, repr=False, default=False)
    do_secular_acceleration: bool = field(init=True, repr=False, default=True)
{arvi-0.2.3 → arvi-0.2.5/arvi.egg-info}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: arvi
-Version: 0.2.3
+Version: 0.2.5
 Summary: The Automated RV Inspector
 Author-email: João Faria <joao.faria@unige.ch>
 License: MIT
@@ -23,7 +23,7 @@ Requires-Dist: kepmodel
 Dynamic: license-file
 
 <p align="center">
-<img width = "140" src="https://
+<img width = "140" src="https://raw.githubusercontent.com/j-faria/arvi/refs/heads/main/docs/logo/logo.png"/>
 </p>
 
 This package sits alongside [DACE](https://dace.unige.ch/) to help with the
{arvi-0.2.3 → arvi-0.2.5}/tests/test_create_RV.py

@@ -19,3 +19,10 @@ def test_from_rdb(change_test_dir):
    assert (s.bispan == 0).all()
    assert isnan(s.rhk).all()
 
+
+def test_list_instruments():
+    from arvi import RV, config
+    config.request_as_public = True
+    _ = RV('HD28185', instrument='CORALIE')
+    _ = RV('HD28185', instrument=['CORALIE'])
+    _ = RV('HD28185', instrument=['CORALIE', 'HRS'])
All remaining files listed above are unchanged between 0.2.3 and 0.2.5.