arvi 0.1.26__tar.gz → 0.1.27__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

This version of arvi might be problematic.

Files changed (59)
  1. arvi-0.1.27/.github/dependabot.yml +6 -0
  2. {arvi-0.1.26 → arvi-0.1.27}/.github/workflows/docs-gh-pages.yml +5 -5
  3. {arvi-0.1.26 → arvi-0.1.27}/.github/workflows/install.yml +0 -3
  4. {arvi-0.1.26 → arvi-0.1.27}/PKG-INFO +2 -2
  5. {arvi-0.1.26 → arvi-0.1.27}/arvi/dace_wrapper.py +39 -13
  6. {arvi-0.1.26 → arvi-0.1.27}/arvi/gaia_wrapper.py +14 -2
  7. {arvi-0.1.26 → arvi-0.1.27}/arvi/simbad_wrapper.py +83 -4
  8. {arvi-0.1.26 → arvi-0.1.27}/arvi/timeseries.py +12 -8
  9. {arvi-0.1.26 → arvi-0.1.27}/arvi.egg-info/PKG-INFO +2 -2
  10. {arvi-0.1.26 → arvi-0.1.27}/arvi.egg-info/SOURCES.txt +1 -0
  11. {arvi-0.1.26 → arvi-0.1.27}/.github/workflows/python-publish.yml +0 -0
  12. {arvi-0.1.26 → arvi-0.1.27}/.gitignore +0 -0
  13. {arvi-0.1.26 → arvi-0.1.27}/LICENSE +0 -0
  14. {arvi-0.1.26 → arvi-0.1.27}/README.md +0 -0
  15. {arvi-0.1.26 → arvi-0.1.27}/arvi/HZ.py +0 -0
  16. {arvi-0.1.26 → arvi-0.1.27}/arvi/__init__.py +0 -0
  17. {arvi-0.1.26 → arvi-0.1.27}/arvi/ariadne_wrapper.py +0 -0
  18. {arvi-0.1.26 → arvi-0.1.27}/arvi/berv.py +0 -0
  19. {arvi-0.1.26 → arvi-0.1.27}/arvi/binning.py +0 -0
  20. {arvi-0.1.26 → arvi-0.1.27}/arvi/config.py +0 -0
  21. {arvi-0.1.26 → arvi-0.1.27}/arvi/data/extra/HD86226_PFS1.rdb +0 -0
  22. {arvi-0.1.26 → arvi-0.1.27}/arvi/data/extra/HD86226_PFS2.rdb +0 -0
  23. {arvi-0.1.26 → arvi-0.1.27}/arvi/data/extra/metadata.json +0 -0
  24. {arvi-0.1.26 → arvi-0.1.27}/arvi/data/info.svg +0 -0
  25. {arvi-0.1.26 → arvi-0.1.27}/arvi/data/obs_affected_ADC_issues.dat +0 -0
  26. {arvi-0.1.26 → arvi-0.1.27}/arvi/data/obs_affected_blue_cryostat_issues.dat +0 -0
  27. {arvi-0.1.26 → arvi-0.1.27}/arvi/extra_data.py +0 -0
  28. {arvi-0.1.26 → arvi-0.1.27}/arvi/headers.py +0 -0
  29. {arvi-0.1.26 → arvi-0.1.27}/arvi/instrument_specific.py +0 -0
  30. {arvi-0.1.26 → arvi-0.1.27}/arvi/kima_wrapper.py +0 -0
  31. {arvi-0.1.26 → arvi-0.1.27}/arvi/lbl_wrapper.py +0 -0
  32. {arvi-0.1.26 → arvi-0.1.27}/arvi/nasaexo_wrapper.py +0 -0
  33. {arvi-0.1.26 → arvi-0.1.27}/arvi/plots.py +0 -0
  34. {arvi-0.1.26 → arvi-0.1.27}/arvi/programs.py +0 -0
  35. {arvi-0.1.26 → arvi-0.1.27}/arvi/reports.py +0 -0
  36. {arvi-0.1.26 → arvi-0.1.27}/arvi/setup_logger.py +0 -0
  37. {arvi-0.1.26 → arvi-0.1.27}/arvi/spectra.py +0 -0
  38. {arvi-0.1.26 → arvi-0.1.27}/arvi/stats.py +0 -0
  39. {arvi-0.1.26 → arvi-0.1.27}/arvi/stellar.py +0 -0
  40. {arvi-0.1.26 → arvi-0.1.27}/arvi/translations.py +0 -0
  41. {arvi-0.1.26 → arvi-0.1.27}/arvi/utils.py +0 -0
  42. {arvi-0.1.26 → arvi-0.1.27}/arvi.egg-info/dependency_links.txt +0 -0
  43. {arvi-0.1.26 → arvi-0.1.27}/arvi.egg-info/requires.txt +0 -0
  44. {arvi-0.1.26 → arvi-0.1.27}/arvi.egg-info/top_level.txt +0 -0
  45. {arvi-0.1.26 → arvi-0.1.27}/docs/API.md +0 -0
  46. {arvi-0.1.26 → arvi-0.1.27}/docs/detailed.md +0 -0
  47. {arvi-0.1.26 → arvi-0.1.27}/docs/index.md +0 -0
  48. {arvi-0.1.26 → arvi-0.1.27}/docs/logo/detective.png +0 -0
  49. {arvi-0.1.26 → arvi-0.1.27}/docs/logo/logo.png +0 -0
  50. {arvi-0.1.26 → arvi-0.1.27}/mkdocs.yml +0 -0
  51. {arvi-0.1.26 → arvi-0.1.27}/pyproject.toml +0 -0
  52. {arvi-0.1.26 → arvi-0.1.27}/setup.cfg +0 -0
  53. {arvi-0.1.26 → arvi-0.1.27}/setup.py +0 -0
  54. {arvi-0.1.26 → arvi-0.1.27}/tests/HD10700-Bcor_ESPRESSO18.rdb +0 -0
  55. {arvi-0.1.26 → arvi-0.1.27}/tests/test_binning.py +0 -0
  56. {arvi-0.1.26 → arvi-0.1.27}/tests/test_create_RV.py +0 -0
  57. {arvi-0.1.26 → arvi-0.1.27}/tests/test_import_object.py +0 -0
  58. {arvi-0.1.26 → arvi-0.1.27}/tests/test_simbad.py +0 -0
  59. {arvi-0.1.26 → arvi-0.1.27}/tests/test_stats.py +0 -0
arvi-0.1.27/.github/dependabot.yml
@@ -0,0 +1,6 @@
+version: 2
+updates:
+  - package-ecosystem: "github-actions"
+    directory: ".github/workflows/"  # Location of the directory containing the GitHub Actions workflows
+    schedule:
+      interval: "weekly"  # Check for updates weekly
.github/workflows/docs-gh-pages.yml
@@ -26,13 +26,13 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Checkout
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4

       - name: Setup Pages
-        uses: actions/configure-pages@v3
+        uses: actions/configure-pages@v5

       - name: Set up Python
-        uses: actions/setup-python@v3
+        uses: actions/setup-python@v5
         with:
           python-version: "3.10"

@@ -48,7 +48,7 @@ jobs:
         mkdocs build

       - name: Upload artifact
-        uses: actions/upload-pages-artifact@v2
+        uses: actions/upload-pages-artifact@v3

   # Deployment job
   deploy:
@@ -60,4 +60,4 @@ jobs:
     steps:
       - name: Deploy to GitHub Pages
         id: deployment
-        uses: actions/deploy-pages@v2
+        uses: actions/deploy-pages@v4
.github/workflows/install.yml
@@ -5,9 +5,6 @@ name: Install-Test

 on:
   push:
-    #branches: [ "main" ]
-  pull_request:
-    branches: [ "main" ]

 jobs:
   build:
PKG-INFO
@@ -1,6 +1,6 @@
-Metadata-Version: 2.1
+Metadata-Version: 2.2
 Name: arvi
-Version: 0.1.26
+Version: 0.1.27
 Summary: The Automated RV Inspector
 Author-email: João Faria <joao.faria@unige.ch>
 License: MIT
arvi/dace_wrapper.py
@@ -11,16 +11,33 @@ from .setup_logger import logger
 from .utils import create_directory, all_logging_disabled, stdout_disabled, tqdm


-def load_spectroscopy() -> SpectroscopyClass:
+def load_spectroscopy(user=None) -> SpectroscopyClass:
     from .config import config
+    # requesting as public
     if config.request_as_public:
         with all_logging_disabled():
             dace = DaceClass(dace_rc_config_path='none')
         return SpectroscopyClass(dace_instance=dace)
+    # DACERC environment variable is set, should point to a dacerc file with credentials
     if 'DACERC' in os.environ:
         dace = DaceClass(dace_rc_config_path=os.environ['DACERC'])
         return SpectroscopyClass(dace_instance=dace)
-    # elif os.path.exists(os.path.expanduser('~/.dacerc')):
+    # user provided, should be a section in ~/.dacerc
+    if user is not None:
+        import configparser
+        import tempfile
+        config = configparser.ConfigParser()
+        config.read(os.path.expanduser('~/.dacerc'))
+        if user not in config.sections():
+            raise ValueError(f'Section for user "{user}" not found in ~/.dacerc')
+        with tempfile.NamedTemporaryFile(mode='w', delete=False) as f:
+            new_config = configparser.ConfigParser()
+            new_config['user'] = config[user]
+            new_config.write(f)
+        dace = DaceClass(dace_rc_config_path=f.name)
+        logger.info(f'using credentials for user {user} in ~/.dacerc')
+        return SpectroscopyClass(dace_instance=dace)
+    # default
     return default_Spectroscopy

 @lru_cache()
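With the new user argument, a single ~/.dacerc can hold credentials for several DACE accounts, one per named section; load_spectroscopy copies the requested section into a temporary file under the [user] section name that the DACE client expects. A minimal sketch of how this might be used (the section name "alice" and the api_key key are illustrative, not taken from the diff):

# ~/.dacerc with two accounts, one per section (key names are illustrative)
# [alice]
# api_key = ...
# [bob]
# api_key = ...

from arvi.dace_wrapper import load_spectroscopy

Spectroscopy = load_spectroscopy(user='alice')  # picks the [alice] section
Spectroscopy = load_spectroscopy()              # unchanged default behaviour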
@@ -111,7 +128,7 @@ def get_arrays(result, latest_pipeline=True, ESPRESSO_mode='HR11', NIRPS_mode='H

     return arrays

-def get_observations_from_instrument(star, instrument, main_id=None, verbose=True):
+def get_observations_from_instrument(star, instrument, user=None, main_id=None, verbose=True):
     """ Query DACE for all observations of a given star and instrument

     Args:
@@ -119,6 +136,8 @@ def get_observations_from_instrument(star, instrument, main_id=None, verbose=Tru
             name of the star
         instrument (str):
             instrument name
+        user (str, optional):
+            DACERC user name. Defaults to None.
         main_id (str, optional):
             Simbad main id of target to query DACE id. Defaults to None.
         verbose (bool, optional):
@@ -132,7 +151,7 @@ def get_observations_from_instrument(star, instrument, main_id=None, verbose=Tru
         dict:
             dictionary with data from DACE
     """
-    Spectroscopy = load_spectroscopy()
+    Spectroscopy = load_spectroscopy(user)
     found_dace_id = False
     try:
         dace_id = get_dace_id(star, verbose=verbose)
@@ -233,9 +252,9 @@ def get_observations_from_instrument(star, instrument, main_id=None, verbose=Tru
     # print([r[k1][k2].keys() for k1 in r.keys() for k2 in r[k1].keys()])
     return r

-def get_observations(star, instrument=None, main_id=None, verbose=True):
+def get_observations(star, instrument=None, user=None, main_id=None, verbose=True):
     if instrument is None:
-        Spectroscopy = load_spectroscopy()
+        Spectroscopy = load_spectroscopy(user)
         try:
             with stdout_disabled(), all_logging_disabled():
                 result = Spectroscopy.get_timeseries(target=star,
@@ -249,7 +268,7 @@ def get_observations(star, instrument=None, main_id=None, verbose=True):
             raise ValueError(msg) from None
     else:
         try:
-            result = get_observations_from_instrument(star, instrument, main_id, verbose)
+            result = get_observations_from_instrument(star, instrument, user, main_id, verbose)
         except ValueError:
             msg = f'no {instrument} observations for {star}'
             raise ValueError(msg) from None
@@ -333,6 +352,12 @@ def check_existing(output_directory, files, type):
         if type in f
     ]

+    if type == 'S2D':
+        existing = [
+            f.partition('.fits')[0] for f in os.listdir(output_directory)
+            if 'e2ds' in f
+        ]
+
     # also check for lowercase type
     existing += [
         f.partition('.fits')[0] for f in os.listdir(output_directory)
@@ -345,7 +370,8 @@

     # remove type of file (e.g. _CCF_A)
     existing = [f.partition('_')[0] for f in existing]
-
+    existing = np.unique(existing)
+
     missing = []
     for file in files:
         if any(other in file for other in existing):
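The check_existing changes cover S2D products, which the pipeline stores under the historical 'e2ds' name, and deduplicate the comparison set: each file already on disk is reduced to the part of its name before '.fits' and before the first '_', the prefixes are passed through np.unique, and a requested file counts as present when any prefix occurs in its name. A self-contained illustration of that matching logic (filenames invented):

import numpy as np

on_disk = ['r.ESPRE.2021-01-01_CCF_A.fits', 'r.ESPRE.2021-01-01_CCF_B.fits']
requested = ['r.ESPRE.2021-01-01', 'r.ESPRE.2021-01-02']

# reduce disk files to their unique basename prefixes
existing = np.unique([f.partition('.fits')[0].partition('_')[0] for f in on_disk])
# a requested file is missing when no prefix occurs in its name
missing = [f for f in requested if not any(e in f for e in existing)]
print(missing)  # ['r.ESPRE.2021-01-02']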
@@ -354,9 +380,9 @@

     return np.array(missing)

-def download(files, type, output_directory, output_filename=None, quiet=True, pbar=None):
+def download(files, type, output_directory, output_filename=None, user=None, quiet=True, pbar=None):
     """ Download files from DACE """
-    Spectroscopy = load_spectroscopy()
+    Spectroscopy = load_spectroscopy(user)
     if isinstance(files, str):
         files = [files]
     if quiet:
@@ -433,7 +459,7 @@ def do_symlink_filetype(type, raw_files, output_directory, clobber=False, top_le
             logger.warning(f'file not found: {file}')


-def do_download_filetype(type, raw_files, output_directory, clobber=False,
+def do_download_filetype(type, raw_files, output_directory, clobber=False, user=None,
                          verbose=True, chunk_size=20, parallel_limit=30):
     """ Download CCFs / S1Ds / S2Ds from DACE """
     raw_files = np.atleast_1d(raw_files)
@@ -469,7 +495,7 @@ def do_download_filetype(type, raw_files, output_directory, clobber=False,
     if n < parallel_limit:
         iterator = [raw_files[i:i + chunk_size] for i in range(0, n, chunk_size)]
         for files in tqdm(iterator, total=len(iterator)):
-            download(files, type, output_directory, quiet=False)
+            download(files, type, output_directory, quiet=False, user=user)
         extract_fits(output_directory)

     else:
@@ -481,7 +507,7 @@
         chunks = list(chunker(raw_files, chunk_size))
         pbar = tqdm(total=len(chunks))
         it1 = [
-            (files, type, output_directory, f'spectroscopy_download{i+1}.tar.gz', True, pbar)
+            (files, type, output_directory, f'spectroscopy_download{i+1}.tar.gz', user, True, pbar)
             for i, files in enumerate(chunks)
         ]
         it2 = [(output_directory, f'spectroscopy_download{i+1}.tar.gz') for i in range(len(chunks))]
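Above parallel_limit files, do_download_filetype switches from the sequential loop to per-chunk argument tuples (it1, it2), and the tuple now carries user so every parallel worker authenticates the same way. The chunker helper is not shown in this diff; a typical implementation, under that assumption:

def chunker(seq, size):
    # yield successive slices of seq with at most size elements each
    return (seq[pos:pos + size] for pos in range(0, len(seq), size))

# list(chunker(list('abcdefg'), 3))  ->  [['a', 'b', 'c'], ['d', 'e', 'f'], ['g']]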
arvi/gaia_wrapper.py
@@ -9,7 +9,11 @@ DATA_PATH = os.path.dirname(__file__)
 DATA_PATH = os.path.join(DATA_PATH, 'data')

 QUERY = """
-SELECT TOP 20 gaia_source.designation,gaia_source.source_id,gaia_source.ra,gaia_source.dec,gaia_source.parallax,gaia_source.pmra,gaia_source.pmdec,gaia_source.ruwe,gaia_source.phot_g_mean_mag,gaia_source.bp_rp,gaia_source.radial_velocity,gaia_source.phot_variable_flag,gaia_source.non_single_star,gaia_source.has_xp_continuous,gaia_source.has_xp_sampled,gaia_source.has_rvs,gaia_source.has_epoch_photometry,gaia_source.has_epoch_rv,gaia_source.has_mcmc_gspphot,gaia_source.has_mcmc_msc,gaia_source.teff_gspphot,gaia_source.logg_gspphot,gaia_source.mh_gspphot,gaia_source.distance_gspphot,gaia_source.azero_gspphot,gaia_source.ag_gspphot,gaia_source.ebpminrp_gspphot
+SELECT TOP 20 gaia_source.designation, gaia_source.source_id,
+gaia_source.ra, gaia_source.dec,
+gaia_source.parallax, gaia_source.pmra, gaia_source.pmdec,
+gaia_source.ruwe, gaia_source.phot_g_mean_mag, gaia_source.bp_rp,
+gaia_source.radial_velocity, gaia_source.radial_velocity_error
 FROM gaiadr3.gaia_source
 WHERE
 CONTAINS(
@@ -23,7 +27,11 @@ CONTAINS(
 """

 QUERY_ID = """
-SELECT TOP 20 gaia_source.designation,gaia_source.source_id,gaia_source.ra,gaia_source.dec,gaia_source.parallax,gaia_source.pmra,gaia_source.pmdec,gaia_source.ruwe,gaia_source.phot_g_mean_mag,gaia_source.bp_rp,gaia_source.radial_velocity,gaia_source.phot_variable_flag,gaia_source.non_single_star,gaia_source.has_xp_continuous,gaia_source.has_xp_sampled,gaia_source.has_rvs,gaia_source.has_epoch_photometry,gaia_source.has_epoch_rv,gaia_source.has_mcmc_gspphot,gaia_source.has_mcmc_msc,gaia_source.teff_gspphot,gaia_source.logg_gspphot,gaia_source.mh_gspphot,gaia_source.distance_gspphot,gaia_source.azero_gspphot,gaia_source.ag_gspphot,gaia_source.ebpminrp_gspphot
+SELECT TOP 20 gaia_source.designation, gaia_source.source_id,
+gaia_source.ra, gaia_source.dec,
+gaia_source.parallax, gaia_source.pmra, gaia_source.pmdec,
+gaia_source.ruwe, gaia_source.phot_g_mean_mag, gaia_source.bp_rp,
+gaia_source.radial_velocity, gaia_source.radial_velocity_error
 FROM gaiadr3.gaia_source
 WHERE
 gaia_source.source_id = {id}
@@ -110,6 +118,10 @@ class gaia:
             self.radial_velocity = float(results['radial_velocity'])
         except ValueError:
             self.radial_velocity = None
+        try:
+            self.radial_velocity_error = float(results['radial_velocity_error'])
+        except ValueError:
+            self.radial_velocity_error = None

         return

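The rewritten queries drop the long tail of DR3 flag and GSP-Phot columns and add radial_velocity_error next to radial_velocity. A standalone sketch of running the by-id query against the Gaia archive, using astroquery as the TAP client (an assumption; the wrapper may use its own HTTP code):

from astroquery.gaia import Gaia
from arvi.gaia_wrapper import QUERY_ID

source_id = 0  # placeholder: a real Gaia DR3 source_id goes here
results = Gaia.launch_job(QUERY_ID.format(id=source_id)).get_results()
# the wrapper wraps float(results['radial_velocity_error']) in try/except
# ValueError, so the attribute ends up None for sources without an RVS velocity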
arvi/simbad_wrapper.py
@@ -1,8 +1,15 @@
 import os
+import numpy as np
 import requests
+from dataclasses import dataclass

 import pysweetcat

+try:
+    from uncertainties import ufloat
+except ImportError:
+    ufloat = lambda x, y: x
+
 from .translations import translate

 DATA_PATH = os.path.dirname(__file__)
@@ -15,19 +22,35 @@ SELECT basic.OID,
        main_id,
        pmra,
        pmdec,
-       plx_value,
+       plx_value, plx_err,
        rvz_radvel,
        sp_type
 FROM basic JOIN ident ON oidref = oid
 WHERE id = '{star}';
 """

+# SELECT filter, flux, flux_err
+# FROM basic JOIN ident ON oid = ident.oidref JOIN flux ON oid = flux.oidref
+# WHERE id = 'HD23079';
+
 BV_QUERY = """
 SELECT B, V FROM allfluxes
 JOIN ident USING(oidref)
 WHERE id = '{star}';
 """

+FILTERS_QUERY = """
+SELECT filter, flux, flux_err, bibcode FROM flux
+JOIN ident USING(oidref)
+WHERE id = '{star}';
+"""
+
+MEAS_QUERY = """
+SELECT teff, log_g, log_g_prec, fe_h, fe_h_prec, bibcode FROM mesFe_H
+JOIN ident USING(oidref)
+WHERE id = '{star}';
+"""
+
 IDS_QUERY = """
 SELECT ids FROM ids
 JOIN ident USING(oidref)
@@ -40,6 +63,13 @@ JOIN ident ON oidref = oid
 WHERE id = '{star}';
 """

+@dataclass
+class Measurements:
+    teff: list
+    logg: list
+    feh: list
+    bibcode: list
+

 def run_query(query):
     url = 'http://simbad.u-strasbg.fr/simbad/sim-tap/sync'
@@ -52,7 +82,7 @@ def run_query(query):
         raise IndexError(err)
     return response.content.decode()

-def parse_table(table, cols=None, values=None):
+def parse_table1(table, cols=None, values=None):
     header = table.splitlines()[0].split('|')
     if cols is None:
         cols = list(map(str.strip, header))
@@ -66,6 +96,29 @@
     values = [value.replace('"', '') for value in values]
     return cols, values

+def parse_tablen(table, cols=None, values=None):
+    header = table.splitlines()[0].split('|')
+    cols = list(map(str.strip, header))
+    values = [list(map(str.strip, row.split('|'))) for row in table.splitlines()[2:]]
+    return cols, values
+
+def parse_value(value, err=None, prec=None):
+    try:
+        v = float(value)
+        if err:
+            try:
+                v = ufloat(float(value), float(err))
+            except ValueError:
+                pass
+        if prec:
+            try:
+                v = ufloat(float(value), 10**-int(prec))
+            except ValueError:
+                pass
+    except ValueError:
+        v = np.nan
+    return v
+

 effective_temperatures = {
     'F0': 7350, 'F2': 7050, 'F3': 6850, 'F5': 6700, 'F6': 6550, 'F7': 6400, 'F8': 6300,
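parse_value converts SIMBAD's string fields to floats, attaching an uncertainty when an explicit error or a decimal precision is given, and falling back to NaN for unparseable entries; without the uncertainties package installed, the ufloat fallback simply discards the error term. A few illustrative calls (values invented):

parse_value('5777')               # -> 5777.0
parse_value('-0.05', err='0.03')  # -> -0.05 +/- 0.03 as a ufloat
parse_value('4.44', prec='2')     # -> 4.44 +/- 0.01 (10**-2 from the precision)
parse_value('~')                  # -> nan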
@@ -120,14 +173,37 @@ class simbad:

         try:
             table1 = run_query(query=QUERY.format(star=self.star))
-            cols, values = parse_table(table1)
+            cols, values = parse_table1(table1)

             table2 = run_query(query=BV_QUERY.format(star=self.star))
-            cols, values = parse_table(table2, cols, values)
+            cols, values = parse_table1(table2, cols, values)

             table3 = run_query(query=IDS_QUERY.format(star=self.star))
             line = table3.splitlines()[2]
             self.ids = line.replace('"', '').replace('  ', ' ').replace('  ', ' ').replace('  ', ' ').split('|')
+
+            table4 = run_query(query=FILTERS_QUERY.format(star=self.star))
+            for row in table4.splitlines()[2:]:
+                filter_name, mag, mag_err, bibcode = row.replace('"', '').split('|')
+                filter_name = filter_name.strip()
+                try:
+                    setattr(self, '_' + filter_name, ufloat(float(mag), float(mag_err)))
+                except ValueError:
+                    setattr(self, '_' + filter_name, float(mag))
+
+            # measurements table
+            table5 = run_query(query=MEAS_QUERY.format(star=self.star))
+            _teff, _logg, _feh, _bibcode = [], [], [], []
+            for row in table5.splitlines()[2:]:
+                teff, log_g, log_g_prec, fe_h, fe_h_prec, bibcode = row.replace('"', '').split('|')
+                _bibcode.append(bibcode)
+                _teff.append(parse_value(teff))
+                _logg.append(parse_value(log_g, prec=log_g_prec))
+                _feh.append(parse_value(fe_h, prec=fe_h_prec))
+
+            self.measurements = Measurements(_teff, _logg, _feh, _bibcode)
+
+
         except IndexError:
             raise ValueError(f'simbad query for {star} failed')

@@ -176,6 +252,9 @@ class simbad:
         sp_type = self.sp_type
         return f'{self.star} ({V=}, {sp_type=})'

+    @property
+    def bmv(self):
+        return self.B - self.V


 def argsort_by_spectral_type(sptypes):
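Together these changes expose per-filter photometry as underscore-prefixed attributes, literature Teff/logg/[Fe/H] values from the mesFe_H table, and a B-V colour. A hedged usage sketch (the star name is just an example, attribute availability depends on what SIMBAD returns, and B and V are assumed to be set from the allfluxes query):

from arvi.simbad_wrapper import simbad

s = simbad('HD23079')          # the star used in the commented-out flux query
print(s.bmv)                   # B - V, with propagated uncertainty if available
print(s.measurements.teff)     # list of literature Teff values
print(s.measurements.bibcode)  # one bibcode per measurement row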
arvi/timeseries.py
@@ -59,6 +59,7 @@ class RV:
     only_latest_pipeline: bool = field(init=True, repr=False, default=True)
     load_extra_data: Union[bool, str] = field(init=True, repr=False, default=False)
     check_drs_qc: bool = field(init=True, repr=False, default=True)
+    user: bool = field(init=True, repr=False, default=None)
     #
     units = 'm/s'
     _child: bool = field(init=True, repr=False, default=False)
@@ -185,7 +186,7 @@

             with timer():
                 self.dace_result = get_observations(self.__star__, self.instrument,
-                                                    main_id=mid, verbose=self.verbose)
+                                                    user=self.user, main_id=mid, verbose=self.verbose)
         except ValueError as e:
             # querying DACE failed, should we raise an error?
             if self._raise_on_error:
@@ -1029,7 +1030,8 @@
                 logger.warning('may need to provide `top_level` in kwargs to find file')
             do_symlink_filetype('CCF', files[:limit], directory, **kwargs)
         else:
-            do_download_filetype('CCF', files[:limit], directory, verbose=self.verbose, **kwargs)
+            do_download_filetype('CCF', files[:limit], directory,
+                                 verbose=self.verbose, user=self.user, **kwargs)

         if load:
             try:
@@ -1083,7 +1085,8 @@
                 logger.warning('may need to provide `top_level` in kwargs to find file')
             do_symlink_filetype('S1D', files[:limit], directory, **kwargs)
         else:
-            do_download_filetype('S1D', files[:limit], directory, verbose=self.verbose, **kwargs)
+            do_download_filetype('S1D', files[:limit], directory,
+                                 verbose=self.verbose, user=self.user, **kwargs)

     def download_s2d(self, instrument=None, index=None, limit=None,
                      directory=None, symlink=False, **kwargs):
@@ -1115,7 +1118,9 @@
                 logger.warning('may need to provide `top_level` in kwargs to find file')
             do_symlink_filetype('S2D', files[:limit], directory, **kwargs)
         else:
-            do_download_filetype('S2D', files[:limit], directory, verbose=self.verbose, **kwargs)
+            do_download_filetype('S2D', files[:limit], directory,
+                                 verbose=self.verbose, user=self.user, **kwargs)
+


     from .plots import plot, plot_fwhm, plot_bispan, plot_contrast, plot_rhk, plot_berv, plot_quantity
@@ -1300,10 +1305,9 @@

     def remove_single_observations(self):
         """ Remove instruments for which there is a single observation """
-        instruments = deepcopy(self.instruments)
-        for inst in instruments:
-            if getattr(self, inst).mtime.size == 1:
-                self.remove_instrument(inst)
+        singles = [i for i in self.instruments if getattr(self, i).mtime.size == 1]
+        for inst in singles:
+            self.remove_instrument(inst, strict=True)

     def remove_prog_id(self, prog_id):
         """ Remove observations from a given program ID """
arvi.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
-Metadata-Version: 2.1
+Metadata-Version: 2.2
 Name: arvi
-Version: 0.1.26
+Version: 0.1.27
 Summary: The Automated RV Inspector
 Author-email: João Faria <joao.faria@unige.ch>
 License: MIT
arvi.egg-info/SOURCES.txt
@@ -4,6 +4,7 @@ README.md
 mkdocs.yml
 pyproject.toml
 setup.py
+.github/dependabot.yml
 .github/workflows/docs-gh-pages.yml
 .github/workflows/install.yml
 .github/workflows/python-publish.yml