arvi 0.1.11__tar.gz → 0.1.12__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (53)
  1. {arvi-0.1.11 → arvi-0.1.12}/PKG-INFO +1 -1
  2. {arvi-0.1.11 → arvi-0.1.12}/arvi/__init__.py +1 -0
  3. arvi-0.1.12/arvi/ariadne_wrapper.py +70 -0
  4. arvi-0.1.12/arvi/config.py +8 -0
  5. {arvi-0.1.11 → arvi-0.1.12}/arvi/dace_wrapper.py +23 -9
  6. arvi-0.1.12/arvi/gaia_wrapper.py +94 -0
  7. {arvi-0.1.11 → arvi-0.1.12}/arvi/programs.py +41 -18
  8. {arvi-0.1.11 → arvi-0.1.12}/arvi/simbad_wrapper.py +7 -1
  9. {arvi-0.1.11 → arvi-0.1.12}/arvi/timeseries.py +115 -53
  10. arvi-0.1.12/arvi/translations.py +21 -0
  11. {arvi-0.1.11 → arvi-0.1.12}/arvi.egg-info/PKG-INFO +1 -1
  12. {arvi-0.1.11 → arvi-0.1.12}/arvi.egg-info/SOURCES.txt +3 -0
  13. {arvi-0.1.11 → arvi-0.1.12}/pyproject.toml +1 -1
  14. arvi-0.1.12/tests/test_simbad.py +20 -0
  15. arvi-0.1.11/arvi/config.py +0 -2
  16. arvi-0.1.11/arvi/translations.py +0 -10
  17. {arvi-0.1.11 → arvi-0.1.12}/.github/workflows/docs-gh-pages.yml +0 -0
  18. {arvi-0.1.11 → arvi-0.1.12}/.github/workflows/install.yml +0 -0
  19. {arvi-0.1.11 → arvi-0.1.12}/.github/workflows/python-publish.yml +0 -0
  20. {arvi-0.1.11 → arvi-0.1.12}/.gitignore +0 -0
  21. {arvi-0.1.11 → arvi-0.1.12}/LICENSE +0 -0
  22. {arvi-0.1.11 → arvi-0.1.12}/README.md +0 -0
  23. {arvi-0.1.11 → arvi-0.1.12}/arvi/HZ.py +0 -0
  24. {arvi-0.1.11 → arvi-0.1.12}/arvi/binning.py +0 -0
  25. {arvi-0.1.11 → arvi-0.1.12}/arvi/data/extra/HD86226_PFS1.rdb +0 -0
  26. {arvi-0.1.11 → arvi-0.1.12}/arvi/data/extra/HD86226_PFS2.rdb +0 -0
  27. {arvi-0.1.11 → arvi-0.1.12}/arvi/data/extra/metadata.json +0 -0
  28. {arvi-0.1.11 → arvi-0.1.12}/arvi/data/info.svg +0 -0
  29. {arvi-0.1.11 → arvi-0.1.12}/arvi/data/obs_affected_ADC_issues.dat +0 -0
  30. {arvi-0.1.11 → arvi-0.1.12}/arvi/data/obs_affected_blue_cryostat_issues.dat +0 -0
  31. {arvi-0.1.11 → arvi-0.1.12}/arvi/extra_data.py +0 -0
  32. {arvi-0.1.11 → arvi-0.1.12}/arvi/instrument_specific.py +0 -0
  33. {arvi-0.1.11 → arvi-0.1.12}/arvi/lbl_wrapper.py +0 -0
  34. {arvi-0.1.11 → arvi-0.1.12}/arvi/nasaexo_wrapper.py +0 -0
  35. {arvi-0.1.11 → arvi-0.1.12}/arvi/plots.py +0 -0
  36. {arvi-0.1.11 → arvi-0.1.12}/arvi/reports.py +0 -0
  37. {arvi-0.1.11 → arvi-0.1.12}/arvi/setup_logger.py +0 -0
  38. {arvi-0.1.11 → arvi-0.1.12}/arvi/stats.py +0 -0
  39. {arvi-0.1.11 → arvi-0.1.12}/arvi/utils.py +0 -0
  40. {arvi-0.1.11 → arvi-0.1.12}/arvi.egg-info/dependency_links.txt +0 -0
  41. {arvi-0.1.11 → arvi-0.1.12}/arvi.egg-info/requires.txt +0 -0
  42. {arvi-0.1.11 → arvi-0.1.12}/arvi.egg-info/top_level.txt +0 -0
  43. {arvi-0.1.11 → arvi-0.1.12}/docs/API.md +0 -0
  44. {arvi-0.1.11 → arvi-0.1.12}/docs/detailed.md +0 -0
  45. {arvi-0.1.11 → arvi-0.1.12}/docs/index.md +0 -0
  46. {arvi-0.1.11 → arvi-0.1.12}/docs/logo/detective.png +0 -0
  47. {arvi-0.1.11 → arvi-0.1.12}/docs/logo/logo.png +0 -0
  48. {arvi-0.1.11 → arvi-0.1.12}/mkdocs.yml +0 -0
  49. {arvi-0.1.11 → arvi-0.1.12}/setup.cfg +0 -0
  50. {arvi-0.1.11 → arvi-0.1.12}/setup.py +0 -0
  51. {arvi-0.1.11 → arvi-0.1.12}/tests/test_binning.py +0 -0
  52. {arvi-0.1.11 → arvi-0.1.12}/tests/test_import_object.py +0 -0
  53. {arvi-0.1.11 → arvi-0.1.12}/tests/test_stats.py +0 -0

{arvi-0.1.11 → arvi-0.1.12}/PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: arvi
- Version: 0.1.11
+ Version: 0.1.12
  Summary: The Automated RV Inspector
  Author-email: João Faria <joao.faria@unige.ch>
  License: MIT

{arvi-0.1.11 → arvi-0.1.12}/arvi/__init__.py
@@ -8,6 +8,7 @@ def __getattr__(name: str):
      if name in (
          '_ipython_canary_method_should_not_exist_',
          '_repr_mimebundle_',
+         '__wrapped__'
      ):
          return


arvi-0.1.12/arvi/ariadne_wrapper.py (new file)
@@ -0,0 +1,70 @@
+ import os
+ import sys
+ from matplotlib import pyplot as plt
+
+ try:
+     from astroARIADNE.star import Star
+     from astroARIADNE.fitter import Fitter
+ except ImportError:
+     print('This module requires astroARIADNE. Install with `pip install astroARIADNE`')
+     sys.exit(0)
+
+
+ def run_ariadne(self, fit=True, plot=True, priors={},
+                 models = ('phoenix', 'btsettl', 'btnextgen', 'btcond', 'kurucz', 'ck04'),
+                 nlive=300, dlogz=1, threads=6, dynamic=False, **kwargs):
+     if hasattr(self, 'gaia'):
+         s = Star(self.star, self.gaia.ra, self.gaia.dec, g_id=self.gaia.dr3_id,
+                  search_radius=1)
+     else:
+         s = Star(self.star, self.simbad.ra, self.simbad.dec, g_id=self.simbad.gaia_id,
+                  search_radius=1)
+
+     out_folder = f'{self.star}_ariadne'
+
+     setup = dict(engine='dynesty', nlive=nlive, dlogz=dlogz,
+                  bound='multi', sample='auto', threads=threads, dynamic=dynamic)
+     setup = list(setup.values())
+
+     f = Fitter()
+     f.star = s
+     f.setup = setup
+     f.av_law = 'fitzpatrick'
+     f.out_folder = out_folder
+     f.bma = True
+     f.models = models
+     f.n_samples = 10_000
+
+     f.prior_setup = {
+         'teff': priors.get('teff', ('default')),
+         'logg': ('default'),
+         'z': priors.get('feh', ('default')),
+         'dist': ('default'),
+         'rad': ('default'),
+         'Av': ('default')
+     }
+
+     if fit:
+         f.initialize()
+         f.fit_bma()
+
+     if plot:
+         from pkg_resources import resource_filename
+         from astroARIADNE.plotter import SEDPlotter
+         modelsdir = resource_filename('astroARIADNE', 'Datafiles/models')
+         artist = SEDPlotter(os.path.join(out_folder, 'BMA.pkl'), out_folder, models_dir=modelsdir)
+
+         artist.plot_SED_no_model()
+         try:
+             artist.plot_SED()
+         except FileNotFoundError as e:
+             print('No model found:', e)
+         except IndexError as e:
+             print('Error!')
+         artist.plot_bma_hist()
+         artist.plot_bma_HR(10)
+         artist.plot_corner()
+         plt.close('all')
+         return s, f, artist
+
+     return s, f
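
Note that `run_ariadne` takes the RV object itself as its first argument, so it can be called as a plain function on an existing object. A minimal sketch, assuming the package exposes the `RV` class from `timeseries.py` at the top level (the target name is illustrative, and the fit needs astroARIADNE's model grids installed):

```python
from arvi import RV                          # assumed top-level export
from arvi.ariadne_wrapper import run_ariadne

s = RV('HD69830')                            # illustrative target
star, fitter = run_ariadne(s, fit=True, plot=False)  # returns (Star, Fitter) when plot=False
```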

arvi-0.1.12/arvi/config.py (new file)
@@ -0,0 +1,8 @@
+ # whether to return self from (some) RV methods
+ return_self = False
+
+ # whether to check internet connection before querying DACE
+ check_internet = False
+
+ # make all DACE requests without using a .dacerc file
+ request_as_public = False
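
These are plain module-level attributes, and `load_spectroscopy` in `dace_wrapper.py` (below) imports `request_as_public` at call time, so the flags can be toggled at runtime before any query. A minimal sketch (the flag names come from this file; the top-level `RV` import is an assumption):

```python
import arvi.config

arvi.config.request_as_public = True   # query DACE anonymously, ignoring any .dacerc
arvi.config.return_self = True         # let (some) RV methods return self, for chaining

from arvi import RV                    # assumed top-level export
s = RV('HD69830')                      # illustrative target
```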

{arvi-0.1.11 → arvi-0.1.12}/arvi/dace_wrapper.py
@@ -9,6 +9,11 @@ from .utils import create_directory, all_logging_disabled, stdout_disabled, tqdm


  def load_spectroscopy() -> SpectroscopyClass:
+     from .config import request_as_public
+     if request_as_public:
+         with all_logging_disabled():
+             dace = DaceClass(dace_rc_config_path='none')
+         return SpectroscopyClass(dace_instance=dace)
      if 'DACERC' in os.environ:
          dace = DaceClass(dace_rc_config_path=os.environ['DACERC'])
          return SpectroscopyClass(dace_instance=dace)
@@ -181,9 +186,9 @@ def check_existing(output_directory, files, type):
  def download(files, type, output_directory):
      """ Download files from DACE """
      Spectroscopy = load_spectroscopy()
-     # with stdout_disabled(), all_logging_disabled():
-     Spectroscopy.download_files(files, file_type=type.lower(),
-                                 output_directory=output_directory)
+     with stdout_disabled(), all_logging_disabled():
+         Spectroscopy.download_files(files, file_type=type.lower(),
+                                     output_directory=output_directory)

  def extract_fits(output_directory):
      """ Extract fits files from tar.gz file """
@@ -212,21 +217,30 @@ def do_download_filetype(type, raw_files, output_directory, clobber=False,
      if not clobber:
          raw_files = check_existing(output_directory, raw_files, type)

+     n = raw_files.size
+
      # any file left to download?
-     if raw_files.size == 0:
+     if n == 0:
          if verbose:
              logger.info('no files to download')
          return

-     if verbose:
-         n = raw_files.size
-         logger.info(f"Downloading {n} {type}s into '{output_directory}'...")
-
      # avoid an empty chunk
      if chunk_size > n:
          chunk_size = n

-     for files in tqdm(zip(*(iter(raw_files),) * chunk_size), total=n // chunk_size):
+     if verbose:
+         if chunk_size < n:
+             msg = f"Downloading {n} {type}s "
+             msg += f"(in chunks of {chunk_size}) "
+             msg += f"into '{output_directory}'..."
+             logger.info(msg)
+         else:
+             msg = f"Downloading {n} {type}s into '{output_directory}'..."
+             logger.info(msg)
+
+     iterator = [raw_files[i:i + chunk_size] for i in range(0, n, chunk_size)]
+     for files in tqdm(iterator, total=len(iterator)):
          download(files, type, output_directory)
          extract_fits(output_directory)

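The rewritten download loop replaces the `zip(*(iter(...),) * chunk_size)` idiom, which silently drops a trailing partial chunk, with explicit slicing that keeps every file. A standalone illustration of the difference (plain list used for clarity; not package code):

```python
raw_files = ['f1', 'f2', 'f3', 'f4', 'f5']
chunk_size = 2

# old idiom: the shared iterator loses the final, incomplete chunk
old = list(zip(*(iter(raw_files),) * chunk_size))
print(old)   # [('f1', 'f2'), ('f3', 'f4')] -- 'f5' is never downloaded

# new idiom: slicing also yields the last short chunk
new = [raw_files[i:i + chunk_size] for i in range(0, len(raw_files), chunk_size)]
print(new)   # [['f1', 'f2'], ['f3', 'f4'], ['f5']]
```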

arvi-0.1.12/arvi/gaia_wrapper.py (new file)
@@ -0,0 +1,94 @@
+ import os
+ from io import StringIO
+ from csv import DictReader
+ from dataclasses import dataclass, field
+ import requests
+
+ from astropy.coordinates import SkyCoord
+ import pysweetcat
+
+ DATA_PATH = os.path.dirname(__file__)
+ DATA_PATH = os.path.join(DATA_PATH, 'data')
+
+ QUERY = """
+ SELECT TOP 20 gaia_source.designation,gaia_source.source_id,gaia_source.ra,gaia_source.dec,gaia_source.parallax,gaia_source.pmra,gaia_source.pmdec,gaia_source.ruwe,gaia_source.phot_g_mean_mag,gaia_source.bp_rp,gaia_source.radial_velocity,gaia_source.phot_variable_flag,gaia_source.non_single_star,gaia_source.has_xp_continuous,gaia_source.has_xp_sampled,gaia_source.has_rvs,gaia_source.has_epoch_photometry,gaia_source.has_epoch_rv,gaia_source.has_mcmc_gspphot,gaia_source.has_mcmc_msc,gaia_source.teff_gspphot,gaia_source.logg_gspphot,gaia_source.mh_gspphot,gaia_source.distance_gspphot,gaia_source.azero_gspphot,gaia_source.ag_gspphot,gaia_source.ebpminrp_gspphot
+ FROM gaiadr3.gaia_source
+ WHERE
+     CONTAINS(
+         POINT('ICRS',gaiadr3.gaia_source.ra,gaiadr3.gaia_source.dec),
+         CIRCLE(
+             'ICRS',
+             COORD1(EPOCH_PROP_POS({ra},{dec},{plx},{pmra},{pmdec},{rv},2000,2016.0)),
+             COORD2(EPOCH_PROP_POS({ra},{dec},{plx},{pmra},{pmdec},{rv},2000,2016.0)),
+             0.001388888888888889)
+     )=1
+ """
+
+ def run_query(query):
+     url = 'https://gea.esac.esa.int/tap-server/tap/sync'
+     data = dict(query=query, request='doQuery', lang='ADQL', format='csv')
+     try:
+         response = requests.post(url, data=data, timeout=10)
+     except requests.ReadTimeout as err:
+         raise IndexError(err)
+     except requests.ConnectionError as err:
+         raise IndexError(err)
+     return response.content.decode()
+
+ def parse_csv(csv):
+     reader = DictReader(StringIO(csv))
+     return list(reader)
+
+
+ class gaia:
+     """
+     A very simple wrapper around a TAP query to gaia for a given target. This
+     class simply runs a few TAP queries and stores the result as attributes.
+
+     Attributes:
+         ra (float): right ascension
+         dec (float): declination
+         coords (SkyCoord): coordinates as a SkyCoord object
+         dr3_id (int): Gaia DR3 identifier
+         plx (float): parallax
+         radial_velocity (float): radial velocity
+     """
+     def __init__(self, star:str, simbad=None):
+         """
+         Args:
+             star (str): The name of the star to query simbad
+         """
+         self.star = star
+
+         if simbad is None:
+             from .simbad_wrapper import simbad as Simbad
+             simbad = Simbad(star)
+
+         ra = simbad.ra
+         dec = simbad.dec
+         plx = simbad.plx
+         pmra = simbad.pmra
+         pmdec = simbad.pmdec
+         rv = simbad.rvz_radvel
+         args = dict(ra=ra, dec=dec, plx=plx, pmra=pmra, pmdec=pmdec, rv=rv)
+
+         try:
+             table1 = run_query(query=QUERY.format(**args))
+             results = parse_csv(table1)[0]
+         except IndexError:
+             raise ValueError(f'Gaia query for {star} failed')
+
+         self.dr3_id = int(results['source_id'])
+
+         self.ra = float(results['ra'])
+         self.dec = float(results['dec'])
+         self.pmra = float(results['pmra'])
+         self.pmdec = float(results['pmdec'])
+         self.coords = SkyCoord(self.ra, self.dec, unit='deg')
+         self.plx = float(results['parallax'])
+         self.radial_velocity = float(results['radial_velocity'])
+
+         return
+
+     def __repr__(self):
+         return f'{self.star} (DR3 id={self.dr3_id})'
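
For orientation, the new wrapper works standalone: it resolves the target through the existing simbad wrapper, propagates the coordinates to epoch 2016.0 inside the ADQL cone search, and stores the first match as attributes. A sketch (requires network access; the target name is illustrative):

```python
from arvi.gaia_wrapper import gaia

g = gaia('HD69830')
print(g)                        # e.g. 'HD69830 (DR3 id=...)'
print(g.dr3_id)                 # Gaia DR3 source_id as int
print(g.plx, g.pmra, g.pmdec)   # parallax [mas] and proper motions [mas/yr]
```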

{arvi-0.1.11 → arvi-0.1.12}/arvi/programs.py
@@ -1,5 +1,7 @@
  import os
- import concurrent.futures
+ import multiprocessing
+ from functools import partial
+ from itertools import chain
  from collections import namedtuple
  from tqdm import tqdm
  # import numpy as np
@@ -13,7 +15,8 @@ path = os.path.join(os.path.dirname(__file__), 'data')


  def get_star(star, instrument=None):
-     return RV(star, verbose=False, instrument=instrument, _raise_on_error=False)
+     return RV(star, instrument=instrument,
+               _raise_on_error=False, verbose=False, load_extra_data=False)


  class LazyRV:
@@ -22,6 +25,7 @@ class LazyRV:
          if isinstance(self.stars, str):
              self.stars = [self.stars]
          self.instrument = instrument
+         self._saved = None

      @property
      def N(self):
@@ -31,30 +35,49 @@ class LazyRV:
          return f"RV({self.N} stars)"

      def _get(self):
-         result = []
-         # use a with statement to ensure threads are cleaned up promptly
-         with concurrent.futures.ThreadPoolExecutor(max_workers=8) as pool:
-             star_to_RV = {
-                 pool.submit(get_star, star, self.instrument): star
-                 for star in self.stars
-             }
+         if self.N > 10:
+             # logger.info('Querying DACE...')
+             _get_star = partial(get_star, instrument=self.instrument)
+             with multiprocessing.Pool() as pool:
+                 result = list(tqdm(pool.imap(_get_star, self.stars),
+                                    total=self.N, unit='star', desc='Querying DACE'))
+             # result = pool.map(get_star, self.stars)
+         else:
+             result = []
              logger.info('Querying DACE...')
-             pbar = tqdm(concurrent.futures.as_completed(star_to_RV),
-                         total=self.N, unit='star')
-             for future in pbar:
-                 star = star_to_RV[future]
+             pbar = tqdm(self.stars, total=self.N, unit='star')
+             for star in pbar:
                  pbar.set_description(star)
-                 try:
-                     result.append(future.result())
-                 except Exception:
-                     print(f'{star} generated an exception')
+                 result.append(get_star(star, self.instrument))
+
          return result

+     # # use a with statement to ensure threads are cleaned up promptly
+     # with concurrent.futures.ThreadPoolExecutor(max_workers=8) as pool:
+     #     star_to_RV = {
+     #         pool.submit(get_star, star, self.instrument): star
+     #         for star in self.stars
+     #     }
+     #     logger.info('Querying DACE...')
+     #     pbar = tqdm(concurrent.futures.as_completed(star_to_RV),
+     #                 total=self.N, unit='star')
+     #     for future in pbar:
+     #         star = star_to_RV[future]
+     #         pbar.set_description(star)
+     #         try:
+     #             result.append(future.result())
+     #         except ValueError:
+     #             print(f'{star} generated an exception')
+     #             result.append(None)
+     # return result
+
      def __iter__(self):
          return self._get()

      def __call__(self):
-         return self._get()
+         if not self._saved:
+             self._saved = self._get()
+         return self._saved


  # sorted by spectral type
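
Two behavioral changes here: `_get` now uses a process pool when more than 10 stars are requested, and `__call__` memoizes its result in `_saved`, so repeated calls no longer re-query DACE. The caching pattern in isolation (illustrative class, not package code):

```python
class Lazy:
    def __init__(self, compute):
        self.compute = compute
        self._saved = None

    def __call__(self):
        # first call does the work; later calls return the cached result
        # (note: a falsy result, e.g. an empty list, would be recomputed)
        if not self._saved:
            self._saved = self.compute()
        return self._saved

lazy = Lazy(lambda: ['expensive', 'result'])
assert lazy() is lazy()   # the second call hits the cache
```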

{arvi-0.1.11 → arvi-0.1.12}/arvi/simbad_wrapper.py
@@ -85,7 +85,8 @@ class simbad:
          dec (float): declination
          coords (SkyCoord): coordinates as a SkyCoord object
          main_id (str): main identifier
-         plx_value (float): parallax
+         gaia_id (int): Gaia DR3 identifier
+         plx (float): parallax
          rvz_radvel (float): radial velocity
          sp_type (str): spectral type
          B (float): B magnitude
@@ -125,6 +126,8 @@
          except IndexError:
              raise ValueError(f'simbad query for {star} failed')

+         self.gaia_id = int([i for i in self.ids if 'Gaia DR3' in i][0].split('Gaia DR3')[-1])
+
          for col, val in zip(cols, values):
              if col == 'oid':
                  setattr(self, col, str(val))
@@ -138,6 +141,9 @@

          if self.plx_value == '':
              self.plx_value = None
+
+         self.plx = self._plx_value = self.plx_value
+         del self.plx_value

          try:
              swc_data = pysweetcat.get_data()
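
The new `gaia_id` one-liner scans the simbad identifier list for the 'Gaia DR3' entry and parses out the numeric part. Unpacked, with hypothetical values:

```python
ids = ['HD 69830', 'HIP 40693', 'Gaia DR3 5726982995343100928']   # hypothetical list
dr3 = [i for i in ids if 'Gaia DR3' in i][0]   # 'Gaia DR3 5726982995343100928'
gaia_id = int(dr3.split('Gaia DR3')[-1])       # int() tolerates the leading space
print(gaia_id)                                 # 5726982995343100928
```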

{arvi-0.1.11 → arvi-0.1.12}/arvi/timeseries.py
@@ -15,6 +15,7 @@ from .config import return_self, check_internet
  from .translations import translate
  from .dace_wrapper import do_download_filetype, get_observations, get_arrays
  from .simbad_wrapper import simbad
+ from .gaia_wrapper import gaia
  from .extra_data import get_extra_data
  from .stats import wmean, wrms
  from .binning import bin_ccf_mask, binRV
@@ -49,7 +50,7 @@ class RV:
      do_sigma_clip: bool = field(init=True, repr=False, default=False)
      do_adjust_means: bool = field(init=True, repr=False, default=True)
      only_latest_pipeline: bool = field(init=True, repr=False, default=True)
-     load_extra_data: Union[bool, str] = field(init=True, repr=False, default=True)
+     load_extra_data: Union[bool, str] = field(init=True, repr=False, default=False)
      #
      _child: bool = field(init=True, repr=False, default=False)
      _did_secular_acceleration: bool = field(init=False, repr=False, default=False)
@@ -72,6 +73,7 @@
          if not self._child:
              if check_internet and not there_is_internet():
                  raise ConnectionError('There is no internet connection?')
+
              # complicated way to query Simbad with self.__star__ or, if that
              # fails, try after removing a trailing 'A'
              for target in (self.__star__, self.__star__.replace('A', '')):
@@ -84,11 +86,23 @@
                  if self.verbose:
                      logger.error(f'simbad query for {self.__star__} failed')

+             # complicated way to query Gaia with self.__star__ or, if that
+             # fails, try after removing a trailing 'A'
+             for target in (self.__star__, self.__star__.replace('A', '')):
+                 try:
+                     self.gaia = gaia(target)
+                     break
+                 except ValueError:
+                     continue
+             else:
+                 if self.verbose:
+                     logger.error(f'Gaia query for {self.__star__} failed')
+
              # query DACE
              if self.verbose:
                  logger.info(f'querying DACE for {self.__star__}...')
              try:
-                 self.dace_result = get_observations(self.__star__, self.instrument,
+                 self.dace_result = get_observations(self.__star__, self.instrument,
                                                      verbose=self.verbose)
              except ValueError as e:
                  # querying DACE failed, should we raise an error?
@@ -248,7 +262,7 @@
          """ Masked array of times """
          return self.time[self.mask]

-     @property
+     @property
      def mvrad(self) -> np.ndarray:
          """ Masked array of radial velocities """
          return self.vrad[self.mask]
@@ -374,7 +388,7 @@
              star, timestamp = file.replace('.pkl', '').split('_')
          else:
              try:
-                 file = sorted(glob(f'{star}_*.pkl'))[-1]
+                 file = sorted(glob(f'{star}_*.*.pkl'))[-1]
              except IndexError:
                  raise ValueError(f'cannot find any file matching {star}_*.pkl')
              star, timestamp = file.replace('.pkl', '').split('_')
@@ -409,7 +423,7 @@
          if star_.size == 1:
              logger.info(f'assuming star is {star_[0]}')
              star = star_[0]
-
+
          if instrument is None:
              instruments = np.array([os.path.splitext(f)[0].split('_')[1] for f in files])
              logger.info(f'assuming instruments: {instruments}')
@@ -495,7 +509,7 @@
          for q in ['drs_qc']:
              setattr(_s, q, np.full(time.size, True))
              _quantities.append(q)
-
+
          #! end hack

          _s.mask = np.ones_like(time, dtype=bool)
@@ -522,10 +536,10 @@
          except ImportError:
              logger.error('iCCF is not installed. Please install it with `pip install iCCF`')
              return
-
+
          if isinstance(files, str):
              files = [files]
-
+
          I = iCCF.from_file(files)

          objects = np.unique([i.HDU[0].header['OBJECT'].replace(' ', '') for i in I])
@@ -729,13 +743,13 @@

      def remove_instrument(self, instrument, strict=False):
          """ Remove all observations from one instrument
-
+
          Args:
              instrument (str or list):
                  The instrument(s) for which to remove observations.
              strict (bool):
                  Whether to match (each) `instrument` exactly
-
+
          Note:
              A common name can be used to remove observations for several subsets
              of a given instrument. For example
@@ -788,11 +802,24 @@
          if return_self:
              return self

+     def remove_condition(self, condition):
+         """ Remove all observations that satisfy a condition
+
+         Args:
+             condition (np.ndarray):
+                 Boolean array of the same length as the observations
+         """
+         if self.verbose:
+             inst = np.unique(self.instrument_array[condition])
+             logger.info(f"Removing {condition.sum()} points from instruments {inst}")
+         self.mask = self.mask & ~condition
+         self._propagate_mask_changes()
+
      def remove_point(self, index):
          """
          Remove individual observations at a given index (or indices).
          NOTE: Like Python, the index is 0-based.
-
+
          Args:
              index (int, list, ndarray):
                  Single index, list, or array of indices to remove.
@@ -899,45 +926,76 @@
              n_before = (self.obs < self.obs[m]).sum()
              getattr(self, inst).mask[m - n_before] = False

-     def secular_acceleration(self, epoch=55500, plot=False):
+     def secular_acceleration(self, epoch=None, just_compute=False, force_simbad=False):
          """
          Remove secular acceleration from RVs

          Args:
-             epoch (float):
+             epoch (float, optional):
                  The reference epoch (DACE uses 55500, 31/10/2010)
              instruments (bool or collection of str):
-                 Only remove secular acceleration for some instruments, or for all
+                 Only remove secular acceleration for some instruments, or for all
                  if `instruments=True`
-             plot (bool):
-                 Show a plot of the RVs with the secular acceleration
          """
-         if self._did_secular_acceleration: # don't do it twice
+         if self._did_secular_acceleration and not just_compute: # don't do it twice
              return
-
+
+         #as_yr = units.arcsec / units.year
+         mas_yr = units.milliarcsecond / units.year
+         mas = units.milliarcsecond
+
          try:
-             self.simbad
-         except AttributeError:
+             if force_simbad:
+                 raise AttributeError
+
+             self.gaia
+             self.gaia.plx
+
              if self.verbose:
-                 logger.error('no information from simbad, cannot remove secular acceleration')
-             return
+                 logger.info('using Gaia information to remove secular acceleration')
+
+             if epoch is None:
+                 # Gaia DR3 epoch (astropy.time.Time('J2016.0', format='jyear_str').jd)
+                 epoch = 57389.0
+
+             π = self.gaia.plx * mas
+             d = π.to(units.pc, equivalencies=units.parallax())
+             μα = self.gaia.pmra * mas_yr
+             μδ = self.gaia.pmdec * mas_yr
+             μ = μα**2 + μδ**2
+             sa = (μ * d).to(units.m / units.second / units.year,
+                             equivalencies=units.dimensionless_angles())
+
+         except AttributeError:
+             try:
+                 self.simbad
+             except AttributeError:
+                 if self.verbose:
+                     logger.error('no information from simbad, cannot remove secular acceleration')
+                 return
+
+             if self.simbad.plx is None:
+                 if self.verbose:
+                     logger.error('no parallax from simbad, cannot remove secular acceleration')
+                 return

-         if self.simbad.plx_value is None:
              if self.verbose:
-                 logger.error('no parallax from simbad, cannot remove secular acceleration')
-                 return
+                 logger.info('using Simbad information to remove secular acceleration')

-         #as_yr = units.arcsec / units.year
-         mas_yr = units.milliarcsecond / units.year
-         mas = units.milliarcsecond
+             if epoch is None:
+                 epoch = 55500
+
+             π = self.simbad.plx * mas
+             d = π.to(units.pc, equivalencies=units.parallax())
+             μα = self.simbad.pmra * mas_yr
+             μδ = self.simbad.pmdec * mas_yr
+             μ = μα**2 + μδ**2
+             sa = (μ * d).to(units.m / units.second / units.year,
+                             equivalencies=units.dimensionless_angles())
+
+         if just_compute:
+             return sa

-         π = self.simbad.plx_value * mas
-         d = π.to(units.pc, equivalencies=units.parallax())
-         μα = self.simbad.pmra * mas_yr
-         μδ = self.simbad.pmdec * mas_yr
-         μ = μα**2 + μδ**2
-         sa = (μ * d).to(units.m / units.second / units.year,
-                         equivalencies=units.dimensionless_angles())
          sa = sa.value

          if self.verbose:
@@ -961,7 +1019,7 @@
              continue

          s.vrad = s.vrad - sa * (s.time - epoch) / 365.25
-
+
          self._build_arrays()

          self._did_secular_acceleration = True
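
The quantity being removed is the perspective (secular) acceleration, sa = mu**2 * d, with mu the total proper motion and d the distance implied by the parallax; astropy's unit equivalencies handle the conversion to m/s per year, exactly as in the method above. A standalone check with made-up numbers:

```python
from astropy import units

mas = units.milliarcsecond
mas_yr = units.milliarcsecond / units.year

plx, pmra, pmdec = 546.0, -802.0, 10362.0   # hypothetical values [mas], [mas/yr]

d = (plx * mas).to(units.pc, equivalencies=units.parallax())
mu2 = (pmra * mas_yr)**2 + (pmdec * mas_yr)**2
sa = (mu2 * d).to(units.m / units.second / units.year,
                  equivalencies=units.dimensionless_angles())
print(sa)   # roughly 4.5 m / (s yr) for these numbers; the RVs are then
            # corrected by sa * (t - epoch) / 365.25, with t in days
```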
@@ -1012,7 +1070,7 @@

      def clip_maxerror(self, maxerror:float):
          """ Mask out points with RV error larger than a given value
-
+
          Args:
              maxerror (float): Maximum error to keep.
          """
@@ -1038,10 +1096,10 @@

          WARNING: This creates and returns a new object and does not modify self.
          """
-
+
          # create copy of self to be returned
          snew = deepcopy(self)
-
+
          all_bad_quantities = []

          for inst in snew.instruments:
@@ -1050,7 +1108,7 @@
              # only one observation?
              if s.N == 1:
                  continue
-
+
              # are all observations masked?
              if s.mtime.size == 0:
                  continue
@@ -1101,7 +1159,7 @@
              with warnings.catch_warnings():
                  warnings.filterwarnings('ignore', category=RuntimeWarning)
                  try:
-                     _, yb = binRV(s.mtime, Q[s.mask],
+                     _, yb = binRV(s.mtime, Q[s.mask],
                                    stat=np.nanmean, tstat=np.nanmean)
                      setattr(s, q, yb)
                  except TypeError:
@@ -1116,7 +1174,7 @@

          s.time = tb
          s.mask = np.full(tb.shape, True)
-
+

          if snew.verbose and len(all_bad_quantities) > 0:
              logger.warning('\nnew object will not have these non-float quantities')
@@ -1186,7 +1244,7 @@
          # log_msg += other
          # if i < len(others) - 1:
          #     log_msg += ', '
-
+
          # if self.verbose:
          #     logger.info(log_msg)

@@ -1237,7 +1295,7 @@
          self._build_arrays()


-     def save(self, directory=None, instrument=None, full=False,
+     def save(self, directory=None, instrument=None, full=False, postfix=None,
               save_masked=False, save_nans=True):
          """ Save the observations in .rdb files.

@@ -1246,9 +1304,10 @@
                  Directory where to save the .rdb files.
              instrument (str, optional):
                  Instrument for which to save observations.
-             full (bool, optional):
-                 Whether to save just RVs and errors (False) or more indicators
-                 (True).
+             full (bool, optional):
+                 Save just RVs and errors (False) or more indicators (True).
+             postfix (str, optional):
+                 Postfix to add to the filenames ([star]_[instrument]_[postfix].rdb).
              save_nans (bool, optional)
                  Whether to save NaN values in the indicators, if they exist. If
                  False, the full observation is not saved.
@@ -1301,8 +1360,11 @@
              else:
                  d = np.c_[_s.mtime, _s.mvrad, _s.msvrad]
                  header = 'bjd\tvrad\tsvrad\n---\t----\t-----'
-
+
              file = f'{star_name}_{inst}.rdb'
+             if postfix is not None:
+                 file = f'{star_name}_{inst}_{postfix}.rdb'
+
              files.append(file)
              file = os.path.join(directory, file)
@@ -1310,7 +1372,7 @@

          if self.verbose:
              logger.info(f'saving to {file}')
-
+
          return files

      def checksum(self, write_to=None):
@@ -1325,7 +1387,7 @@


      #
-     def run_lbl(self, instrument=None, data_dir=None,
+     def run_lbl(self, instrument=None, data_dir=None,
                  skysub=False, tell=False, limit=None, **kwargs):
          from .lbl_wrapper import run_lbl, NIRPS_create_telluric_corrected_S2D

@@ -1339,7 +1401,7 @@
              logger.error(f"No data from instrument '{instrument}'")
              logger.info(f'available: {self.instruments}')
              return
-
+
          if isinstance(instrument, str):
              instruments = [instrument]
          else:
@@ -1394,7 +1456,7 @@
              logger.error(f"No data from instrument '{instrument}'")
              logger.info(f'available: {self.instruments}')
              return
-
+
          if isinstance(instrument, str):
              instruments = [instrument]
          else:
@@ -1448,4 +1510,4 @@ def fit_sine(t, y, yerr, period='gls', fix_period=False):
      p0 = [y.ptp(), period, 0.0, 0.0]
      xbest, _ = leastsq(lambda p, t, y, ye: (sine(t, p) - y) / ye,
                         p0, args=(t, y, yerr))
-     return xbest, partial(sine, p=xbest)
+     return xbest, partial(sine, p=xbest)

arvi-0.1.12/arvi/translations.py (new file)
@@ -0,0 +1,21 @@
+ import re
+
+ STARS = {
+     'Barnard': 'GJ699',
+     "Barnard's": 'GJ699',
+ }
+
+
+ def translate(star):
+     # known translations
+     if star in STARS:
+         return STARS[star]
+
+     # regex translations
+     NGC_match = re.match(r'NGC([\s\d]+)No([\s\d]+)', star)
+     if NGC_match:
+         cluster = NGC_match.group(1).replace(' ', '')
+         target = NGC_match.group(2).replace(' ', '')
+         return f'Cl* NGC {cluster} MMU {target}'
+
+     return star
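
Behavior of the extended `translate` (names below are illustrative; spacing inside the NGC designation is tolerated by the `[\s\d]+` groups):

```python
from arvi.translations import translate

print(translate('Barnard'))          # 'GJ699'  (dictionary lookup)
print(translate('NGC 2345 No 12'))   # 'Cl* NGC 2345 MMU 12'  (regex rewrite)
print(translate('HD69830'))          # unchanged
```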

{arvi-0.1.11 → arvi-0.1.12}/arvi.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: arvi
- Version: 0.1.11
+ Version: 0.1.12
  Summary: The Automated RV Inspector
  Author-email: João Faria <joao.faria@unige.ch>
  License: MIT

{arvi-0.1.11 → arvi-0.1.12}/arvi.egg-info/SOURCES.txt
@@ -9,10 +9,12 @@ setup.py
  .github/workflows/python-publish.yml
  arvi/HZ.py
  arvi/__init__.py
+ arvi/ariadne_wrapper.py
  arvi/binning.py
  arvi/config.py
  arvi/dace_wrapper.py
  arvi/extra_data.py
+ arvi/gaia_wrapper.py
  arvi/instrument_specific.py
  arvi/lbl_wrapper.py
  arvi/nasaexo_wrapper.py
@@ -43,4 +45,5 @@ docs/logo/detective.png
  docs/logo/logo.png
  tests/test_binning.py
  tests/test_import_object.py
+ tests/test_simbad.py
  tests/test_stats.py

{arvi-0.1.11 → arvi-0.1.12}/pyproject.toml
@@ -8,7 +8,7 @@ authors = [
      {name = "João Faria", email = "joao.faria@unige.ch"},
  ]
  description = "The Automated RV Inspector"
- version = "0.1.11"
+ version = "0.1.12"
  readme = {file = "README.md", content-type = "text/markdown"}
  requires-python = ">=3.8"
  keywords = ["RV", "exoplanets"]

arvi-0.1.12/tests/test_simbad.py (new file)
@@ -0,0 +1,20 @@
+ import pytest
+
+ def test_import():
+     from arvi.simbad_wrapper import simbad
+
+ def test_star():
+     from arvi.simbad_wrapper import simbad
+     s = simbad('HD69830')
+     assert hasattr(s, 'ra')
+     assert hasattr(s, 'dec')
+     assert hasattr(s, 'coords')
+     assert hasattr(s, 'plx')
+     assert hasattr(s, 'sp_type')
+     assert hasattr(s, 'B')
+     assert hasattr(s, 'V')
+     assert hasattr(s, 'ids')
+     assert s.V == 5.95
+     assert s.sp_type == 'G8:V'
+
+

arvi-0.1.11/arvi/config.py (deleted)
@@ -1,2 +0,0 @@
- return_self = False
- check_internet = False

arvi-0.1.11/arvi/translations.py (deleted)
@@ -1,10 +0,0 @@
- STARS = {
-     'Barnard': 'GJ699',
-     "Barnard's": 'GJ699',
- }
-
-
- def translate(star):
-     if star in STARS:
-         return STARS[star]
-     return star