arvi 0.1.18-py3-none-any.whl → 0.1.19-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of arvi might be problematic; more details are available on the release page.

arvi/__init__.py CHANGED
@@ -1,7 +1,16 @@
1
1
  __all__ = ['RV']
2
2
 
3
+ from importlib.metadata import version, PackageNotFoundError
4
+
5
+ from .config import config
3
6
  from .timeseries import RV
4
7
 
8
+ try:
9
+ __version__ = version("arvi")
10
+ except PackageNotFoundError:
11
+ # package is not installed
12
+ pass
13
+
5
14
  ## OLD
6
15
  # # the __getattr__ function is always called twice, so we need this
7
16
  # # to only build and return the RV object on the second time
arvi/ariadne_wrapper.py CHANGED
@@ -2,6 +2,9 @@ import os
2
2
  import sys
3
3
  from matplotlib import pyplot as plt
4
4
 
5
+ from .utils import stdout_disabled, all_logging_disabled
6
+ from .setup_logger import logger
7
+
5
8
  try:
6
9
  from astroARIADNE.star import Star
7
10
  from astroARIADNE.fitter import Fitter
arvi/berv.py CHANGED
@@ -12,7 +12,7 @@ from astropy.timeseries import LombScargle
12
12
  from tqdm import tqdm
13
13
 
14
14
  from .setup_logger import logger
15
- from . import config
15
+ from .config import config
16
16
 
17
17
 
18
18
  def correct_rvs(self, simple=False, H=None, save_files=False, plot=True):
@@ -349,7 +349,6 @@ def BERV(self, H=None, use_gaia_meassurements=False, plx=None,
349
349
  axs[1].plot(bjd, diff, 'k.', label=label)
350
350
  axs[1].axhline(np.mean(diff), ls='--', c='k', alpha=0.1)
351
351
 
352
- from adjustText import adjust_text
353
352
  text = axs[1].text(bjd.max(), diff.min() + 0.1*diff.ptp(),
354
353
  f'ptp: {diff.ptp()*1e2:.2f} cm/s',
355
354
  ha='right', va='bottom', color='g', alpha=0.8)
arvi/config.py CHANGED
@@ -1,14 +1,36 @@
1
- # whether to return self from (some) RV methods
2
- return_self = False
3
1
 
4
- # whether to check internet connection before querying DACE
5
- check_internet = False
2
+ def instancer(cls):
3
+ return cls()
6
4
 
7
- # make all DACE requests without using a .dacerc file
8
- request_as_public = False
5
+ @instancer
6
+ class config:
7
+ # configuration values
8
+ __conf = {
9
+ # whether to return self from (some) RV methods
10
+ 'return_self': False,
11
+ # whether to adjust instrument means before gls by default
12
+ 'adjust_means_gls': True,
13
+ # whether to check internet connection before querying DACE
14
+ 'check_internet': False,
15
+ # make all DACE requests without using a .dacerc file
16
+ 'request_as_public': False,
17
+ # username for DACE servers
18
+ 'username': 'desousaj',
19
+ # debug
20
+ 'debug': False,
21
+ }
22
+ # all, for now
23
+ __setters = list(__conf.keys())
9
24
 
10
- # whether to adjust instrument means before gls by default
11
- adjust_means_gls = True
25
+ def __getattr__(self, name):
26
+ if name in ('__custom_documentations__', ):
27
+ # return {'return_self': 'help!'}
28
+ return {}
12
29
 
13
- # debug
14
- debug = False
30
+ return self.__conf[name]
31
+
32
+ def __setattr__(self, name, value):
33
+ if name in config.__setters:
34
+ self.__conf[name] = value
35
+ else:
36
+ raise NameError(f"unknown configuration name '{name}'")
arvi/dace_wrapper.py CHANGED
@@ -10,8 +10,8 @@ from .utils import create_directory, all_logging_disabled, stdout_disabled, tqdm
10
10
 
11
11
 
12
12
  def load_spectroscopy() -> SpectroscopyClass:
13
- from .config import request_as_public
14
- if request_as_public:
13
+ from .config import config
14
+ if config.request_as_public:
15
15
  with all_logging_disabled():
16
16
  dace = DaceClass(dace_rc_config_path='none')
17
17
  return SpectroscopyClass(dace_instance=dace)
@@ -123,7 +123,7 @@ def get_observations_from_instrument(star, instrument, main_id=None):
123
123
 
124
124
  Spectroscopy = load_spectroscopy()
125
125
  filters = {
126
- "ins_name": {"contains": [instrument]},
126
+ "ins_name": {"contains": [instrument]},
127
127
  "obj_id_daceid": {"contains": [dace_id]}
128
128
  }
129
129
  with stdout_disabled(), all_logging_disabled():
@@ -251,9 +251,9 @@ def get_observations(star, instrument=None, main_id=None, verbose=True):
251
251
  # (i.e. ensure that 3.x.x > 3.5)
252
252
  from re import match
253
253
  def cmp(a, b):
254
- if a[0] in ('3.5', '3.5 EGGS') and match(r'3.\d.\d', b[0]):
254
+ if a[0] in ('3.5', '3.5 EGGS') or 'EGGS' in a[0] and match(r'3.\d.\d', b[0]):
255
255
  return -1
256
- if b[0] in ('3.5', '3.5 EGGS') and match(r'3.\d.\d', a[0]):
256
+ if b[0] in ('3.5', '3.5 EGGS') or 'EGGS' in b[0] and match(r'3.\d.\d', a[0]):
257
257
  return 1
258
258
 
259
259
  if a[0] == b[0]:
@@ -411,12 +411,12 @@ def do_download_filetype(type, raw_files, output_directory, clobber=False,
411
411
 
412
412
  if verbose:
413
413
  if chunk_size < n:
414
- msg = f"Downloading {n} {type}s "
414
+ msg = f"downloading {n} {type}s "
415
415
  msg += f"(in chunks of {chunk_size}) "
416
416
  msg += f"into '{output_directory}'..."
417
417
  logger.info(msg)
418
418
  else:
419
- msg = f"Downloading {n} {type}s into '{output_directory}'..."
419
+ msg = f"downloading {n} {type}s into '{output_directory}'..."
420
420
  logger.info(msg)
421
421
 
422
422
  iterator = [raw_files[i:i + chunk_size] for i in range(0, n, chunk_size)]
@@ -424,7 +424,7 @@ def do_download_filetype(type, raw_files, output_directory, clobber=False,
424
424
  download(files, type, output_directory)
425
425
  extract_fits(output_directory)
426
426
 
427
- logger.info('Extracted .fits files')
427
+ logger.info('extracted .fits files')
428
428
 
429
429
 
430
430
  # def do_download_s1d(raw_files, output_directory, clobber=False, verbose=True):
arvi/gaia_wrapper.py CHANGED
@@ -30,6 +30,7 @@ gaia_source.source_id = {id}
30
30
  """
31
31
 
32
32
  translate = {
33
+ 'Proxima': '5853498713190525696',
33
34
  'LS II +14 13': '4318465066420528000',
34
35
  }
35
36
 
@@ -38,7 +39,7 @@ def run_query(query):
38
39
  url = 'https://gea.esac.esa.int/tap-server/tap/sync'
39
40
  data = dict(query=query, request='doQuery', lang='ADQL', format='csv')
40
41
  try:
41
- response = requests.post(url, data=data, timeout=5)
42
+ response = requests.post(url, data=data, timeout=2)
42
43
  except requests.ReadTimeout as err:
43
44
  raise IndexError(err)
44
45
  except requests.ConnectionError as err:
@@ -85,6 +86,8 @@ class gaia:
85
86
  try:
86
87
  if star in translate:
87
88
  table = run_query(query=QUERY_ID.format(id=translate[star]))
89
+ elif hasattr(simbad, 'gaia_id'):
90
+ table = run_query(query=QUERY_ID.format(id=simbad.gaia_id))
88
91
  else:
89
92
  table = run_query(query=QUERY.format(**args))
90
93
  results = parse_csv(table)[0]
@@ -103,11 +103,10 @@ def ADC_issues(self, mask=True, plot=True, check_headers=False):
103
103
  """
104
104
  instruments = self._check_instrument('ESPRESSO')
105
105
 
106
- if len(instruments) < 1:
106
+ if instruments is None:
107
107
  if self.verbose:
108
- logger.error(f"no data from ESPRESSO")
109
- logger.info(f'available: {self.instruments}')
110
-
108
+ logger.error(f"ADC_issues: no data from ESPRESSO")
109
+ return
111
110
 
112
111
  affected_file_roots = ESPRESSO_ADC_issues()
113
112
  file_roots = [os.path.basename(f).replace('.fits', '') for f in self.raw_file]
@@ -149,10 +148,10 @@ def blue_cryostat_issues(self, mask=True, plot=True):
149
148
  """
150
149
  instruments = self._check_instrument('ESPRESSO')
151
150
 
152
- if len(instruments) < 1:
151
+ if instruments is None:
153
152
  if self.verbose:
154
- logger.error(f"no data from ESPRESSO")
155
- logger.info(f'available: {self.instruments}')
153
+ logger.error(f"blue_cryostat_issues: no data from ESPRESSO")
154
+ return
156
155
 
157
156
  affected_file_roots = ESPRESSO_cryostat_issues()
158
157
  file_roots = [os.path.basename(f).replace('.fits', '') for f in self.raw_file]
@@ -230,16 +229,18 @@ def known_issues(self, mask=True, plot=False, **kwargs):
230
229
  """
231
230
  try:
232
231
  adc = ADC_issues(self, mask, plot, **kwargs)
233
- except IndexError as e:
232
+ except IndexError:
234
233
  # logger.error(e)
235
234
  logger.error('are the data binned? cannot proceed to mask these points...')
236
235
 
237
236
  try:
238
237
  cryostat = blue_cryostat_issues(self, mask, plot)
239
- except IndexError as e:
238
+ except IndexError:
240
239
  # logger.error(e)
241
240
  logger.error('are the data binned? cannot proceed to mask these points...')
242
241
 
242
+ if adc is None and cryostat is None:
243
+ return
243
244
  try:
244
245
  return adc | cryostat
245
246
  except UnboundLocalError:
arvi/kima_wrapper.py ADDED
@@ -0,0 +1,74 @@
1
+ import os
2
+ import numpy as np
3
+
4
+ from .setup_logger import logger
5
+
6
+ try:
7
+ import kima
8
+ from kima.pykima.utils import chdir
9
+ from kima import distributions
10
+ from kima import RVData, RVmodel
11
+ kima_available = True
12
+ except ImportError:
13
+ kima_available = False
14
+
15
+
16
+ def try_to_guess_prior(model, prior):
17
+ if 'jitter' in prior:
18
+ return 'Jprior'
19
+ if 'vsys' in prior:
20
+ return 'Cprior'
21
+ return None
22
+
23
+
24
+ def run_kima(self, run=False, load=False, run_directory=None, priors={}, **kwargs):
25
+ if not kima_available:
26
+ raise ImportError('kima not available, please install with `pip install kima`')
27
+
28
+ time = [getattr(self, inst).mtime for inst in self.instruments]
29
+ vrad = [getattr(self, inst).mvrad for inst in self.instruments]
30
+ err = [getattr(self, inst).msvrad for inst in self.instruments]
31
+ data = RVData(time, vrad, err, instruments=self.instruments)
32
+
33
+ fix = kwargs.pop('fix', False)
34
+ npmax = kwargs.pop('npmax', 1)
35
+ model = RVmodel(fix=fix, npmax=npmax, data=data)
36
+
37
+ model.trend = kwargs.pop('trend', False)
38
+ model.degree = kwargs.pop('degree', 0)
39
+
40
+ model.studentt = kwargs.pop('studentt', False)
41
+ model.enforce_stability = kwargs.pop('enforce_stability', False)
42
+ model.star_mass = kwargs.pop('star_mass', 1.0)
43
+
44
+ for k, v in priors.items():
45
+ try:
46
+ if 'conditional' in k:
47
+ setattr(model.conditional, k.replace('conditional.', ''), v)
48
+ else:
49
+ setattr(model, k, v)
50
+
51
+ except AttributeError:
52
+ msg = f'`RVmodel` has no attribute `{k}`, '
53
+ if guess := try_to_guess_prior(model, k):
54
+ msg += f'did you mean `{guess}`?'
55
+ logger.warning(msg)
56
+ return
57
+
58
+ if run:
59
+ if run_directory is None:
60
+ run_directory = os.getcwd()
61
+
62
+ # TODO: use signature of kima.run to pop the correct kwargs
63
+ # model_name = model.__class__.__name__
64
+ # model_name = f'kima.{model_name}.{model_name}'
65
+ # signature, defaults = [sig for sig in kima.run.__nb_signature__ if model_name in sig[0]]
66
+
67
+ with chdir(run_directory):
68
+ kima.run(model, **kwargs)
69
+
70
+ if load:
71
+ res = kima.load_results(model)
72
+ return data, model, res
73
+
74
+ return data, model
arvi/reports.py CHANGED
@@ -27,6 +27,29 @@ def sine_picker(event, self, fig, ax, ax1):
27
27
  fig.canvas.draw_idle()
28
28
 
29
29
 
30
+ def summary(self, add_ccf_mask=True, add_prog_id=False):
31
+ from .utils import pretty_print_table
32
+ rows = []
33
+ rows.append([self.star] + [''] * len(self.instruments))
34
+ rows.append([''] + self.instruments)
35
+ rows.append(['N'] + list(self.NN.values()))
36
+
37
+ if add_ccf_mask:
38
+ row = ['CCF mask']
39
+ for inst in self.instruments:
40
+ row.append(', '.join(np.unique(getattr(self, inst).ccf_mask)))
41
+ rows.append(row)
42
+
43
+ if add_prog_id:
44
+ row = ['prog ID']
45
+ for inst in self.instruments:
46
+ p = ', '.join(np.unique(getattr(self, inst).prog_id))
47
+ row.append(p)
48
+ rows.append(row)
49
+
50
+ pretty_print_table(rows)
51
+
52
+
30
53
  def report(self, save=None):
31
54
  import matplotlib.pyplot as plt
32
55
  import matplotlib.gridspec as gridspec
arvi/simbad_wrapper.py CHANGED
@@ -3,6 +3,8 @@ import requests
3
3
 
4
4
  import pysweetcat
5
5
 
6
+ from .translations import translate
7
+
6
8
  DATA_PATH = os.path.dirname(__file__)
7
9
  DATA_PATH = os.path.join(DATA_PATH, 'data')
8
10
 
@@ -98,7 +100,7 @@ class simbad:
98
100
  """
99
101
  from astropy.coordinates import SkyCoord
100
102
 
101
- self.star = star
103
+ self.star = translate(star, ngc=True, ic=True)
102
104
 
103
105
  if 'kobe' in self.star.lower():
104
106
  fname = os.path.join(DATA_PATH, 'KOBE-translate.csv')
@@ -117,13 +119,13 @@ class simbad:
117
119
  # self.oid = str(oid.split()[-1])
118
120
 
119
121
  try:
120
- table1 = run_query(query=QUERY.format(star=star))
122
+ table1 = run_query(query=QUERY.format(star=self.star))
121
123
  cols, values = parse_table(table1)
122
124
 
123
- table2 = run_query(query=BV_QUERY.format(star=star))
125
+ table2 = run_query(query=BV_QUERY.format(star=self.star))
124
126
  cols, values = parse_table(table2, cols, values)
125
127
 
126
- table3 = run_query(query=IDS_QUERY.format(star=star))
128
+ table3 = run_query(query=IDS_QUERY.format(star=self.star))
127
129
  line = table3.splitlines()[2]
128
130
  self.ids = line.replace('"', '').replace(' ', ' ').replace(' ', ' ').replace(' ', ' ').split('|')
129
131
  except IndexError: