arvi 0.1.10__py3-none-any.whl → 0.1.12__py3-none-any.whl

This diff compares publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the versions as they appear in their respective public registries.

Note: this release of arvi has been flagged as potentially problematic.

arvi/timeseries.py CHANGED
@@ -11,16 +11,16 @@ import numpy as np
  from astropy import units

  from .setup_logger import logger
- from .config import return_self
+ from .config import return_self, check_internet
  from .translations import translate
- from .dace_wrapper import get_observations, get_arrays
- from .dace_wrapper import do_download_ccf, do_download_s1d, do_download_s2d
+ from .dace_wrapper import do_download_filetype, get_observations, get_arrays
  from .simbad_wrapper import simbad
+ from .gaia_wrapper import gaia
  from .extra_data import get_extra_data
  from .stats import wmean, wrms
  from .binning import bin_ccf_mask, binRV
  from .HZ import getHZ_period
- from .utils import strtobool
+ from .utils import strtobool, there_is_internet


  @dataclass
@@ -50,7 +50,7 @@ class RV:
  do_sigma_clip: bool = field(init=True, repr=False, default=False)
  do_adjust_means: bool = field(init=True, repr=False, default=True)
  only_latest_pipeline: bool = field(init=True, repr=False, default=True)
- load_extra_data: Union[bool, str] = field(init=True, repr=False, default=True)
+ load_extra_data: Union[bool, str] = field(init=True, repr=False, default=False)
  #
  _child: bool = field(init=True, repr=False, default=False)
  _did_secular_acceleration: bool = field(init=False, repr=False, default=False)
@@ -71,6 +71,9 @@ class RV:
  self.__star__ = translate(self.star)

  if not self._child:
+ if check_internet and not there_is_internet():
+ raise ConnectionError('There is no internet connection?')
+
  # complicated way to query Simbad with self.__star__ or, if that
  # fails, try after removing a trailing 'A'
  for target in (self.__star__, self.__star__.replace('A', '')):
@@ -83,11 +86,23 @@ class RV:
  if self.verbose:
  logger.error(f'simbad query for {self.__star__} failed')

+ # complicated way to query Gaia with self.__star__ or, if that
+ # fails, try after removing a trailing 'A'
+ for target in (self.__star__, self.__star__.replace('A', '')):
+ try:
+ self.gaia = gaia(target)
+ break
+ except ValueError:
+ continue
+ else:
+ if self.verbose:
+ logger.error(f'Gaia query for {self.__star__} failed')
+
  # query DACE
  if self.verbose:
  logger.info(f'querying DACE for {self.__star__}...')
  try:
- self.dace_result = get_observations(self.__star__, self.instrument,
+ self.dace_result = get_observations(self.__star__, self.instrument,
  verbose=self.verbose)
  except ValueError as e:
  # querying DACE failed, should we raise an error?
@@ -226,6 +241,8 @@ class RV:
  @property
  def N_nights(self) -> int:
  """ Number of individual nights """
+ if self.mtime.size == 0:
+ return 0
  return binRV(self.mtime, None, None, binning_bins=True).size - 1

  @property
@@ -245,7 +262,7 @@ class RV:
  """ Masked array of times """
  return self.time[self.mask]

- @property
+ @property
  def mvrad(self) -> np.ndarray:
  """ Masked array of radial velocities """
  return self.vrad[self.mask]
@@ -371,7 +388,7 @@ class RV:
  star, timestamp = file.replace('.pkl', '').split('_')
  else:
  try:
- file = sorted(glob(f'{star}_*.pkl'))[-1]
+ file = sorted(glob(f'{star}_*.*.pkl'))[-1]
  except IndexError:
  raise ValueError(f'cannot find any file matching {star}_*.pkl')
  star, timestamp = file.replace('.pkl', '').split('_')
@@ -402,11 +419,11 @@ class RV:
  files = [files]

  if star is None:
- star_ = np.unique([os.path.splitext(f)[0].split('_')[0] for f in files])
+ star_ = np.unique([os.path.splitext(os.path.basename(f))[0].split('_')[0] for f in files])
  if star_.size == 1:
  logger.info(f'assuming star is {star_[0]}')
  star = star_[0]
-
+
  if instrument is None:
  instruments = np.array([os.path.splitext(f)[0].split('_')[1] for f in files])
  logger.info(f'assuming instruments: {instruments}')
@@ -439,12 +456,12 @@ class RV:
  names = header.split()

  if len(names) > 3:
- kw = dict(skip_header=2, dtype=None, encoding=None)
- try:
- data = np.genfromtxt(f, **kw)
- except ValueError:
+ kw = dict(skip_header=0, comments='--', names=True, dtype=None, encoding=None)
+ if '\t' in header:
  data = np.genfromtxt(f, **kw, delimiter='\t')
- data.dtype.names = names
+ else:
+ data = np.genfromtxt(f, **kw)
+ # data.dtype.names = names
  else:
  data = np.array([], dtype=np.dtype([]))

@@ -463,8 +480,10 @@ class RV:

  if 'rhk' in data.dtype.fields:
  _s.rhk = data['rhk']
- if 'srhk' in data.dtype.fields:
- _s.rhk_err = data['srhk']
+ _s.rhk_err = np.full_like(time, np.nan)
+ for possible_name in ['srhk', 'rhk_err']:
+ if possible_name in data.dtype.fields:
+ _s.rhk_err = data[possible_name]
  else:
  _s.rhk = np.zeros_like(time)
  _s.rhk_err = np.full_like(time, np.nan)
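
The rdb-parsing change two hunks above replaces the old skip_header=2 logic with np.genfromtxt's own header handling: names=True takes the column names from the header row, and comments='--' makes the rdb dash row disappear as a comment. A minimal, self-contained illustration of that call, using made-up rdb content (column names and values are placeholders, not data from the package):

    import io
    import numpy as np

    # hypothetical rdb-style content: a header row, a dash row, then data
    rdb = "bjd\tvrad\tsvrad\n---\t----\t-----\n2457000.5\t10.3\t1.2\n2457001.5\t9.8\t1.1\n"

    # same keyword arguments as in the new code path above
    kw = dict(skip_header=0, comments='--', names=True, dtype=None, encoding=None)
    data = np.genfromtxt(io.StringIO(rdb), **kw, delimiter='\t')

    print(data.dtype.names)   # ('bjd', 'vrad', 'svrad'), taken from the header row
    print(data['vrad'])       # [10.3  9.8], the dash row was skipped as a comment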
@@ -490,7 +509,7 @@ class RV:
  for q in ['drs_qc']:
  setattr(_s, q, np.full(time.size, True))
  _quantities.append(q)
-
+
  #! end hack

  _s.mask = np.ones_like(time, dtype=bool)
@@ -517,10 +536,10 @@ class RV:
  except ImportError:
  logger.error('iCCF is not installed. Please install it with `pip install iCCF`')
  return
-
+
  if isinstance(files, str):
  files = [files]
-
+
  I = iCCF.from_file(files)

  objects = np.unique([i.HDU[0].header['OBJECT'].replace(' ', '') for i in I])
@@ -624,11 +643,12 @@ class RV:
  setattr(self, q, arr)


- def download_ccf(self, instrument=None, limit=None, directory=None):
+ def download_ccf(self, instrument=None, index=None, limit=None, directory=None, **kwargs):
  """ Download CCFs from DACE

  Args:
  instrument (str): Specific instrument for which to download data
+ index (int): Specific index of point for which to download data (0-based)
  limit (int): Maximum number of files to download.
  directory (str): Directory where to store data.
  """
@@ -638,18 +658,27 @@ class RV:
  if instrument is None:
  files = [file for file in self.raw_file if file.endswith('.fits')]
  else:
- instrument = self._check_instrument(instrument)
+ strict = kwargs.pop('strict', False)
+ instrument = self._check_instrument(instrument, strict=strict)
  files = []
  for inst in instrument:
  files += list(getattr(self, inst).raw_file)

- do_download_ccf(files[:limit], directory)
+ if index is not None:
+ index = np.atleast_1d(index)
+ files = list(np.array(files)[index])
+
+ # remove empty strings
+ files = list(filter(None, files))
+
+ do_download_filetype('CCF', files[:limit], directory, **kwargs)

- def download_s1d(self, instrument=None, limit=None, directory=None):
+ def download_s1d(self, instrument=None, index=None, limit=None, directory=None, **kwargs):
  """ Download S1Ds from DACE

  Args:
  instrument (str): Specific instrument for which to download data
+ index (int): Specific index of point for which to download data (0-based)
  limit (int): Maximum number of files to download.
  directory (str): Directory where to store data.
  """
@@ -659,18 +688,27 @@ class RV:
  if instrument is None:
  files = [file for file in self.raw_file if file.endswith('.fits')]
  else:
- instrument = self._check_instrument(instrument)
+ strict = kwargs.pop('strict', False)
+ instrument = self._check_instrument(instrument, strict=strict)
  files = []
  for inst in instrument:
  files += list(getattr(self, inst).raw_file)

- do_download_s1d(files[:limit], directory)
+ if index is not None:
+ index = np.atleast_1d(index)
+ files = list(np.array(files)[index])

- def download_s2d(self, instrument=None, limit=None, directory=None):
+ # remove empty strings
+ files = list(filter(None, files))
+
+ do_download_filetype('S1D', files[:limit], directory, **kwargs)
+
+ def download_s2d(self, instrument=None, index=None, limit=None, directory=None, **kwargs):
  """ Download S2Ds from DACE

  Args:
  instrument (str): Specific instrument for which to download data
+ index (int): Specific index of point for which to download data (0-based)
  limit (int): Maximum number of files to download.
  directory (str): Directory where to store data.
  """
@@ -680,12 +718,20 @@ class RV:
  if instrument is None:
  files = [file for file in self.raw_file if file.endswith('.fits')]
  else:
- instrument = self._check_instrument(instrument)
+ strict = kwargs.pop('strict', False)
+ instrument = self._check_instrument(instrument, strict=strict)
  files = []
  for inst in instrument:
  files += list(getattr(self, inst).raw_file)

- extracted_files = do_download_s2d(files[:limit], directory)
+ if index is not None:
+ index = np.atleast_1d(index)
+ files = list(np.array(files)[index])
+
+ # remove empty strings
+ files = list(filter(None, files))
+
+ do_download_filetype('S2D', files[:limit], directory, **kwargs)


  from .plots import plot, plot_fwhm, plot_bis, plot_rhk, plot_quantity
@@ -697,13 +743,13 @@ class RV:

  def remove_instrument(self, instrument, strict=False):
  """ Remove all observations from one instrument
-
+
  Args:
  instrument (str or list):
  The instrument(s) for which to remove observations.
  strict (bool):
  Whether to match (each) `instrument` exactly
-
+
  Note:
  A common name can be used to remove observations for several subsets
  of a given instrument. For example
@@ -756,9 +802,24 @@ class RV:
  if return_self:
  return self

+ def remove_condition(self, condition):
+ """ Remove all observations that satisfy a condition
+
+ Args:
+ condition (np.ndarray):
+ Boolean array of the same length as the observations
+ """
+ if self.verbose:
+ inst = np.unique(self.instrument_array[condition])
+ logger.info(f"Removing {condition.sum()} points from instruments {inst}")
+ self.mask = self.mask & ~condition
+ self._propagate_mask_changes()
+
  def remove_point(self, index):
- """ Remove individual observations at a given index (or indices)
-
+ """
+ Remove individual observations at a given index (or indices).
+ NOTE: Like Python, the index is 0-based.
+
  Args:
  index (int, list, ndarray):
  Single index, list, or array of indices to remove.
@@ -865,45 +926,76 @@ class RV:
  n_before = (self.obs < self.obs[m]).sum()
  getattr(self, inst).mask[m - n_before] = False

- def secular_acceleration(self, epoch=55500, plot=False):
+ def secular_acceleration(self, epoch=None, just_compute=False, force_simbad=False):
  """
  Remove secular acceleration from RVs

  Args:
- epoch (float):
+ epoch (float, optional):
  The reference epoch (DACE uses 55500, 31/10/2010)
  instruments (bool or collection of str):
- Only remove secular acceleration for some instruments, or for all
+ Only remove secular acceleration for some instruments, or for all
  if `instruments=True`
- plot (bool):
- Show a plot of the RVs with the secular acceleration
  """
- if self._did_secular_acceleration: # don't do it twice
+ if self._did_secular_acceleration and not just_compute: # don't do it twice
  return
-
+
+ #as_yr = units.arcsec / units.year
+ mas_yr = units.milliarcsecond / units.year
+ mas = units.milliarcsecond
+
  try:
- self.simbad
- except AttributeError:
+ if force_simbad:
+ raise AttributeError
+
+ self.gaia
+ self.gaia.plx
+
  if self.verbose:
- logger.error('no information from simbad, cannot remove secular acceleration')
- return
+ logger.info('using Gaia information to remove secular acceleration')
+
+ if epoch is None:
+ # Gaia DR3 epoch (astropy.time.Time('J2016.0', format='jyear_str').jd)
+ epoch = 57389.0
+
+ π = self.gaia.plx * mas
+ d = π.to(units.pc, equivalencies=units.parallax())
+ μα = self.gaia.pmra * mas_yr
+ μδ = self.gaia.pmdec * mas_yr
+ μ = μα**2 + μδ**2
+ sa = (μ * d).to(units.m / units.second / units.year,
+ equivalencies=units.dimensionless_angles())
+
+ except AttributeError:
+ try:
+ self.simbad
+ except AttributeError:
+ if self.verbose:
+ logger.error('no information from simbad, cannot remove secular acceleration')
+ return
+
+ if self.simbad.plx is None:
+ if self.verbose:
+ logger.error('no parallax from simbad, cannot remove secular acceleration')
+ return

- if self.simbad.plx_value is None:
  if self.verbose:
- logger.error('no parallax from simbad, cannot remove secular acceleration')
- return
+ logger.info('using Simbad information to remove secular acceleration')

- #as_yr = units.arcsec / units.year
- mas_yr = units.milliarcsecond / units.year
- mas = units.milliarcsecond
+ if epoch is None:
+ epoch = 55500
+
+ π = self.simbad.plx * mas
+ d = π.to(units.pc, equivalencies=units.parallax())
+ μα = self.simbad.pmra * mas_yr
+ μδ = self.simbad.pmdec * mas_yr
+ μ = μα**2 + μδ**2
+ sa = (μ * d).to(units.m / units.second / units.year,
+ equivalencies=units.dimensionless_angles())
+
+ if just_compute:
+ return sa

- π = self.simbad.plx_value * mas
- d = π.to(units.pc, equivalencies=units.parallax())
- μα = self.simbad.pmra * mas_yr
- μδ = self.simbad.pmdec * mas_yr
- μ = μα**2 + μδ**2
- sa = (μ * d).to(units.m / units.second / units.year,
- equivalencies=units.dimensionless_angles())
  sa = sa.value

  if self.verbose:
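
For reference, the quantity computed in the hunk above is the secular (perspective) acceleration, d·(μα² + μδ²), converted to m/s per year with astropy units. A standalone sketch of the same computation, with made-up parallax and proper-motion values (the numbers are placeholders, not Gaia or Simbad results):

    from astropy import units

    mas = units.milliarcsecond
    mas_yr = units.milliarcsecond / units.year

    plx, pmra, pmdec = 100.0, 500.0, -300.0      # hypothetical values in mas and mas/yr

    d = (plx * mas).to(units.pc, equivalencies=units.parallax())   # parallax -> distance
    mu2 = (pmra * mas_yr)**2 + (pmdec * mas_yr)**2                 # total proper motion, squared
    sa = (mu2 * d).to(units.m / units.second / units.year,
                      equivalencies=units.dimensionless_angles())

    print(sa)   # secular acceleration in m / (s yr)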
@@ -927,7 +1019,7 @@ class RV:
  continue

  s.vrad = s.vrad - sa * (s.time - epoch) / 365.25
-
+
  self._build_arrays()

  self._did_secular_acceleration = True
@@ -978,7 +1070,7 @@ class RV:

  def clip_maxerror(self, maxerror:float):
  """ Mask out points with RV error larger than a given value
-
+
  Args:
  maxerror (float): Maximum error to keep.
  """
@@ -1004,10 +1096,10 @@ class RV:

  WARNING: This creates and returns a new object and does not modify self.
  """
-
+
  # create copy of self to be returned
  snew = deepcopy(self)
-
+
  all_bad_quantities = []

  for inst in snew.instruments:
@@ -1016,7 +1108,7 @@ class RV:
  # only one observation?
  if s.N == 1:
  continue
-
+
  # are all observations masked?
  if s.mtime.size == 0:
  continue
@@ -1067,7 +1159,7 @@ class RV:
  with warnings.catch_warnings():
  warnings.filterwarnings('ignore', category=RuntimeWarning)
  try:
- _, yb = binRV(s.mtime, Q[s.mask],
+ _, yb = binRV(s.mtime, Q[s.mask],
  stat=np.nanmean, tstat=np.nanmean)
  setattr(s, q, yb)
  except TypeError:
@@ -1082,7 +1174,7 @@ class RV:

  s.time = tb
  s.mask = np.full(tb.shape, True)
-
+
  if snew.verbose and len(all_bad_quantities) > 0:
  logger.warning('\nnew object will not have these non-float quantities')

@@ -1152,7 +1244,7 @@ class RV:
  # log_msg += other
  # if i < len(others) - 1:
  # log_msg += ', '
-
+
  # if self.verbose:
  # logger.info(log_msg)

@@ -1203,7 +1295,8 @@ class RV:
  self._build_arrays()


- def save(self, directory=None, instrument=None, full=False, save_nans=True):
+ def save(self, directory=None, instrument=None, full=False, postfix=None,
+ save_masked=False, save_nans=True):
  """ Save the observations in .rdb files.

  Args:
@@ -1211,9 +1304,10 @@ class RV:
  Directory where to save the .rdb files.
  instrument (str, optional):
  Instrument for which to save observations.
- full (bool, optional):
- Whether to save just RVs and errors (False) or more indicators
- (True).
+ full (bool, optional):
+ Save just RVs and errors (False) or more indicators (True).
+ postfix (str, optional):
+ Postfix to add to the filenames ([star]_[instrument]_[postfix].rdb).
  save_nans (bool, optional)
  Whether to save NaN values in the indicators, if they exist. If
  False, the full observation is not saved.
@@ -1238,11 +1332,18 @@ class RV:
  continue

  if full:
- d = np.c_[
- _s.mtime, _s.mvrad, _s.msvrad,
- _s.fwhm[_s.mask], _s.fwhm_err[_s.mask],
- _s.rhk[_s.mask], _s.rhk_err[_s.mask],
- ]
+ if save_masked:
+ d = np.c_[
+ _s.time, _s.vrad, _s.svrad,
+ _s.fwhm, _s.fwhm_err,
+ _s.rhk, _s.rhk_err,
+ ]
+ else:
+ d = np.c_[
+ _s.mtime, _s.mvrad, _s.msvrad,
+ _s.fwhm[_s.mask], _s.fwhm_err[_s.mask],
+ _s.rhk[_s.mask], _s.rhk_err[_s.mask],
+ ]
  if not save_nans:
  if np.isnan(d).any():
  # remove observations where any of the indicators are # NaN
@@ -1254,10 +1355,16 @@ class RV:
  header = 'bjd\tvrad\tsvrad\tfwhm\tsfwhm\trhk\tsrhk\n'
  header += '---\t----\t-----\t----\t-----\t---\t----'
  else:
- d = np.c_[_s.mtime, _s.mvrad, _s.msvrad]
+ if save_masked:
+ d = np.c_[_s.time, _s.vrad, _s.svrad]
+ else:
+ d = np.c_[_s.mtime, _s.mvrad, _s.msvrad]
  header = 'bjd\tvrad\tsvrad\n---\t----\t-----'
-
+
  file = f'{star_name}_{inst}.rdb'
+ if postfix is not None:
+ file = f'{star_name}_{inst}_{postfix}.rdb'
+
  files.append(file)
  file = os.path.join(directory, file)

@@ -1265,7 +1372,7 @@ class RV:

  if self.verbose:
  logger.info(f'saving to {file}')
-
+
  return files

  def checksum(self, write_to=None):
@@ -1280,7 +1387,7 @@ class RV:


  #
- def run_lbl(self, instrument=None, data_dir=None,
+ def run_lbl(self, instrument=None, data_dir=None,
  skysub=False, tell=False, limit=None, **kwargs):
  from .lbl_wrapper import run_lbl, NIRPS_create_telluric_corrected_S2D

@@ -1294,7 +1401,7 @@ class RV:
  logger.error(f"No data from instrument '{instrument}'")
  logger.info(f'available: {self.instruments}')
  return
-
+
  if isinstance(instrument, str):
  instruments = [instrument]
  else:
@@ -1349,7 +1456,7 @@ class RV:
  logger.error(f"No data from instrument '{instrument}'")
  logger.info(f'available: {self.instruments}')
  return
-
+
  if isinstance(instrument, str):
  instruments = [instrument]
  else:
@@ -1403,4 +1510,4 @@ def fit_sine(t, y, yerr, period='gls', fix_period=False):
  p0 = [y.ptp(), period, 0.0, 0.0]
  xbest, _ = leastsq(lambda p, t, y, ye: (sine(t, p) - y) / ye,
  p0, args=(t, y, yerr))
- return xbest, partial(sine, p=xbest)
+ return xbest, partial(sine, p=xbest)
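
Taken together, the timeseries.py changes add a Gaia query at construction time, an optional internet check, a remove_condition method, an index argument for the download methods, a just_compute mode for secular_acceleration, and postfix/save_masked options for save. A minimal usage sketch based only on the signatures in this diff; the import path, star name, instrument name, and error threshold are illustrative assumptions, not values from the package:

    from arvi import RV           # assuming RV is exported at the package level

    s = RV('HD10180')             # queries Simbad, Gaia, and DACE for this (example) star

    # mask out observations matching an arbitrary boolean condition (new method)
    s.remove_condition(s.svrad > 5.0)

    # download the CCF of a single observation, selected by 0-based index
    s.download_ccf(instrument='ESPRESSO', index=0, directory='ccfs')

    # compute the secular acceleration without applying it to the RVs
    sa = s.secular_acceleration(just_compute=True)

    # save per-instrument .rdb files named [star]_[instrument]_cleaned.rdb
    s.save(directory='data', postfix='cleaned')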
arvi/translations.py CHANGED
@@ -1,3 +1,5 @@
+ import re
+
  STARS = {
  'Barnard': 'GJ699',
  "Barnard's": 'GJ699',
@@ -5,6 +7,15 @@ STARS = {


  def translate(star):
+ # known translations
  if star in STARS:
  return STARS[star]
+
+ # regex translations
+ NGC_match = re.match(r'NGC([\s\d]+)No([\s\d]+)', star)
+ if NGC_match:
+ cluster = NGC_match.group(1).replace(' ', '')
+ target = NGC_match.group(2).replace(' ', '')
+ return f'Cl* NGC {cluster} MMU {target}'
+
  return star
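
The new regex branch maps identifiers of the form 'NGC<cluster>No<number>' to the Simbad-style 'Cl* NGC <cluster> MMU <number>' designation. A small standalone sketch that mirrors the added logic (the cluster and star numbers below are arbitrary examples):

    import re

    def translate_ngc(star):
        # mirrors the regex branch added to translate() above
        m = re.match(r'NGC([\s\d]+)No([\s\d]+)', star)
        if m:
            cluster = m.group(1).replace(' ', '')
            target = m.group(2).replace(' ', '')
            return f'Cl* NGC {cluster} MMU {target}'
        return star

    print(translate_ngc('NGC 2682 No 12'))   # -> Cl* NGC 2682 MMU 12
    print(translate_ngc('HD10180'))          # no match, returned unchanged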
arvi/utils.py CHANGED
@@ -13,6 +13,12 @@ import logging
  from glob import glob
  import numpy as np

+ try:
+ from tqdm import tqdm, trange
+ except ImportError:
+ tqdm = lambda x, *args, **kwargs: x
+ trange = lambda *args, **kwargs: range(*args, **kwargs)
+

  def create_directory(directory):
  """ Create a directory if it does not exist """
@@ -70,6 +76,14 @@ def strtobool(val):
  else:
  raise ValueError("invalid truth value {!r}".format(val))

+ def there_is_internet(timeout=1):
+ from socket import create_connection
+ try:
+ create_connection(('8.8.8.8', 53), timeout=timeout)
+ return True
+ except OSError:
+ pass
+ return False

  def find_data_file(file):
  here = os.path.dirname(os.path.abspath(__file__))
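
utils.py gains an optional tqdm import with a no-op fallback and a there_is_internet() helper, which the RV constructor now uses when config.check_internet is set. A minimal sketch of calling the helpers directly (the module path follows the file header above; the loop is only a placeholder):

    from arvi.utils import there_is_internet, tqdm

    # quick connectivity probe (tries to open a socket to 8.8.8.8:53)
    if not there_is_internet(timeout=1):
        raise ConnectionError('There is no internet connection?')

    # with tqdm installed this shows a progress bar; otherwise it iterates silently
    for _ in tqdm(range(3), desc='working'):
        pass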
arvi-0.1.10.dist-info/METADATA → arvi-0.1.12.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: arvi
- Version: 0.1.10
+ Version: 0.1.12
  Summary: The Automated RV Inspector
  Author-email: João Faria <joao.faria@unige.ch>
  License: MIT
arvi-0.1.12.dist-info/RECORD ADDED
@@ -0,0 +1,31 @@
+ arvi/HZ.py,sha256=btqDFTxHeQlmvXOQKrjOpP8YFtmFjGhSZwsYYehLOaM,2885
+ arvi/__init__.py,sha256=cECMjLbPiolmft2lsnNoszU6_1a-dKOVY8cb2D1CBfg,366
+ arvi/ariadne_wrapper.py,sha256=jv8Wl35LfHl1UH1EklbvxHcQHqaEDhRGNogSYjFt7Y4,2141
+ arvi/binning.py,sha256=G1nZvH2ev02quDPZCReygLkH7tX6B3hFAOoOYTfhLec,15249
+ arvi/config.py,sha256=7ZoKzTwhIrlueYV1OF3q_iirAdZC0WheFUmXhl9wR4s,232
+ arvi/dace_wrapper.py,sha256=CSq21VDt5n_Ln_A6jvxLVwo2TS25C_M00MfvH38uA8I,10621
+ arvi/extra_data.py,sha256=WEEaYeLh52Zdv0uyHO72Ys5MWS3naTAP4wJV2BJ1mbk,2551
+ arvi/gaia_wrapper.py,sha256=dv-uKWIsHbW-CPI14Fn1xIngsZrV7YBROVamgH0SULc,3418
+ arvi/instrument_specific.py,sha256=gDNu6xM0nL3AItAxipTdg3dIGaGGi7tNWE5nnad5MxM,2873
+ arvi/lbl_wrapper.py,sha256=_ViGVkpakvuBR_xhu9XJRV5EKHpj5Go6jBZGJZMIS2Y,11850
+ arvi/nasaexo_wrapper.py,sha256=xrdF4KKXmlZAqu4IQtdcqfOeUXJXoui5M-eZpnVXNlk,7311
+ arvi/plots.py,sha256=J1FR-7NgWL0zdjtD0Sumj6ZHLXSRC_EtnpXkTguxQ-s,17549
+ arvi/programs.py,sha256=WpFE2cXYG-4ax2xmih0fFhvQbcVhnOEofVcwjePNmKQ,4505
+ arvi/reports.py,sha256=yrdajC-zz5_kH1Ucc6cU70DK-5dpG0Xyeru-EITKpNo,3355
+ arvi/setup_logger.py,sha256=pBzaRTn0hntozjbaRVx0JIbWGuENkvYUApa6uB-FsRo,279
+ arvi/simbad_wrapper.py,sha256=7sqaVHDKtqLDxhcTzoa0EUcOAPGLEtoqkZz_XQS9eKw,5504
+ arvi/stats.py,sha256=MQiyLvdiAFxIC29uajTy8kuxD-b2Y6mraL4AfWkRJkM,2576
+ arvi/timeseries.py,sha256=LjMiht2LVi17s40aBTrNznANKpKfBxQ7b8uoi_vdZ8A,54371
+ arvi/translations.py,sha256=AljjMsHRqzfcirdeavHsyVJFLvwKrM1wDLMv2RApG58,446
+ arvi/utils.py,sha256=ay3GouZYqn66zF43KgSs8Ngsfb4YInKZEmk70YqVDoA,3676
+ arvi/data/info.svg,sha256=0IMI6W-eFoTD8acnury79WJJakpBwLa4qKS4JWpsXiI,489
+ arvi/data/obs_affected_ADC_issues.dat,sha256=pbwzvRBjS8KsLrKi1Cdgfihvo3hK3FUN8Csdysw-vAw,10653
+ arvi/data/obs_affected_blue_cryostat_issues.dat,sha256=z4AK17xfz8tGTDv1FjRvQFnio4XA6PNNfDXuicewHk4,1771
+ arvi/data/extra/HD86226_PFS1.rdb,sha256=vfAozbrKHM_j8dYkCBJsuHyD01KEM1asghe2KInwVao,3475
+ arvi/data/extra/HD86226_PFS2.rdb,sha256=F2P7dB6gVyzCglUjNheB0hIHVClC5RmARrGwbrY1cfo,4114
+ arvi/data/extra/metadata.json,sha256=C69hIw6CohyES6BI9vDWjxwSz7N4VOYX0PCgjXtYFmU,178
+ arvi-0.1.12.dist-info/LICENSE,sha256=6JfQgl7SpM55t0EHMFNMnNh-AdkpGW25MwMiTnhdWQg,1068
+ arvi-0.1.12.dist-info/METADATA,sha256=ZYHmuqlbt45XyTKJO4_Jzwi4rM2-K_t0ty6dvnNJOBc,1306
+ arvi-0.1.12.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
+ arvi-0.1.12.dist-info/top_level.txt,sha256=4EeiKDVLD45ztuflTGfQ3TU8GVjJg5Y95xS5XjI-utU,5
+ arvi-0.1.12.dist-info/RECORD,,