arvi-0.2.8-py3-none-any.whl → arvi-0.2.10-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of arvi might be problematic.
- arvi/dace_wrapper.py +7 -5
- arvi/instrument_specific.py +23 -9
- arvi/kepmodel_wrapper.py +296 -0
- arvi/nasaexo_wrapper.py +7 -3
- arvi/plots.py +1 -3
- arvi/reports.py +108 -1
- arvi/stats.py +30 -5
- arvi/timeseries.py +312 -119
- arvi/utils.py +86 -8
- {arvi-0.2.8.dist-info → arvi-0.2.10.dist-info}/METADATA +1 -1
- {arvi-0.2.8.dist-info → arvi-0.2.10.dist-info}/RECORD +14 -13
- {arvi-0.2.8.dist-info → arvi-0.2.10.dist-info}/WHEEL +0 -0
- {arvi-0.2.8.dist-info → arvi-0.2.10.dist-info}/licenses/LICENSE +0 -0
- {arvi-0.2.8.dist-info → arvi-0.2.10.dist-info}/top_level.txt +0 -0
arvi/timeseries.py
CHANGED
@@ -6,6 +6,7 @@ from glob import glob
 import warnings
 from copy import deepcopy
 from datetime import datetime, timezone
+
 import numpy as np

 from .setup_logger import setup_logger
@@ -24,10 +25,12 @@ from .HZ import getHZ_period
 from .instrument_specific import ISSUES
 from .reports import REPORTS
 from .utils import sanitize_path, strtobool, there_is_internet, timer, chdir
-from .
+from .setup_logger import setup_logger
+logger = setup_logger()

-units = lazy_import('astropy.units')
-#
+# units = lazy_import('astropy.units')
+# units = lazy.load('astropy.units')
+from astropy import units

 class ExtraFields:
     @property
@@ -408,35 +411,71 @@ class RV(ISSUES, REPORTS):
         self._did_correct_berv = False
         self.__post_init__()

-    def snapshot(self, directory=None, delete_others=False):
-
+    def snapshot(self, directory=None, delete_others=False, compress=False):
+        if compress:
+            try:
+                import compress_pickle as pickle
+            except ImportError:
+                logger.warning('compress_pickle not installed, not compressing')
+                import pickle
+                compress = False
+        else:
+            import pickle
+        import re
         from datetime import datetime
+
         ts = datetime.now().timestamp()
         star_name = self.star.replace(' ', '')
         file = f'{star_name}_{ts}.pkl'

+        server = None
         if directory is None:
             directory = '.'
         else:
-
-
-
-
-
-            import re
-            other_pkls = [
-                f for f in os.listdir(directory)
-                if re.search(fr'{star_name}_\d+.\d+.pkl', f)
-            ]
-            for pkl in other_pkls:
-                os.remove(os.path.join(directory, pkl))
+            if ':' in directory:
+                server, directory = directory.split(':')
+                delete_others = False
+            else:
+                os.makedirs(directory, exist_ok=True)

         metadata = {
             'star': self.star,
             'timestamp': ts,
             'description': 'arvi snapshot'
         }
-
+
+
+        if server:
+            import posixpath
+            from .utils import server_sftp, server_file
+            with server_sftp(server=server) as sftp:
+                try:
+                    sftp.chdir(directory)
+                except FileNotFoundError:
+                    sftp.mkdir(directory)
+                finally:
+                    sftp.chdir(directory)
+                with sftp.open(file, 'wb') as f:
+                    print('saving snapshot to server...', end='', flush=True)
+                    pickle.dump((self, metadata), f, protocol=0)
+                    print('done')
+            file = posixpath.join(directory, file)
+        else:
+            if delete_others:
+                other_pkls = [
+                    f for f in os.listdir(directory)
+                    if re.search(fr'{star_name}_\d+.\d+.pkl', f)
+                ]
+                for pkl in other_pkls:
+                    os.remove(os.path.join(directory, pkl))
+
+            file = os.path.join(directory, file)
+
+            if compress:
+                file += '.gz'
+
+            with open(file, 'wb') as f:
+                pickle.dump((self, metadata), f)

         if self.verbose:
             logger.info(f'saved snapshot to {file}')
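For reference, the two small patterns behind the new `compress` and remote-target options, falling back to the standard `pickle` module when `compress_pickle` is missing and splitting a `host:path` destination, can be sketched standalone (the function names below are illustrative, not part of arvi):

# Sketch of the optional-compression / remote-target handling used by snapshot().
# `compress_pickle` is an optional dependency; everything else is stdlib.
import pickle


def resolve_pickler(compress):
    """Return (pickle-like module, compress flag), falling back to stdlib pickle."""
    if not compress:
        return pickle, False
    try:
        import compress_pickle
        return compress_pickle, True
    except ImportError:
        print('compress_pickle not installed, not compressing')
        return pickle, False


def split_target(directory):
    """Split a 'host:path' destination into (server, path); local paths give (None, path)."""
    if ':' in directory:
        server, path = directory.split(':', 1)
        return server, path
    return None, directory


mod, compressing = resolve_pickler(compress=True)
print(mod.__name__, compressing)
print(split_target('myserver:/data/snapshots'))
print(split_target('snapshots'))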
@@ -511,6 +550,15 @@ class RV(ISSUES, REPORTS):
     def instrument_array(self):
         return np.concatenate([[i] * n for i, n in self.NN.items()])

+    def _instrument_mask(self, instrument):
+        if isinstance(instrument, str):
+            return np.char.find(self.instrument_array, instrument) == 0
+        elif isinstance(instrument, (list, tuple, np.ndarray)):
+            m = np.full_like(self.time, False, dtype=bool)
+            for i in instrument:
+                m |= np.char.find(self.instrument_array, i) == 0
+            return m
+
     @property
     def rms(self) -> float:
         """ Weighted rms of the (masked) radial velocities """
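The new `_instrument_mask` helper treats an instrument name as a prefix of the per-observation labels in `instrument_array`, using `np.char.find(...) == 0` as a vectorized startswith. A standalone illustration with made-up labels:

import numpy as np

# one instrument label per observation, as instrument_array provides
labels = np.array(['ESPRESSO18', 'ESPRESSO19', 'HARPS03', 'HARPS15', 'CORALIE14'])

# np.char.find returns the index of the substring, so == 0 means "starts with"
print(np.char.find(labels, 'ESPRESSO') == 0)   # [ True  True False False False]

# for a list of instruments the masks are OR-ed together, as in _instrument_mask
mask = np.zeros(labels.size, dtype=bool)
for prefix in ('HARPS', 'CORALIE'):
    mask |= np.char.find(labels, prefix) == 0
print(mask)                                    # [False False  True  True  True]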
@@ -537,6 +585,11 @@ class RV(ISSUES, REPORTS):
     def _mtime_sorter(self):
         return np.argsort(self.mtime)

+    @property
+    def timespan(self):
+        """ Total time span of the (masked) observations """
+        return np.ptp(self.mtime)
+
     def _index_from_instrument_index(self, index, instrument):
         ind = np.where(self.instrument_array == instrument)[0]
         return ind[getattr(self, instrument).mask][index]
@@ -577,7 +630,8 @@ class RV(ISSUES, REPORTS):
             # --> not just in rhk and rhk_err...
             if data[arr].dtype == float and (bad := data[arr] == -99999).any():
                 data[arr][bad] = np.nan
-
+            if data[arr].dtype == float and (bad := data[arr] == -99).any():
+                data[arr][bad] = np.nan
             setattr(s, arr, data[arr][ind])
             s._quantities.append(arr)

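The extra check extends the sentinel cleanup so that both -99999 and -99 in float columns become NaN. A minimal standalone version of that replacement (the array values are made up):

import numpy as np

data = {'rhk': np.array([-4.9, -99999.0, -5.0, -99.0])}

for arr in data:
    # replace both sentinel values used to flag missing measurements with NaN
    for sentinel in (-99999, -99):
        if data[arr].dtype == float and (bad := data[arr] == sentinel).any():
            data[arr][bad] = np.nan

print(data['rhk'])   # [-4.9   nan  -5.    nan]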
@@ -629,22 +683,28 @@ class RV(ISSUES, REPORTS):
         import pickle
         from datetime import datetime
         if star is None:
-            assert file.endswith('.pkl'), 'expected a .pkl file'
-
+            assert file.endswith(('.pkl', '.pkl.gz')), 'expected a .pkl file'
+            basefile = os.path.basename(file)
+            star, timestamp = basefile.replace('.pkl.gz', '').replace('.pkl', '').split('_')
         else:
             try:
-                file = sorted(glob(f'{star}_*.*.pkl'))[-1]
+                file = sorted(glob(f'{star}_*.*.pkl*'))[-1]
             except IndexError:
                 raise ValueError(f'cannot find any file matching {star}_*.pkl')
-            star, timestamp = file.replace('.pkl', '').split('_')
+            star, timestamp = file.replace('.pkl.gz', '').replace('.pkl', '').split('_')

         dt = datetime.fromtimestamp(float(timestamp))
         if verbose:
             logger.info(f'reading snapshot of {star} from {dt}')

-
+        with open(file, 'rb') as f:
+            if file.endswith('.gz'):
+                import compress_pickle as pickle
+            s = pickle.load(f)
+
         if isinstance(s, tuple) and len(s) == 2:
             s, _metadata = s
+
         s._snapshot = file
         return s

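Snapshot filenames follow the `{star}_{timestamp}.pkl[.gz]` pattern, and the loader now strips either extension before splitting out the star name and Unix timestamp. For example (hypothetical filename):

import os
from datetime import datetime

# hypothetical snapshot file following the {star}_{timestamp}.pkl[.gz] pattern
file = '/tmp/snapshots/HD10180_1700000000.123456.pkl.gz'

basefile = os.path.basename(file)
star, timestamp = basefile.replace('.pkl.gz', '').replace('.pkl', '').split('_')
dt = datetime.fromtimestamp(float(timestamp))

print(star)   # HD10180
print(dt)     # corresponding local date and time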
@@ -1504,7 +1564,7 @@ class RV(ISSUES, REPORTS):
         """ Remove all observations that satisfy a condition

         Args:
-            condition (
+            condition (ndarray):
                 Boolean array of the same length as the observations
         """
         if self.verbose:
@@ -1664,16 +1724,17 @@ class RV(ISSUES, REPORTS):
         self._propagate_mask_changes()


-    def _propagate_mask_changes(self):
+    def _propagate_mask_changes(self, _remove_instrument=True):
         """ link self.mask with each self.`instrument`.mask """
         masked = np.where(~self.mask)[0]
         for m in masked:
             inst = self.instruments[self.obs[m] - 1]
             n_before = (self.obs < self.obs[m]).sum()
             getattr(self, inst).mask[m - n_before] = False
-
-
-        self
+        if _remove_instrument:
+            for inst in self.instruments:
+                if getattr(self, inst).mtime.size == 0:
+                    self.remove_instrument(inst, strict=True)

     def secular_acceleration(self, epoch=None, just_compute=False, force_simbad=False):
         """
@@ -1691,9 +1752,12 @@ class RV(ISSUES, REPORTS):
             force_simbad (bool, optional):
                 Use Simbad proper motions even if Gaia is available
         """
-
+        # don't do it twice
+        if self._did_secular_acceleration and not just_compute:
             return

+        from astropy import units
+
         #as_yr = units.arcsec / units.year
         mas_yr = units.milliarcsecond / units.year
         mas = units.milliarcsecond
@@ -1825,15 +1889,21 @@ class RV(ISSUES, REPORTS):

         self._did_secular_acceleration = False

-    def sigmaclip(self, sigma=5, instrument=None,
+    def sigmaclip(self, sigma=5, quantity='vrad', instrument=None,
+                  strict=True):
         """
-        Sigma-clip RVs (per instrument!), by MAD away from
+        Sigma-clip RVs or other quantities (per instrument!), by MAD away from
+        the median.

         Args:
             sigma (float):
-                Number of MADs
+                Number of MADs away from the median
+            quantity (str):
+                Quantity to sigma-clip (by default the RVs)
             instrument (str, list):
                 Instrument(s) to sigma-clip
+            strict (bool):
+                Passed directly to self._check_instrument
         """
         #from scipy.stats import sigmaclip as dosigmaclip
         from .stats import sigmaclip_median as dosigmaclip
@@ -1846,16 +1916,20 @@ class RV(ISSUES, REPORTS):

         for inst in instruments:
             m = self.instrument_array == inst
-
+            d = getattr(self, quantity)
+
+            if np.isnan(d[m]).all():
+                continue
+
+            result = dosigmaclip(d[m], low=sigma, high=sigma)
             # n = self.vrad[m].size - result.clipped.size

-            ind = m & self.mask &
-                  ((self.vrad < result.lower) | (self.vrad > result.upper))
+            ind = m & self.mask & ((d < result.lower) | (d > result.upper))
             n = ind.sum()

             if self.verbose and n > 0:
                 s = 's' if (n == 0 or n > 1) else ''
-                logger.warning(f'sigma-clip
+                logger.warning(f'sigma-clip {quantity} will remove {n} point{s} for {inst}')

             if n > 0:
                 self.mask[ind] = False
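`sigmaclip` delegates to `sigmaclip_median` from `arvi.stats`; judging from how `result.lower`, `result.upper` and `result.clipped` are used, it clips at `sigma` MADs around the median. A stand-in sketch under that assumption (not the package's actual implementation, which may for instance scale the MAD differently):

from collections import namedtuple
import numpy as np

SigmaclipResult = namedtuple('SigmaclipResult', 'clipped lower upper')


def sigmaclip_median(data, low=5.0, high=5.0):
    """Stand-in MAD-based clipper, mimicking how arvi uses result.lower/.upper/.clipped."""
    data = np.asarray(data, dtype=float)
    med = np.nanmedian(data)
    mad = np.nanmedian(np.abs(data - med))
    lower, upper = med - low * mad, med + high * mad
    clipped = data[(data >= lower) & (data <= upper)]
    return SigmaclipResult(clipped, lower, upper)


rng = np.random.default_rng(0)
rv = np.append(rng.normal(0.0, 1.0, 100), [50.0, -40.0])   # two injected outliers
result = sigmaclip_median(rv, low=5, high=5)
ind = (rv < result.lower) | (rv > result.upper)
print('flagged points:', ind.sum())   # the injected outliers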
@@ -1880,21 +1954,32 @@ class RV(ISSUES, REPORTS):
         if config.return_self:
             return self

-    def clip_maxerror(self, maxerror:float):
-        """
+    def clip_maxerror(self, maxerror:float, instrument=None):
+        """
+        Mask out points with RV error larger than a given value. If `instrument`
+        is given, mask only observations from that instrument.

         Args:
             maxerror (float): Maximum error to keep.
+            instrument (str, list, tuple, ndarray): Instrument(s) to clip
         """
         if self._child:
             return

         self.maxerror = maxerror
+
+        if instrument is None:
+            inst_mask = np.ones_like(self.svrad, dtype=bool)
+        else:
+            inst_mask = self._instrument_mask(instrument)
+
         above = self.svrad > maxerror
-
-
+        old_mask = self.mask.copy()
+
+        self.mask[inst_mask & above] = False

         if self.verbose and above.sum() > 0:
+            n = (above[inst_mask] & old_mask[inst_mask]).sum()
             s = 's' if (n == 0 or n > 1) else ''
             logger.warning(f'clip_maxerror ({maxerror} {self.units}) removed {n} point' + s)

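`clip_maxerror` now restricts the cut to the selected instrument(s) by AND-ing the error threshold with the instrument mask before clearing `self.mask`. The masking step in isolation (made-up numbers):

import numpy as np

svrad = np.array([1.2, 8.5, 0.9, 6.1, 1.1])              # RV uncertainties
inst_mask = np.array([True, True, False, False, True])   # e.g. only one instrument selected
mask = np.ones_like(svrad, dtype=bool)                   # True = observation kept

above = svrad > 5.0
mask[inst_mask & above] = False   # only point 1 is both selected and above the cut
print(mask)                       # [ True False  True  True  True]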
@@ -1902,6 +1987,36 @@ class RV(ISSUES, REPORTS):
         if config.return_self:
             return self

+    def sigmaclip_ew(self, sigma=5):
+        """ Sigma-clip EW (FWHM x contrast), by MAD away from the median """
+        from .stats import sigmaclip_median as dosigmaclip, weighted_median
+
+        S = deepcopy(self)
+        for _s in S:
+            m = _s.mask
+            _s.fwhm -= weighted_median(_s.fwhm[m], 1 / _s.fwhm_err[m])
+            _s.contrast -= weighted_median(_s.contrast[m], 1 / _s.contrast_err[m])
+        S._build_arrays()
+        ew = S.fwhm * S.contrast
+        ew_err = np.hypot(S.fwhm_err * S.contrast, S.fwhm * S.contrast_err)
+
+        wmed = weighted_median(ew[S.mask], 1 / ew_err[S.mask])
+        data = (ew - wmed) / ew_err
+        result = dosigmaclip(data, low=sigma, high=sigma)
+        ind = (data < result.lower) | (data > result.upper)
+        self.mask[ind] = False
+
+        if self.verbose and ind.sum() > 0:
+            n = ind.sum()
+            s = 's' if (n == 0 or n > 1) else ''
+            logger.warning(f'sigmaclip_ew removed {n} point' + s)
+
+        self._propagate_mask_changes()
+        if config.return_self:
+            return self
+
+
+
     def bin(self):
         """
         Nightly bin the observations.
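The new `sigmaclip_ew` clips on the product EW = FWHM × contrast, with the uncertainty propagated as for a product of independent quantities, which is what the `np.hypot` call computes. In isolation (illustrative values):

import numpy as np

# illustrative CCF FWHM and contrast values with uncertainties
fwhm = np.array([7000.0, 7010.0, 6995.0])
fwhm_err = np.array([4.0, 4.0, 4.0])
contrast = np.array([45.0, 45.2, 44.9])
contrast_err = np.array([0.05, 0.05, 0.05])

# EW-like product and first-order error propagation for a product:
# sigma_EW = sqrt((sigma_FWHM * contrast)**2 + (FWHM * sigma_contrast)**2)
ew = fwhm * contrast
ew_err = np.hypot(fwhm_err * contrast, fwhm * contrast_err)

print(ew)
print(ew_err)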
@@ -1912,6 +2027,8 @@ class RV(ISSUES, REPORTS):

         # create copy of self to be returned
         snew = deepcopy(self)
+        # store original object
+        snew._unbinned = deepcopy(self)

         all_bad_quantities = []

@@ -1943,7 +2060,8 @@ class RV(ISSUES, REPORTS):

             # treat ccf_mask specially, doing a 'unique' bin
             if q == 'ccf_mask':
-
+                ccf_mask = getattr(s, q)[s.mask]
+                setattr(s, q, bin_ccf_mask(s.mtime, ccf_mask))
                 continue

             if Q.dtype != np.float64:
@@ -2101,23 +2219,32 @@ class RV(ISSUES, REPORTS):
         if config.return_self:
             return self

-    def detrend(self, degree=1):
-        """
+    def detrend(self, degree: int=1):
+        """
+        Detrend the RVs of all instruments using a polynomial of degree `degree`
+        """
         instrument_indices = np.unique_inverse(self.instrument_array).inverse_indices
-
+        instrument_indices_masked = np.unique_inverse(self.instrument_array[self.mask]).inverse_indices
+
+        def fun(p, t, degree, ninstruments, just_model=False, index=None, masked=True):
             polyp, offsets = p[:degree], p[-ninstruments:]
             polyp = np.r_[polyp, 0.0]
             if index is None:
-
+                if masked:
+                    model = offsets[instrument_indices_masked] + np.polyval(polyp, t)
+                else:
+                    model = offsets[instrument_indices] + np.polyval(polyp, t)
             else:
                 model = offsets[index] + np.polyval(polyp, t)
             if just_model:
                 return model
             return self.mvrad - model
+
         coef = np.polyfit(self.mtime, self.mvrad, degree)
         x0 = np.append(coef, [0.0] * (len(self.instruments) - 1))
-        print(x0)
+        # print(x0)
         fun(x0, self.mtime, degree, len(self.instruments))
+
         from scipy.optimize import leastsq
         xbest, _ = leastsq(fun, x0, args=(self.mtime, degree, len(self.instruments)))

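The detrending fit solves simultaneously for the polynomial coefficients (with the constant term fixed at zero) and one offset per instrument with `scipy.optimize.leastsq`, which is the structure of `fun` above; the new `masked` flag only selects which set of instrument indices the offsets are applied to. A self-contained sketch of that model on synthetic data (all names and numbers here are illustrative):

import numpy as np
from scipy.optimize import leastsq

# synthetic data: a linear drift plus a different zero-point per "instrument"
rng = np.random.default_rng(1)
t = np.sort(rng.uniform(0.0, 1000.0, 80))
inst_idx = (t > 500).astype(int)   # instrument 0 for the first half, instrument 1 after
rv = 0.02 * t + np.array([0.0, 12.0])[inst_idx] + rng.normal(0.0, 0.5, t.size)

degree, ninst = 1, 2


def residuals(p, t, rv, inst_idx):
    # first `degree` entries: polynomial coefficients (constant term fixed at 0);
    # last `ninst` entries: one offset per instrument
    polyp, offsets = np.r_[p[:degree], 0.0], p[-ninst:]
    model = offsets[inst_idx] + np.polyval(polyp, t)
    return rv - model


x0 = np.append(np.polyfit(t, rv, degree)[:degree], np.zeros(ninst))
xbest, _ = leastsq(residuals, x0, args=(t, rv, inst_idx))
print('fitted slope  :', xbest[:degree])    # close to 0.02
print('fitted offsets:', xbest[degree:])    # close to [0, 12]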
@@ -2127,12 +2254,13 @@ class RV(ISSUES, REPORTS):
         self.plot(ax=ax)
         for i, inst in enumerate(self.instruments):
             s = getattr(self, inst)
-            ax.plot(s.time,
+            ax.plot(s.time,
+                    fun(xbest, s.time, degree, len(self.instruments), just_model=True, index=i, masked=False),
                     color=f'C{i}')
         ax.set_title('original', loc='left', fontsize=10)
         ax.set_title(f'coefficients: {xbest[:degree]}', loc='right', fontsize=10)

-        self.add_to_vrad(-fun(xbest, self.time, degree, len(self.instruments), just_model=True))
+        self.add_to_vrad(-fun(xbest, self.time, degree, len(self.instruments), just_model=True, masked=False))
         ax = fig.add_subplot(2, 1, 2)
         self.plot(ax=ax)
         ax.set_title('detrended', loc='left', fontsize=10)
@@ -2141,7 +2269,7 @@ class RV(ISSUES, REPORTS):
         # axs[1].errorbar(self.mtime, fun(xbest, self.mtime, degree, len(self.instruments)), self.msvrad, fmt='o')

         return
-
+



@@ -2301,24 +2429,31 @@ class RV(ISSUES, REPORTS):
         self.units = new_units


-    def put_at_systemic_velocity(self):
+    def put_at_systemic_velocity(self, factor=1.0, ignore=None):
         """
-        For instruments in which mean(RV) < ptp(RV), "move" RVs to
-        velocity from simbad. This is useful if some instruments
-        at zero while others are not, and instead of calling
-        but it only works when the systemic velocity is
+        For instruments in which mean(RV) < `factor` * ptp(RV), "move" RVs to
+        the systemic velocity from simbad. This is useful if some instruments
+        are centered at zero while others are not, and instead of calling
+        `.adjust_means()`, but it only works when the systemic velocity is
+        smaller than `factor` * ptp(RV).
         """
         changed = False
         for inst in self.instruments:
+            if ignore is not None:
+                if inst in ignore or any([i in inst for i in ignore]):
+                    continue
+            changed_inst = False
             s = getattr(self, inst)
             if s.mask.any():
-                if np.abs(s.mvrad.mean()) < np.ptp(s.mvrad):
+                if np.abs(s.mvrad.mean()) < factor * np.ptp(s.mvrad):
                     s.vrad += self.simbad.rvz_radvel * 1e3
-                    changed = True
+                    changed = changed_inst = True
             else: # all observations are masked, use non-masked arrays
-                if np.abs(s.vrad.mean()) < np.ptp(s.vrad):
+                if np.abs(s.vrad.mean()) < factor * np.ptp(s.vrad):
                     s.vrad += self.simbad.rvz_radvel * 1e3
-                    changed = True
+                    changed = changed_inst = True
+            if changed_inst and self.verbose:
+                logger.info(f"putting {inst} RVs at systemic velocity")
         if changed:
             self._build_arrays()

@@ -2340,34 +2475,72 @@ class RV(ISSUES, REPORTS):
         self.instruments = sorted(self.instruments, key=lambda i: getattr(self, i).time.max())
         self._build_arrays()

+    def put_instrument_last(self, instrument):
+        if not self._check_instrument(instrument, strict=True, log=True):
+            return
+        self.instruments = [i for i in self.instruments if i != instrument] + [instrument]
+        self._build_arrays()

-    def save(self, directory=None, instrument=None,
-
-
+    def save(self, directory=None, instrument=None, format='rdb',
+             indicators=False, join_instruments=False, postfix=None,
+             save_masked=False, save_nans=True, **kwargs):
+        """ Save the observations in .rdb or .csv files.

         Args:
             directory (str, optional):
                 Directory where to save the .rdb files.
             instrument (str, optional):
                 Instrument for which to save observations.
-
-
+            format (str, optional):
+                Format to use ('rdb' or 'csv').
+            indicators (bool, str, list[str], optional):
+                Save only RVs and errors (False) or more indicators. If True,
+                use a default list, if `str`, use an existing list, if list[str]
+                provide a sequence of specific indicators.
+            join_instruments (bool, optional):
+                Join all instruments in a single file.
             postfix (str, optional):
                 Postfix to add to the filenames ([star]_[instrument]_[postfix].rdb).
+            save_masked (bool, optional)
+                If True, also save masked observations (those for which
+                self.mask == False)
             save_nans (bool, optional)
                 Whether to save NaN values in the indicators, if they exist. If
                 False, the full observation which contains NaN values is not saved.
         """
+        if format not in ('rdb', 'csv'):
+            logger.error(f"format must be 'rdb' or 'csv', got '{format}'")
+            return
+
         star_name = self.star.replace(' ', '')

-        if directory is None:
-            directory = '.'
-        else:
+        if directory is not None:
             os.makedirs(directory, exist_ok=True)

+        indicator_sets = {
+            "default": [
+                "fwhm", "fwhm_err",
+                "bispan", "bispan_err",
+                "contrast", "contrast_err",
+                "rhk", "rhk_err",
+                "berv",
+            ],
+            "CORALIE": [
+                "fwhm", "fwhm_err",
+                "bispan", "bispan_err",
+                "contrast", "contrast_err",
+                "haindex", "haindex_err",
+                "berv",
+            ],
+        }
+
+        if 'full' in kwargs:
+            logger.warning('argument `full` is deprecated, use `indicators` instead')
+            indicators = kwargs['full']
+
         files = []

-        for inst in self.instruments:
+        for _i, inst in enumerate(self.instruments):
             if instrument is not None:
                 if instrument not in inst:
                     continue
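The `indicators` argument is resolved against the `indicator_sets` dictionary shown above: `True` picks the default set, a string picks a named set, and a list of strings is used as given. A condensed standalone mirror of that dispatch (not the method itself):

indicator_sets = {
    'default': ['fwhm', 'fwhm_err', 'bispan', 'bispan_err',
                'contrast', 'contrast_err', 'rhk', 'rhk_err', 'berv'],
    'CORALIE': ['fwhm', 'fwhm_err', 'bispan', 'bispan_err',
                'contrast', 'contrast_err', 'haindex', 'haindex_err', 'berv'],
}


def resolve_indicators(indicators):
    """Mirror of how save() interprets its `indicators` argument."""
    if indicators in (False, None):
        return []
    if indicators is True:
        return indicator_sets['default']
    if isinstance(indicators, str):
        return indicator_sets[indicators]        # KeyError means "unknown set"
    if isinstance(indicators, list) and all(isinstance(i, str) for i in indicators):
        return indicators


print(resolve_indicators(True)[:4])
print(resolve_indicators('CORALIE')[-3:])
print(resolve_indicators(['fwhm', 'fwhm_err']))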
@@ -2377,75 +2550,95 @@ class RV(ISSUES, REPORTS):
             if not _s.mask.any(): # all observations are masked, don't save
                 continue

-            if
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-            # nan_mask = np.isnan(d[:, 3:]).any(axis=1)
-            # d = d[~nan_mask]
-            # if self.verbose:
-            # logger.warning(f'masking {nan_mask.sum()} observations with NaN in indicators')
-
-            header = '\t'.join(['rjd', 'vrad', 'svrad',
-                                'fwhm', 'sig_fwhm',
-                                'bispan', 'sig_bispan',
-                                'contrast', 'sig_contrast',
-                                'rhk', 'sig_rhk',
-                                'berv',
-                                ])
-            header += '\n'
-            header += '\t'.join(['-' * len(c) for c in header.strip().split('\t')])
+            if save_masked:
+                arrays = [_s.time, _s.vrad, _s.svrad]
+                if join_instruments:
+                    arrays += [_s.instrument_array]
+            else:
+                arrays = [_s.mtime, _s.mvrad, _s.msvrad]
+                if join_instruments:
+                    arrays += [_s.instrument_array[_s.mask]]
+
+            if indicators in (False, None):
+                indicator_names = []
+            else:
+                if indicators is True:
+                    indicator_names = indicator_sets["default"]
+                elif isinstance(indicators, str):
+                    try:
+                        indicator_names = indicator_sets[indicators]
+                    except KeyError:
+                        logger.error(f"unknown indicator set '{indicators}'")
+                        logger.error(f"available: {list(indicator_sets.keys())}")
+                        return
+                elif isinstance(indicators, list) and all(isinstance(i, str) for i in indicators):
+                    indicator_names = indicators

+            if save_masked:
+                arrays += [getattr(_s, ind) for ind in indicator_names]
             else:
-
-
-
-
+                arrays += [getattr(_s, ind)[_s.mask] for ind in indicator_names]
+
+            d = np.stack(arrays, axis=1)
+            if not save_nans:
+                # raise NotImplementedError
+                if np.isnan(d).any():
+                    # remove observations where any of the indicators are # NaN
+                    nan_mask = np.isnan(d[:, 3:]).any(axis=1)
+                    d = d[~nan_mask]
+                    if self.verbose:
+                        msg = f'{inst}: masking {nan_mask.sum()} observations with NaN in indicators'
+                        logger.warning(msg)
+
+            cols = ['rjd', 'vrad', 'svrad']
+            cols += ['inst'] if join_instruments else []
+            cols += indicator_names

-
-            header = '
+            if format == 'rdb':
+                header = '\t'.join(cols)
+                header += '\n'
+                header += '\t'.join(['-' * len(c) for c in header.strip().split('\t')])
+            else:
+                header = ','.join(cols)

-
-
-
+            if join_instruments:
+                file = f'{star_name}.{format}'
+                if postfix is not None:
+                    file = f'{star_name}_{postfix}.{format}'
+            else:
+                file = f'{star_name}_{inst}.{format}'
+                if postfix is not None:
+                    file = f'{star_name}_{inst}_{postfix}.{format}'

+            if directory is not None:
+                file = os.path.join(directory, file)
             files.append(file)
-            file = os.path.join(directory, file)

             N = len(arrays[0])
-            with open(file, 'w') as f:
-
+            with open(file, 'a' if join_instruments and _i != 0 else 'w') as f:
+                if join_instruments and _i != 0:
+                    pass
+                else:
+                    f.write(header + '\n')
+
                 for i in range(N):
                     for j, a in enumerate(arrays):
                         f.write(str(a[i]))
                         if j < len(arrays) - 1:
-                            f.write('\t')
+                            f.write('\t' if format == 'rdb' else ',')
                     f.write('\n')

             # np.savetxt(file, d, header=header, delimiter='\t', comments='', fmt='%f')

-            if self.verbose:
+            if self.verbose and not join_instruments:
                 logger.info(f'saving to {file}')

+        if self.verbose and join_instruments:
+            logger.info(f'saving to {files[0]}')
+
+        if join_instruments:
+            files = [files[0]]
+
         return files

     def checksum(self, write_to=None):
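In `rdb` format the header written by `save` is a tab-separated row of column names followed by a row of dashes matching each name's width, while `csv` just joins the names with commas. The header construction in isolation:

cols = ['rjd', 'vrad', 'svrad', 'fwhm', 'fwhm_err', 'berv']

# rdb: tab-separated names, then a row of dashes matching each name's width
rdb_header = '\t'.join(cols)
rdb_header += '\n' + '\t'.join('-' * len(c) for c in cols)
print(rdb_header)

# csv: just the comma-separated names
print(','.join(cols))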