arvi 0.1.8__tar.gz → 0.1.11__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (49) hide show
  1. {arvi-0.1.8 → arvi-0.1.11}/PKG-INFO +1 -1
  2. arvi-0.1.11/arvi/HZ.py +95 -0
  3. {arvi-0.1.8 → arvi-0.1.11}/arvi/__init__.py +6 -0
  4. {arvi-0.1.8 → arvi-0.1.11}/arvi/binning.py +17 -0
  5. arvi-0.1.11/arvi/config.py +2 -0
  6. {arvi-0.1.8 → arvi-0.1.11}/arvi/dace_wrapper.py +64 -48
  7. arvi-0.1.11/arvi/data/extra/HD86226_PFS1.rdb +50 -0
  8. arvi-0.1.11/arvi/data/extra/HD86226_PFS2.rdb +59 -0
  9. arvi-0.1.11/arvi/data/extra/metadata.json +10 -0
  10. arvi-0.1.11/arvi/extra_data.py +71 -0
  11. {arvi-0.1.8 → arvi-0.1.11}/arvi/plots.py +190 -30
  12. {arvi-0.1.8 → arvi-0.1.11}/arvi/simbad_wrapper.py +23 -6
  13. {arvi-0.1.8 → arvi-0.1.11}/arvi/stats.py +32 -2
  14. {arvi-0.1.8 → arvi-0.1.11}/arvi/timeseries.py +342 -70
  15. {arvi-0.1.8 → arvi-0.1.11}/arvi/utils.py +53 -3
  16. {arvi-0.1.8 → arvi-0.1.11}/arvi.egg-info/PKG-INFO +1 -1
  17. {arvi-0.1.8 → arvi-0.1.11}/arvi.egg-info/SOURCES.txt +8 -1
  18. {arvi-0.1.8 → arvi-0.1.11}/pyproject.toml +1 -1
  19. arvi-0.1.11/tests/test_binning.py +26 -0
  20. arvi-0.1.11/tests/test_stats.py +32 -0
  21. arvi-0.1.8/arvi/config.py +0 -1
  22. {arvi-0.1.8 → arvi-0.1.11}/.github/workflows/docs-gh-pages.yml +0 -0
  23. {arvi-0.1.8 → arvi-0.1.11}/.github/workflows/install.yml +0 -0
  24. {arvi-0.1.8 → arvi-0.1.11}/.github/workflows/python-publish.yml +0 -0
  25. {arvi-0.1.8 → arvi-0.1.11}/.gitignore +0 -0
  26. {arvi-0.1.8 → arvi-0.1.11}/LICENSE +0 -0
  27. {arvi-0.1.8 → arvi-0.1.11}/README.md +0 -0
  28. {arvi-0.1.8 → arvi-0.1.11}/arvi/data/info.svg +0 -0
  29. {arvi-0.1.8 → arvi-0.1.11}/arvi/data/obs_affected_ADC_issues.dat +0 -0
  30. {arvi-0.1.8 → arvi-0.1.11}/arvi/data/obs_affected_blue_cryostat_issues.dat +0 -0
  31. {arvi-0.1.8 → arvi-0.1.11}/arvi/instrument_specific.py +0 -0
  32. {arvi-0.1.8 → arvi-0.1.11}/arvi/lbl_wrapper.py +0 -0
  33. {arvi-0.1.8 → arvi-0.1.11}/arvi/nasaexo_wrapper.py +0 -0
  34. {arvi-0.1.8 → arvi-0.1.11}/arvi/programs.py +0 -0
  35. {arvi-0.1.8 → arvi-0.1.11}/arvi/reports.py +0 -0
  36. {arvi-0.1.8 → arvi-0.1.11}/arvi/setup_logger.py +0 -0
  37. {arvi-0.1.8 → arvi-0.1.11}/arvi/translations.py +0 -0
  38. {arvi-0.1.8 → arvi-0.1.11}/arvi.egg-info/dependency_links.txt +0 -0
  39. {arvi-0.1.8 → arvi-0.1.11}/arvi.egg-info/requires.txt +0 -0
  40. {arvi-0.1.8 → arvi-0.1.11}/arvi.egg-info/top_level.txt +0 -0
  41. {arvi-0.1.8 → arvi-0.1.11}/docs/API.md +0 -0
  42. {arvi-0.1.8 → arvi-0.1.11}/docs/detailed.md +0 -0
  43. {arvi-0.1.8 → arvi-0.1.11}/docs/index.md +0 -0
  44. {arvi-0.1.8 → arvi-0.1.11}/docs/logo/detective.png +0 -0
  45. {arvi-0.1.8 → arvi-0.1.11}/docs/logo/logo.png +0 -0
  46. {arvi-0.1.8 → arvi-0.1.11}/mkdocs.yml +0 -0
  47. {arvi-0.1.8 → arvi-0.1.11}/setup.cfg +0 -0
  48. {arvi-0.1.8 → arvi-0.1.11}/setup.py +0 -0
  49. {arvi-0.1.8 → arvi-0.1.11}/tests/test_import_object.py +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: arvi
3
- Version: 0.1.8
3
+ Version: 0.1.11
4
4
  Summary: The Automated RV Inspector
5
5
  Author-email: João Faria <joao.faria@unige.ch>
6
6
  License: MIT
arvi-0.1.11/arvi/HZ.py ADDED
@@ -0,0 +1,95 @@
1
+ from collections import namedtuple
2
+ import numpy as np
3
+ from astropy.constants import G
4
+ from astropy import units
5
+
6
# Coefficients of the analytical expression for the habitable-zone
# flux boundaries (Kopparapu et al. 2013)
seffsun = np.array([1.776, 1.107, 0.356, 0.320, 1.188, 0.99])
a = np.array([2.136e-4, 1.332e-4, 6.171e-5, 5.547e-5, 1.433e-4, 1.209e-4])
b = np.array([2.533e-8, 1.580e-8, 1.698e-9, 1.526e-9, 1.707e-8, 1.404e-8])
c = np.array([-1.332e-11, -8.308e-12, -3.198e-12, -2.874e-12, -8.968e-12, -7.418e-12])
d = np.array([-3.097e-15, -1.931e-15, -5.575e-16, -5.011e-16, -2.084e-15, -1.713e-15])


def getHZ(teff, lum, which='conservative'):
    """
    Calculate the limits of the HZ, according to Kopparapu et al. (2013).

    Parameters
    ----------
    teff : float
        Stellar effective temperature
    lum : float
        Stellar luminosity (in units of the solar luminosity)
    which : str
        Either 'conservative' or 'optimistic' habitable zone

    Returns
    -------
    innerHZf, outerHZf : floats
        Inner and outer limits of the HZ [in stellar flux compared to the Sun].
    innerHZd, outerHZd : floats
        Inner and outer limits of the HZ [in AU].

    Raises
    ------
    ValueError
        If `which` is not one of 'conservative'/'con'/'optimistic'/'opt'.
    """
    # quartic polynomial in the temperature offset from the Sun
    dtemp = teff - 5780
    seff = seffsun + a * dtemp + b * dtemp**2 + c * dtemp**3 + d * dtemp**4
    # flux -> distance, assuming inverse-square dilution
    distance = np.sqrt(lum / seff)

    # index order of the coefficients:
    #   0 recentVenus, 1 runawayGreenhouse, 2 maxGreenhouse,
    #   3 earlyMars, 4 runaway5Me, 5 runaway10Me
    if which in ('conservative', 'con'):
        inner, outer = 1, 2
    elif which in ('optimistic', 'opt'):
        inner, outer = 0, 3
    else:
        raise ValueError(f'Could not recognise `which={which}`')

    return (seff[inner], seff[outer]), (distance[inner], distance[outer])
55
+
56
+
57
def getHZ_period(teff, Mstar, Mplanet, lum=1, Mplanet_units='earth',
                 which='conservative'):
    """
    Calculate the period limits of the HZ.

    Parameters
    ----------
    teff : float
        Stellar effective temperature
    Mstar : float
        Mass of the star
    Mplanet : float
        Mass of the planet
    lum : float
        Stellar luminosity (in units of the solar luminosity)
    Mplanet_units : str
        Units of the planet mass, 'earth' or 'jupiter'
    which : str
        Either 'conservative' or 'optimistic' habitable zone

    Returns
    -------
    innerHZd, outerHZd : floats
        Inner and outer limits of the HZ [in days].
    """
    # Kepler's 3rd law: P = sqrt(4 pi^2 a^3 / (G (M* + Mp)))
    const = 4 * np.pi**2 / G

    # distance limits of the HZ, converted to an astropy Quantity in AU
    _, hz_distances = getHZ(teff, lum, which)
    HZa = np.array(hz_distances) * units.AU

    Mstar = Mstar * units.solMass

    unit_key = Mplanet_units.lower()
    if unit_key == 'earth':
        Mplanet = Mplanet * units.earthMass
    elif unit_key in ('jupiter', 'jup'):
        Mplanet = Mplanet * units.jupiterMass
    # NOTE(review): an unrecognised `Mplanet_units` leaves `Mplanet`
    # dimensionless, so the sum below presumably fails in astropy — confirm
    # whether an explicit ValueError is intended here

    return np.sqrt(const * HZa**3 / (Mstar + Mplanet)).to(units.day)
@@ -5,6 +5,12 @@ from .timeseries import RV
5
5
  _ran_once = False
6
6
 
7
7
  def __getattr__(name: str):
8
+ if name in (
9
+ '_ipython_canary_method_should_not_exist_',
10
+ '_repr_mimebundle_',
11
+ ):
12
+ return
13
+
8
14
  global _ran_once # can't do it any other way :(
9
15
  if _ran_once:
10
16
  return RV(name)
@@ -1,6 +1,8 @@
1
1
  import numpy as np
2
2
  from numpy.testing import suppress_warnings
3
3
 
4
+ from .setup_logger import logger
5
+
4
6
  ###############################################################################
5
7
  # the following is mostly a copy of the scipy implementation of
6
8
  # binned_statistic and binned_statistic_dd
@@ -379,3 +381,18 @@ def binRV(time, rv, err=None, stat='wmean', tstat='wmean', estat='addquad',
379
381
  if n_consecutive and consecutive_step:
380
382
  return btime, brv, random_choices
381
383
  return btime, brv
384
+
385
+
386
def bin_ccf_mask(time, ccf_mask):
    """ Nightly-bin the CCF mask values.

    Uses `binRV` to get the per-night bin boundaries and returns one mask
    value per night. If a night holds more than one distinct mask value, an
    error is logged and that night's entry is set to the string 'nan'.
    """
    edges = binRV(time, None, binning_indices=True)
    # append the final boundary so each night is a [start, stop) pair
    edges = np.r_[edges, time.size]

    nightly = []
    for start, stop in zip(edges[:-1], edges[1:]):
        masks = np.unique(ccf_mask[start:stop]).squeeze()
        if masks.size > 1:
            logger.error(f'Non-unique CCF mask within one night (t={time[start]:.1f}). '
                         'Setting to NaN, but RV should be discarded')
            nightly.append('nan')
        else:
            nightly.append(masks)

    return np.array(nightly)
@@ -0,0 +1,2 @@
1
# arvi module-level configuration defaults
# NOTE(review): exact semantics of these flags are defined by their
# consumers elsewhere in the package — confirm against usage
return_self = False
check_internet = False
@@ -5,10 +5,10 @@ import numpy as np
5
5
  from dace_query import DaceClass
6
6
  from dace_query.spectroscopy import SpectroscopyClass, Spectroscopy as default_Spectroscopy
7
7
  from .setup_logger import logger
8
- from .utils import create_directory, all_logging_disabled, stdout_disabled
8
+ from .utils import create_directory, all_logging_disabled, stdout_disabled, tqdm
9
9
 
10
10
 
11
- def load_spectroscopy():
11
+ def load_spectroscopy() -> SpectroscopyClass:
12
12
  if 'DACERC' in os.environ:
13
13
  dace = DaceClass(dace_rc_config_path=os.environ['DACERC'])
14
14
  return SpectroscopyClass(dace_instance=dace)
@@ -151,10 +151,18 @@ def get_observations(star, instrument=None, verbose=True):
151
151
 
152
152
 
153
153
  def check_existing(output_directory, files, type):
154
+ """ Check how many of `files` exist in `output_directory` """
154
155
  existing = [
155
156
  f.partition('.fits')[0] for f in os.listdir(output_directory)
156
157
  if type in f
157
158
  ]
159
+
160
+ # also check for lowercase type
161
+ existing += [
162
+ f.partition('.fits')[0] for f in os.listdir(output_directory)
163
+ if type.lower() in f
164
+ ]
165
+
158
166
  if os.name == 'nt': # on Windows, be careful with ':' in filename
159
167
  import re
160
168
  existing = [re.sub(r'T(\d+)_(\d+)_(\d+)', r'T\1:\2:\3', f) for f in existing]
@@ -171,13 +179,14 @@ def check_existing(output_directory, files, type):
171
179
  return np.array(missing)
172
180
 
173
181
def download(files, type, output_directory):
    """ Download files from DACE """
    spectroscopy = load_spectroscopy()
    # with stdout_disabled(), all_logging_disabled():
    # the file type is lowercased before being passed on to DACE
    spectroscopy.download_files(files, file_type=type.lower(),
                                output_directory=output_directory)
179
187
 
180
188
  def extract_fits(output_directory):
189
+ """ Extract fits files from tar.gz file """
181
190
  file = os.path.join(output_directory, 'spectroscopy_download.tar.gz')
182
191
  with tarfile.open(file, "r") as tar:
183
192
  files = []
@@ -192,14 +201,16 @@ def extract_fits(output_directory):
192
201
  return files
193
202
 
194
203
 
195
- def do_download_ccf(raw_files, output_directory, clobber=False, verbose=True):
204
+ def do_download_filetype(type, raw_files, output_directory, clobber=False,
205
+ verbose=True, chunk_size=20):
206
+ """ Download CCFs / S1Ds / S2Ds from DACE """
196
207
  raw_files = np.atleast_1d(raw_files)
197
208
 
198
209
  create_directory(output_directory)
199
210
 
200
211
  # check existing files to avoid re-downloading
201
212
  if not clobber:
202
- raw_files = check_existing(output_directory, raw_files, 'CCF')
213
+ raw_files = check_existing(output_directory, raw_files, type)
203
214
 
204
215
  # any file left to download?
205
216
  if raw_files.size == 0:
@@ -209,66 +220,71 @@ def do_download_ccf(raw_files, output_directory, clobber=False, verbose=True):
209
220
 
210
221
  if verbose:
211
222
  n = raw_files.size
212
- logger.info(f"Downloading {n} CCFs into '{output_directory}'...")
223
+ logger.info(f"Downloading {n} {type}s into '{output_directory}'...")
213
224
 
214
- download(raw_files, 'ccf', output_directory)
225
+ # avoid an empty chunk
226
+ if chunk_size > n:
227
+ chunk_size = n
215
228
 
216
- if verbose:
217
- logger.info('Extracting .fits files')
229
+ for files in tqdm(zip(*(iter(raw_files),) * chunk_size), total=n // chunk_size):
230
+ download(files, type, output_directory)
231
+ extract_fits(output_directory)
218
232
 
219
- extract_fits(output_directory)
233
+ logger.info('Extracted .fits files')
220
234
 
221
235
 
222
- def do_download_s1d(raw_files, output_directory, clobber=False, verbose=True):
223
- raw_files = np.atleast_1d(raw_files)
236
+ # def do_download_s1d(raw_files, output_directory, clobber=False, verbose=True):
237
+ # """ Download S1Ds from DACE """
238
+ # raw_files = np.atleast_1d(raw_files)
224
239
 
225
- create_directory(output_directory)
240
+ # create_directory(output_directory)
226
241
 
227
- # check existing files to avoid re-downloading
228
- if not clobber:
229
- raw_files = check_existing(output_directory, raw_files, 'S1D')
242
+ # # check existing files to avoid re-downloading
243
+ # if not clobber:
244
+ # raw_files = check_existing(output_directory, raw_files, 'S1D')
230
245
 
231
- # any file left to download?
232
- if raw_files.size == 0:
233
- if verbose:
234
- logger.info('no files to download')
235
- return
246
+ # # any file left to download?
247
+ # if raw_files.size == 0:
248
+ # if verbose:
249
+ # logger.info('no files to download')
250
+ # return
236
251
 
237
- if verbose:
238
- n = raw_files.size
239
- logger.info(f"Downloading {n} S1Ds into '{output_directory}'...")
252
+ # if verbose:
253
+ # n = raw_files.size
254
+ # logger.info(f"Downloading {n} S1Ds into '{output_directory}'...")
240
255
 
241
- download(raw_files, 's1d', output_directory)
256
+ # download(raw_files, 's1d', output_directory)
242
257
 
243
- if verbose:
244
- logger.info('Extracting .fits files')
258
+ # if verbose:
259
+ # logger.info('Extracting .fits files')
245
260
 
246
- extract_fits(output_directory)
261
+ # extract_fits(output_directory)
247
262
 
248
263
 
249
- def do_download_s2d(raw_files, output_directory, clobber=False, verbose=True):
250
- raw_files = np.atleast_1d(raw_files)
264
+ # def do_download_s2d(raw_files, output_directory, clobber=False, verbose=True):
265
+ # """ Download S2Ds from DACE """
266
+ # raw_files = np.atleast_1d(raw_files)
251
267
 
252
- create_directory(output_directory)
268
+ # create_directory(output_directory)
253
269
 
254
- # check existing files to avoid re-downloading
255
- if not clobber:
256
- raw_files = check_existing(output_directory, raw_files, 'S2D')
270
+ # # check existing files to avoid re-downloading
271
+ # if not clobber:
272
+ # raw_files = check_existing(output_directory, raw_files, 'S2D')
257
273
 
258
- # any file left to download?
259
- if raw_files.size == 0:
260
- if verbose:
261
- logger.info('no files to download')
262
- return
274
+ # # any file left to download?
275
+ # if raw_files.size == 0:
276
+ # if verbose:
277
+ # logger.info('no files to download')
278
+ # return
263
279
 
264
- if verbose:
265
- n = raw_files.size
266
- logger.info(f"Downloading {n} S2Ds into '{output_directory}'...")
280
+ # if verbose:
281
+ # n = raw_files.size
282
+ # logger.info(f"Downloading {n} S2Ds into '{output_directory}'...")
267
283
 
268
- download(raw_files, 's2d', output_directory)
284
+ # download(raw_files, 's2d', output_directory)
269
285
 
270
- if verbose:
271
- logger.info('Extracting .fits files')
286
+ # if verbose:
287
+ # logger.info('Extracting .fits files')
272
288
 
273
- extracted_files = extract_fits(output_directory)
274
- return extracted_files
289
+ # extracted_files = extract_fits(output_directory)
290
+ # return extracted_files
@@ -0,0 +1,50 @@
1
+ bjd vrad svrad sindex note
2
+ --- ---- ----- ------ ----
3
+ 55198.80046232133736339165 1.69 1.8200 0.1647 pfs1
4
+ 55198.80272248463552386966 7.35 1.9800 0.1714 pfs1
5
+ 55252.70830374259912787238 3.79 1.6000 0.1575 pfs1
6
+ 55256.67966442795204784488 1.40 1.5900 0.1930 pfs1
7
+ 55339.53760546142984821927 10.71 1.1200 0.1590 pfs1
8
+ 55339.54366497264436475234 12.31 1.0300 0.1615 pfs1
9
+ 55581.81336310460210370366 2.01 1.5400 0.1653 pfs1
10
+ 55588.75747179496465832926 5.36 1.9400 0.1741 pfs1
11
+ 55588.76391210197834880091 0.96 1.7700 0.1600 pfs1
12
+ 55664.60144363600556971505 -2.50 1.8300 0.1605 pfs1
13
+ 55953.81271478361486515496 -12.68 1.7600 0.1664 pfs1
14
+ 55955.78313663220797025133 -4.83 1.6300 0.1595 pfs1
15
+ 55958.78249719663108407985 -13.95 1.7300 0.1607 pfs1
16
+ 56087.49751283174737181980 -6.21 1.2200 0.1629 pfs1
17
+ 56087.50102254691228154115 -12.52 1.3100 0.2072 pfs1
18
+ 56092.49371338664695940679 -7.55 1.2100 0.1588 pfs1
19
+ 56281.80971364229299069848 -15.15 1.7200 0.1536 pfs1
20
+ 56343.71369983421755023301 -10.69 1.6500 0.1544 pfs1
21
+ 56356.73510193424954195507 -10.83 1.4400 0.1498 pfs1
22
+ 56428.52030044805815123254 -12.86 1.2900 0.1494 pfs1
23
+ 56432.51966593080487655243 -10.69 1.4100 0.1511 pfs1
24
+ 56438.50234741292752005393 -2.34 1.3000 0.1669 pfs1
25
+ 56694.70676536645714804763 -5.71 1.6900 0.1556 pfs1
26
+ 56697.69280276263998530339 1.81 1.6900 0.1470 pfs1
27
+ 56701.70015445011449628510 -5.08 1.5100 0.1458 pfs1
28
+ 56732.69609971049703744939 3.97 1.6800 0.1544 pfs1
29
+ 56734.71282864823933778098 7.00 1.4800 0.1484 pfs1
30
+ 56817.47683799520291358931 3.61 1.1700 0.1499 pfs1
31
+ 57021.77403655086800426943 -8.31 1.5600 0.1447 pfs1
32
+ 57025.76921799563115200726 -4.49 1.6300 0.1464 pfs1
33
+ 57055.75204189672695065383 3.03 1.6100 0.1466 pfs1
34
+ 57062.66716833305508771446 -4.02 1.6400 0.1457 pfs1
35
+ 57067.62944563883502269164 4.15 1.8400 0.1501 pfs1
36
+ 57117.62039468254715757212 -3.21 1.6900 0.1510 pfs1
37
+ 57388.78167686419419624144 -9.75 1.6500 0.1547 pfs1
38
+ 57395.73140734456137579400 -4.64 1.8100 0.1537 pfs1
39
+ 57452.68808872029239864787 -12.29 1.3100 0.1525 pfs1
40
+ 57468.64404368715167947812 -6.42 1.6400 0.1560 pfs1
41
+ 57474.66793088545728096506 -9.87 1.6600 0.1543 pfs1
42
+ 57478.63241514035780710401 -6.08 1.6700 0.1506 pfs1
43
+ 57498.58900430634002987063 -7.94 1.7100 0.1614 pfs1
44
+ 57555.47357154744440776994 -7.63 1.3000 0.1634 pfs1
45
+ 57737.83878056255798583152 6.39 1.9200 0.1745 pfs1
46
+ 57759.78635257452424411895 -15.35 1.5700 0.1561 pfs1
47
+ 57763.76870947524730581790 -18.94 1.6700 0.1560 pfs1
48
+ 57769.73997213117309001973 -9.05 1.5000 0.1547 pfs1
49
+ 57825.71407201319289015373 -10.32 1.5300 0.1576 pfs1
50
+ 57851.57088638147524761735 -3.14 1.9100 0.1611 pfs1
@@ -0,0 +1,59 @@
1
+ bjd vrad svrad sindex note
2
+ --- ---- ----- ------ ----
3
+ 58271.48476701452273118775 -0.39 1.0600 0.1576 pfs2
4
+ 58469.85188790411484660581 7.66 1.6000 0.1709 pfs2
5
+ 58469.85452811854338506237 6.11 1.5400 0.1746 pfs2
6
+ 58469.85708832627824449446 4.70 1.7900 0.1747 pfs2
7
+ 58507.82192559534996689763 -4.30 1.6100 0.1669 pfs2
8
+ 58527.71946652651968179271 -1.36 1.6400 0.1672 pfs2
9
+ 58527.72314662046119337901 -3.06 1.6800 0.1668 pfs2
10
+ 58530.71117664554913062602 3.42 1.6600 0.1611 pfs2
11
+ 58530.71417670958180679008 2.70 1.7200 0.1620 pfs2
12
+ 58530.71736677762783074286 3.57 1.5200 0.1626 pfs2
13
+ 58618.45879877419247350190 3.71 1.8200 0.2130 pfs2
14
+ 58618.46154856201974325813 0.66 1.1200 0.1625 pfs2
15
+ 58618.46607821243924263399 1.14 1.1400 0.1643 pfs2
16
+ 58618.54730191529779403936 0.75 1.1900 0.1762 pfs2
17
+ 58618.55266149781709827948 0.92 1.1300 0.1661 pfs2
18
+ 58619.45689150650832743850 -4.38 1.3000 0.1732 pfs2
19
+ 58619.45968128995627921540 -2.79 1.0900 0.1678 pfs2
20
+ 58619.46247107380804663990 -1.45 1.1800 0.1728 pfs2
21
+ 58619.57906197325974062551 -1.01 1.2900 0.1806 pfs2
22
+ 58619.58182175672118319198 -1.77 1.2900 0.1821 pfs2
23
+ 58619.58463153612683527172 2.15 1.4300 0.1823 pfs2
24
+ 58620.52444841803662711754 1.78 1.2200 0.1705 pfs2
25
+ 58620.52726819673625868745 -1.67 1.2300 0.1709 pfs2
26
+ 58620.53005797750483907294 -2.07 1.2900 0.1790 pfs2
27
+ 58620.61090160662024572957 0.50 1.0600 0.1666 pfs2
28
+ 58620.61716111190617084503 1.46 1.1300 0.1679 pfs2
29
+ 58621.45879538112058071420 0.23 1.1200 0.1766 pfs2
30
+ 58621.46448493526622769423 2.79 1.1000 0.1600 pfs2
31
+ 58621.46914457000093534589 5.11 1.2500 0.1656 pfs2
32
+ 58621.57597614719270495698 4.89 1.1200 0.1713 pfs2
33
+ 58621.58011581905338971410 1.91 1.1900 0.1766 pfs2
34
+ 58621.58434548389777773991 0.95 1.2300 0.1836 pfs2
35
+ 58622.45716698968499258626 1.05 1.1800 0.1692 pfs2
36
+ 58622.45998676735325716436 -1.59 1.2500 0.1749 pfs2
37
+ 58622.46285654186794999987 0.63 1.1300 0.1686 pfs2
38
+ 58622.57642754959124431480 1.66 1.0900 0.1664 pfs2
39
+ 58622.58053722261684015393 -0.68 1.1300 0.1661 pfs2
40
+ 58623.55484038770191546064 0.32 1.1300 0.1672 pfs2
41
+ 58623.56155985150689957663 -4.31 1.0400 0.1722 pfs2
42
+ 58624.46357848408115387429 -1.26 1.0900 0.1632 pfs2
43
+ 58624.46821811556947068311 -3.43 1.0900 0.1657 pfs2
44
+ 58624.55298135494194866624 -0.65 1.2400 0.1724 pfs2
45
+ 58624.55781096817190700676 0.00 1.2200 0.1611 pfs2
46
+ 58625.46538890280135092326 1.06 1.1800 0.1641 pfs2
47
+ 58625.47024851558853697497 1.90 1.0500 0.1586 pfs2
48
+ 58625.56686077608901541680 3.21 1.1600 0.1648 pfs2
49
+ 58625.57112043322740646545 6.55 1.2300 0.1657 pfs2
50
+ 58626.52836415075398690533 3.57 1.2200 0.1717 pfs2
51
+ 58626.53363372676540166140 6.35 1.1600 0.1674 pfs2
52
+ 58626.58020997056337364484 0.33 1.1900 0.1673 pfs2
53
+ 58626.58559953517396934330 5.12 1.0800 0.1661 pfs2
54
+ 58627.45867979986178397667 0.10 0.9600 0.1577 pfs2
55
+ 58627.46152957162485108711 -0.50 1.0200 0.1585 pfs2
56
+ 58627.46432934695258154534 -1.31 1.0600 0.1591 pfs2
57
+ 58627.59090913418367563281 1.08 1.2000 0.1770 pfs2
58
+ 58627.59374890412800596096 -0.50 1.2000 0.1760 pfs2
59
+ 58627.59652867844306456391 4.50 1.1100 0.1766 pfs2
@@ -0,0 +1,10 @@
1
+ {
2
+
3
+ "HD86226_PFS1.rdb": {
4
+ "reference": "Teske et al. 2020 (AJ, 160, 2)"
5
+ },
6
+ "HD86226_PFS2.rdb": {
7
+ "reference": "Teske et al. 2020 (AJ, 160, 2)"
8
+ }
9
+
10
+ }
@@ -0,0 +1,71 @@
1
+ import os
2
+ from glob import glob
3
+ import json
4
+
5
+ from numpy import full
6
+ from .setup_logger import logger
7
+ from . import timeseries
8
+
9
# publication references for the bundled extra datasets
refs = dict(HD86226='Teske et al. 2020 (AJ, 160, 2)')
12
+
13
def get_extra_data(star, instrument=None, path=None, verbose=True):
    """ Load extra (non-DACE) radial-velocity data bundled with arvi.

    Parameters
    ----------
    star : str
        Star name, used to match file names in the extra-data directory.
    instrument : str, optional
        If given, only load files whose instrument name contains this string.
    path : str, optional
        Directory holding the extra data files. Defaults to the package's
        `data/extra` directory.
    verbose : bool
        Whether to log an informational message when loading.

    Returns
    -------
    s : RV
        Combined timeseries built from all matching files.

    Raises
    ------
    FileNotFoundError
        If no file matches `star` (and `instrument`, when given).
    """
    if path is None:
        path = os.path.join(os.path.dirname(__file__), 'data', 'extra')

    # metadata.json holds optional per-file information
    # (instrument, units, publication reference, ...)
    # fix: use a context manager so the file handle is closed
    with open(os.path.join(path, 'metadata.json'), 'r') as f:
        metadata = json.load(f)

    files = glob(os.path.join(path, star + '*'))
    files = [f for f in files if os.path.isfile(f)]
    files = [f for f in files if not os.path.basename(f).endswith('.zip')]

    if len(files) == 0:
        raise FileNotFoundError(f'no extra data found for {star}')

    def get_instruments(files):
        # file names are expected to look like STAR_INSTRUMENT.ext
        instruments = [os.path.basename(f).split('.')[0] for f in files]
        return [i.split('_', maxsplit=1)[1] for i in instruments]

    instruments = get_instruments(files)

    if instrument is not None:
        if not any(instrument in i for i in instruments):
            raise FileNotFoundError(
                f'no extra data found for {star} and instrument {instrument}')
        files = [f for f in files if instrument in f]
        instruments = get_instruments(files)

    if verbose:
        logger.info(f'loading extra data for {star}')

    # per-file defaults, possibly overridden by metadata.json
    units = len(files) * ['ms']
    reference = len(files) * [None]
    did_sa = len(files) * [False]

    for i, file in enumerate(files):
        file_basename = os.path.basename(file)
        if file_basename in metadata:
            meta = metadata[file_basename]
            if 'instrument' in meta:
                instruments[i] = meta['instrument']
            if 'units' in meta:
                units[i] = meta['units']
            if 'reference' in meta:
                reference[i] = meta['reference']
            if 'corrected_for_secular_acceleration' in meta:
                did_sa[i] = meta['corrected_for_secular_acceleration']

    # load the first file, then accumulate the rest onto it
    s = timeseries.RV.from_rdb(files[0], star=star,
                               instrument=instruments[0], units=units[0])
    # renamed the loop variable (was `instrument`) so it no longer shadows
    # the function parameter
    for file, inst, unit in zip(files[1:], instruments[1:], units[1:]):
        s = s + timeseries.RV.from_rdb(file, star=star,
                                       instrument=inst, units=unit)

    # attach per-instrument reference and secular-acceleration flags
    for inst, ref, inst_did_sa in zip(s.instruments, reference, did_sa):
        _s = getattr(s, inst)
        if ref is not None:
            _s.pub_reference = full(_s.N, ref)
        if inst_did_sa:
            _s._did_secular_acceleration = True

    return s