arvi 0.1.3__tar.gz → 0.1.6__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of arvi might be problematic. Click here for more details.

Files changed (41)
  1. {arvi-0.1.3 → arvi-0.1.6}/.github/workflows/install.yml +1 -1
  2. {arvi-0.1.3 → arvi-0.1.6}/.gitignore +3 -0
  3. {arvi-0.1.3 → arvi-0.1.6}/PKG-INFO +2 -14
  4. {arvi-0.1.3 → arvi-0.1.6}/README.md +1 -13
  5. {arvi-0.1.3 → arvi-0.1.6}/arvi/__init__.py +1 -2
  6. {arvi-0.1.3 → arvi-0.1.6}/arvi/dace_wrapper.py +21 -8
  7. {arvi-0.1.3 → arvi-0.1.6}/arvi/lbl_wrapper.py +130 -17
  8. arvi-0.1.6/arvi/nasaexo_wrapper.py +137 -0
  9. {arvi-0.1.3 → arvi-0.1.6}/arvi/plots.py +37 -12
  10. {arvi-0.1.3 → arvi-0.1.6}/arvi/reports.py +12 -8
  11. {arvi-0.1.3 → arvi-0.1.6}/arvi/simbad_wrapper.py +16 -3
  12. {arvi-0.1.3 → arvi-0.1.6}/arvi/stats.py +21 -2
  13. {arvi-0.1.3 → arvi-0.1.6}/arvi/timeseries.py +369 -79
  14. {arvi-0.1.3 → arvi-0.1.6}/arvi.egg-info/PKG-INFO +2 -14
  15. {arvi-0.1.3 → arvi-0.1.6}/arvi.egg-info/SOURCES.txt +1 -0
  16. {arvi-0.1.3 → arvi-0.1.6}/arvi.egg-info/requires.txt +1 -1
  17. {arvi-0.1.3 → arvi-0.1.6}/pyproject.toml +4 -4
  18. {arvi-0.1.3 → arvi-0.1.6}/.github/workflows/docs-gh-pages.yml +0 -0
  19. {arvi-0.1.3 → arvi-0.1.6}/.github/workflows/python-publish.yml +0 -0
  20. {arvi-0.1.3 → arvi-0.1.6}/LICENSE +0 -0
  21. {arvi-0.1.3 → arvi-0.1.6}/arvi/binning.py +0 -0
  22. {arvi-0.1.3 → arvi-0.1.6}/arvi/config.py +0 -0
  23. {arvi-0.1.3 → arvi-0.1.6}/arvi/data/info.svg +0 -0
  24. {arvi-0.1.3 → arvi-0.1.6}/arvi/data/obs_affected_ADC_issues.dat +0 -0
  25. {arvi-0.1.3 → arvi-0.1.6}/arvi/data/obs_affected_blue_cryostat_issues.dat +0 -0
  26. {arvi-0.1.3 → arvi-0.1.6}/arvi/instrument_specific.py +0 -0
  27. {arvi-0.1.3 → arvi-0.1.6}/arvi/programs.py +0 -0
  28. {arvi-0.1.3 → arvi-0.1.6}/arvi/setup_logger.py +0 -0
  29. {arvi-0.1.3 → arvi-0.1.6}/arvi/translations.py +0 -0
  30. {arvi-0.1.3 → arvi-0.1.6}/arvi/utils.py +0 -0
  31. {arvi-0.1.3 → arvi-0.1.6}/arvi.egg-info/dependency_links.txt +0 -0
  32. {arvi-0.1.3 → arvi-0.1.6}/arvi.egg-info/top_level.txt +0 -0
  33. {arvi-0.1.3 → arvi-0.1.6}/docs/API.md +0 -0
  34. {arvi-0.1.3 → arvi-0.1.6}/docs/detailed.md +0 -0
  35. {arvi-0.1.3 → arvi-0.1.6}/docs/index.md +0 -0
  36. {arvi-0.1.3 → arvi-0.1.6}/docs/logo/detective.png +0 -0
  37. {arvi-0.1.3 → arvi-0.1.6}/docs/logo/logo.png +0 -0
  38. {arvi-0.1.3 → arvi-0.1.6}/mkdocs.yml +0 -0
  39. {arvi-0.1.3 → arvi-0.1.6}/setup.cfg +0 -0
  40. {arvi-0.1.3 → arvi-0.1.6}/setup.py +0 -0
  41. {arvi-0.1.3 → arvi-0.1.6}/tests/test_import_object.py +0 -0
@@ -5,7 +5,7 @@ name: Install-Test
5
5
 
6
6
  on:
7
7
  push:
8
- branches: [ "main" ]
8
+ #branches: [ "main" ]
9
9
  pull_request:
10
10
  branches: [ "main" ]
11
11
 
@@ -1,3 +1,6 @@
1
+ *.fits
2
+
3
+
1
4
  # Byte-compiled / optimized / DLL files
2
5
  __pycache__/
3
6
  *.py[cod]
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: arvi
3
- Version: 0.1.3
3
+ Version: 0.1.6
4
4
  Summary: The Automated RV Inspector
5
5
  Author-email: João Faria <joao.faria@unige.ch>
6
6
  License: MIT
@@ -20,8 +20,8 @@ Requires-Dist: dace-query
20
20
  Requires-Dist: loguru
21
21
  Requires-Dist: mpldatacursor
22
22
  Requires-Dist: tqdm
23
- Requires-Dist: pyexoplaneteu
24
23
  Requires-Dist: pySWEETCat
24
+ Requires-Dist: kepmodel
25
25
 
26
26
  <p align="center">
27
27
  <img width = "140" src="https://github.com/j-faria/arvi/blob/main/docs/logo/logo.png?raw=true"/>
@@ -29,18 +29,6 @@ Requires-Dist: pySWEETCat
29
29
 
30
30
 
31
31
 
32
- ## Goals
33
-
34
- - _Fast_:
35
- all `arvi` operations must be completed in under **1 second**
36
-
37
- - _Reproducible_:
38
- **everyone** must be able to use `arvi`
39
-
40
- - _Accurate_:
41
- `arvi` must always provide the **correct result**
42
-
43
-
44
32
  #### Actions
45
33
 
46
34
  [![Deploy docs](https://github.com/j-faria/arvi/actions/workflows/docs-gh-pages.yml/badge.svg)](https://github.com/j-faria/arvi/actions/workflows/docs-gh-pages.yml)
@@ -4,20 +4,8 @@
4
4
 
5
5
 
6
6
 
7
- ## Goals
8
-
9
- - _Fast_:
10
- all `arvi` operations must be completed in under **1 second**
11
-
12
- - _Reproducible_:
13
- **everyone** must be able to use `arvi`
14
-
15
- - _Accurate_:
16
- `arvi` must always provide the **correct result**
17
-
18
-
19
7
  #### Actions
20
8
 
21
9
  [![Deploy docs](https://github.com/j-faria/arvi/actions/workflows/docs-gh-pages.yml/badge.svg)](https://github.com/j-faria/arvi/actions/workflows/docs-gh-pages.yml)
22
10
  [![Install-Test](https://github.com/j-faria/arvi/actions/workflows/install.yml/badge.svg)](https://github.com/j-faria/arvi/actions/workflows/install.yml)
23
- [![Upload Python Package](https://github.com/j-faria/arvi/actions/workflows/python-publish.yml/badge.svg)](https://github.com/j-faria/arvi/actions/workflows/python-publish.yml)
11
+ [![Upload Python Package](https://github.com/j-faria/arvi/actions/workflows/python-publish.yml/badge.svg)](https://github.com/j-faria/arvi/actions/workflows/python-publish.yml)
@@ -1,5 +1,4 @@
1
- __version__ = '0.1.3'
2
- __all__ = []
1
+ __all__ = ['RV']
3
2
 
4
3
  from .timeseries import RV
5
4
 
@@ -38,6 +38,9 @@ def get_arrays(result, latest_pipeline=True, ESPRESSO_mode='HR11', NIRPS_mode='H
38
38
  logger.warning(f'no observations for requested ESPRESSO mode ({ESPRESSO_mode})')
39
39
 
40
40
  if latest_pipeline:
41
+ if verbose and len(pipelines) > 1:
42
+ logger.info(f'selecting latest pipeline ({pipelines[0]}) for {inst}')
43
+
41
44
  pipelines = [pipelines[0]]
42
45
 
43
46
  for pipe in pipelines:
@@ -149,14 +152,22 @@ def get_observations(star, instrument=None, save_rdb=False, verbose=True):
149
152
 
150
153
  def check_existing(output_directory, files, type):
151
154
  existing = [
152
- f.partition('_')[0] for f in os.listdir(output_directory)
155
+ f.partition('.fits')[0] for f in os.listdir(output_directory)
153
156
  if type in f
154
157
  ]
158
+ if os.name == 'nt': # on Windows, be careful with ':' in filename
159
+ import re
160
+ existing = [re.sub(r'T(\d+)_(\d+)_(\d+)', r'T\1:\2:\3', f) for f in existing]
161
+
162
+ # remove type of file (e.g. _CCF_A)
163
+ existing = [f.partition('_')[0] for f in existing]
164
+
155
165
  missing = []
156
166
  for file in files:
157
167
  if any(other in file for other in existing):
158
168
  continue
159
169
  missing.append(file)
170
+
160
171
  return np.array(missing)
161
172
 
162
173
  def download(files, type, output_directory):
@@ -168,13 +179,15 @@ def download(files, type, output_directory):
168
179
 
169
180
  def extract_fits(output_directory):
170
181
  file = os.path.join(output_directory, 'spectroscopy_download.tar.gz')
171
- tar = tarfile.open(file, "r")
172
- files = []
173
- for member in tar.getmembers():
174
- if member.isreg(): # skip if the TarInfo is not a file
175
- member.name = os.path.basename(member.name) # remove the path
176
- tar.extract(member, output_directory)
177
- files.append(member.name)
182
+ with tarfile.open(file, "r") as tar:
183
+ files = []
184
+ for member in tar.getmembers():
185
+ if member.isreg(): # skip if the TarInfo is not a file
186
+ member.name = os.path.basename(member.name) # remove the path
187
+ if os.name == 'nt': # on Windows, be careful with ':' in filename
188
+ member.name = member.name.replace(':', '_')
189
+ tar.extract(member, output_directory)
190
+ files.append(member.name)
178
191
  os.remove(file)
179
192
  return files
180
193
 
@@ -64,6 +64,9 @@ def run_lbl(self, instrument, files, id=None,
64
64
  if mode == 'HA':
65
65
  rparams['INSTRUMENT'] = 'NIRPS_HA'
66
66
  rparams['DATA_SOURCE'] = 'ESO'
67
+ elif 'CARMENES' in instrument:
68
+ rparams['INSTRUMENT'] = 'CARMENES'
69
+ rparams['DATA_SOURCE'] = None
67
70
 
68
71
  # SPIROU: APERO or CADC
69
72
  # NIRPS_HA: APERO or ESO
@@ -165,25 +168,133 @@ def run_lbl(self, instrument, files, id=None,
165
168
  lbl_wrap(rparams)
166
169
 
167
170
 
168
- def load_lbl(self, instrument=None, filename=None, tell=False):
171
+ def get_lbl_apero(self):
172
+ main_url = 'http://apero.exoplanets.ca/ari/nirps/nirps_he_online/objects/'
173
+
174
+ translate = {
175
+ 'LP804-27': 'LP_804M27', 'HIP79431': 'LP_804M27',
176
+ }
177
+
178
+ if self.star in translate:
179
+ star = translate[self.star]
180
+ else:
181
+ star = self.star
182
+
183
+ # special case
184
+ if star == 'Proxima':
185
+ star = star.upper()
186
+
187
+
188
+ got_rdb = False
189
+ url = main_url + f'{star}.html'
190
+ password = input('password: ')
191
+ resp = requests.get(url, auth=HTTPBasicAuth('nirps', password))
192
+ rdb = re.findall('href="([\w.\/]+lbl_\w+_\w+.rdb)', resp.text)
193
+ if len(rdb) != 0:
194
+ got_rdb = True
195
+
196
+ if not got_rdb:
197
+ url = main_url + f'{star.replace("GJ", "GL")}.html'
198
+ resp = requests.get(url, auth=HTTPBasicAuth('nirps', password))
199
+ rdb = re.findall('href="([\w.\/]+lbl_\w+_\w+.rdb)', resp.text)
200
+ if len(rdb) != 0:
201
+ got_rdb = True
202
+
203
+ if not got_rdb:
204
+ logger.error(f'Cannot find APERO rdb file for {star}')
205
+ raise ValueError
206
+
207
+ # (arbitrarily) choose first file
208
+ rdb = rdb[0]
209
+ print(main_url + rdb)
210
+ resp = requests.get(main_url + rdb, auth=HTTPBasicAuth('nirps', password))
211
+ with open(os.path.basename(rdb), 'w') as f:
212
+ f.write(resp.text)
213
+
214
+ with io.StringIO(resp.text) as f:
215
+ RDB = np.genfromtxt(f, delimiter='\t', names=True, invalid_raise=False,
216
+ comments='N\tN', dtype=None, encoding=None)
217
+
218
+ s = RV.from_arrays(self.star, RDB['rjd'], RDB['vrad'], RDB['svrad'],
219
+ 'NIRPS_LBL')#, mask=self.NIRPS.mask)
220
+
221
+ s._quantities = []
222
+
223
+ s.fwhm = RDB['fwhm']
224
+ s.fwhm_err = RDB['sig_fwhm']
225
+ s._quantities.append('fwhm')
226
+ s._quantities.append('fwhm_err')
227
+
228
+ # s.secular_acceleration()
229
+
230
+ if self._did_adjust_means:
231
+ s.vrad -= wmean(s.vrad, s.svrad)
232
+ s.fwhm -= wmean(s.fwhm, s.fwhm_err)
233
+
234
+ # store other columns
235
+ columns = (
236
+ 'dW', 'sdW',
237
+ 'contrast', 'sig_contrast>contrast_err',
238
+ 'vrad_achromatic', 'svrad_achromatic',
239
+ 'vrad_chromatic_slope', 'svrad_chromatic_slope',
240
+ # 'vrad_g', 'svrad_g',
241
+ # 'vrad_r', 'svrad_r',
242
+ # 'vrad_457nm', 'svrad_457nm',
243
+ # 'vrad_473nm', 'svrad_473nm',
244
+ # 'vrad_490nm', 'svrad_490nm',
245
+ # 'vrad_507nm', 'svrad_507nm',
246
+ # 'vrad_524nm', 'svrad_524nm',
247
+ # 'vrad_542nm', 'svrad_542nm',
248
+ # 'vrad_561nm', 'svrad_561nm',
249
+ # 'vrad_581nm', 'svrad_581nm',
250
+ # 'vrad_601nm', 'svrad_601nm',
251
+ # 'vrad_621nm', 'svrad_621nm',
252
+ # 'vrad_643nm', 'svrad_643nm',
253
+ # 'vrad_665nm', 'svrad_665nm',
254
+ # 'vrad_688nm', 'svrad_688nm',
255
+ # 'vrad_712nm', 'svrad_712nm',
256
+ # 'vrad_737nm', 'svrad_737nm'
257
+ )
258
+ for col in columns:
259
+ if '>' in col: # store with a different name
260
+ setattr(s, col.split('>')[1], RDB[col.split('>')[0]])
261
+ s._quantities.append(col.split('>')[1])
262
+ else:
263
+ setattr(s, col, RDB[col])
264
+ s._quantities.append(col)
265
+
266
+ setattr(self, 'NIRPS_LBL', s)
267
+ self.instruments.append('NIRPS_LBL')
268
+
269
+
270
+ def load_lbl(self, instrument=None, filename=None, tell=False, id=None):
169
271
  lbl_run_dir = 'LBL_run_dir'
170
- print(tell)
171
- if tell:
172
- fits_file = os.path.join(lbl_run_dir, 'lblrdb',
173
- f'lbl_{self.star}_{instrument}_{self.star}_{instrument}_TELL.fits')
272
+
273
+ if id is None:
274
+ slug = f'{self.star}_{instrument}_{self.star}_{instrument}'
174
275
  else:
175
- fits_file = os.path.join(lbl_run_dir, 'lblrdb',
176
- f'lbl_{self.star}_{instrument}_{self.star}_{instrument}.fits')
276
+ slug = f'{self.star}_{instrument}_{id}_{self.star}_{instrument}_{id}'
277
+
278
+ f = f'lbl_{slug}'
279
+
280
+ if tell:
281
+ f = f + f'_TELL'
282
+
283
+ f = f + '.fits'
284
+
285
+ fits_file = os.path.join(lbl_run_dir, 'lblrdb', f)
286
+
287
+ # print(fits_file, os.path.exists(fits_file))
177
288
 
178
- print(fits_file)
179
289
  if not os.path.exists(fits_file):
180
- if instrument is None:
181
- logger.error(
182
- f'File "{fits_file}" does not exist, and instrument not provided')
183
- return
184
- else:
185
- fits_file = os.path.join(lbl_run_dir, 'lblrdb',
186
- f'lbl_{self.star}_{instrument}_{self.star}_{instrument}.fits')
290
+ raise FileNotFoundError(fits_file)
291
+ # if instrument is None:
292
+ # logger.error(
293
+ # f'File "{fits_file}" does not exist, and instrument not provided')
294
+ # return
295
+ # else:
296
+ # fits_file = os.path.join(lbl_run_dir, 'lblrdb',
297
+ # f'lbl_{self.star}_{instrument}_{self.star}_{instrument}.fits')
187
298
 
188
299
  hdu = fits.open(fits_file)
189
300
  RDB = hdu[9].data
@@ -194,7 +305,7 @@ def load_lbl(self, instrument=None, filename=None, tell=False):
194
305
  s.fwhm = RDB['fwhm']
195
306
  s.fwhm_err = RDB['sig_fwhm']
196
307
 
197
- s.secular_acceleration()
308
+ # s.secular_acceleration()
198
309
 
199
310
  if self._did_adjust_means:
200
311
  s.vrad -= wmean(s.vrad, s.svrad)
@@ -223,7 +334,9 @@ def load_lbl(self, instrument=None, filename=None, tell=False):
223
334
  'vrad_665nm', 'svrad_665nm',
224
335
  'vrad_688nm', 'svrad_688nm',
225
336
  'vrad_712nm', 'svrad_712nm',
226
- 'vrad_737nm', 'svrad_737nm'
337
+ 'vrad_737nm', 'svrad_737nm',
338
+ #
339
+ 'HIERARCH ESO QC BERV>berv'
227
340
  )
228
341
  for col in columns:
229
342
  try:
@@ -0,0 +1,137 @@
1
+ from copy import copy
2
+
3
+ import requests
4
+ from io import StringIO
5
+ import numpy as np
6
+
7
+ from .setup_logger import logger
8
+ from kepmodel.rv import RvModel
9
+ from spleaf.term import Error
10
+
11
+
12
+ url = 'https://exoplanetarchive.ipac.caltech.edu/TAP/sync?'
13
+
14
+ STAR_QUERY = [
15
+ 'select *',
16
+ 'from ps',
17
+ 'where',
18
+ 'default_flag=1',
19
+ 'and',
20
+ "hostname like '{star}'"
21
+ ]
22
+
23
+
24
+ def run_query(query):
25
+ link = f'{url}query={query}&format=csv'
26
+ r = requests.get(link)
27
+ data = np.genfromtxt(StringIO(r.text), delimiter=',', names=True,
28
+ dtype=None, encoding=None)
29
+ return r, data
30
+
31
+ class Planets:
32
+ def __init__(self, system):
33
+ self.s = system
34
+ self.verbose = system.verbose
35
+
36
+ self.star = system.star.replace('GJ', 'GJ ').replace('HD', 'HD ')
37
+
38
+ query = ' '.join(STAR_QUERY).replace(' ', '+')
39
+ query = query.format(star=self.star)
40
+
41
+ if self.verbose:
42
+ logger.info('querying NASA Exoplanet Archive...')
43
+
44
+ self.response, self.data = run_query(query)
45
+ self.np = self.data.size
46
+
47
+ # try again with other ids
48
+ if self.np == 0:
49
+ hdname = [i for i in self.s.simbad.ids if 'HD' in i]
50
+ if len(hdname) != 0:
51
+ hdname = hdname[0]
52
+ if self.verbose:
53
+ logger.info(f"trying with the HD name '{hdname}'...")
54
+ STAR_QUERY_HD = STAR_QUERY[:-1]
55
+ STAR_QUERY_HD.append(f"hd_name like '{hdname}'")
56
+ query = ' '.join(STAR_QUERY_HD).replace(' ', '+')
57
+ self.response, self.data = run_query(query)
58
+ self.np = self.data.size
59
+
60
+ hipname = [i for i in self.s.simbad.ids if 'HIP' in i]
61
+ if len(hipname) != 0:
62
+ hipname = hipname[0]
63
+ if self.verbose:
64
+ logger.info(f"trying with the HIP name '{hipname}'...")
65
+ STAR_QUERY_HIP = STAR_QUERY[:-1]
66
+ STAR_QUERY_HIP.append(f"hip_name like '{hipname}'")
67
+ query = ' '.join(STAR_QUERY_HIP).replace(' ', '+')
68
+ self.response, self.data = run_query(query)
69
+ self.np = self.data.size
70
+
71
+ if self.verbose:
72
+ logger.info(f'found {self.np} planets')
73
+
74
+ self.set_parameters()
75
+
76
+ def set_parameters(self):
77
+ self.P = np.atleast_1d(self.data['pl_orbper'])
78
+ self.K = np.atleast_1d(self.data['pl_rvamp'])
79
+ self.e = np.atleast_1d(self.data['pl_orbeccen'])
80
+
81
+ ts = self.s._time_sorter
82
+ self.model = RvModel(self.s.time[ts], self.s.vrad[ts],
83
+ err=Error(self.s.svrad[ts]))
84
+
85
+ for inst in self.s.instruments:
86
+ self.model.add_lin(1.0*(self.s.instrument_array[ts]==inst), f'offset_inst_{inst}')
87
+
88
+ for i in range(self.np):
89
+ if self.K[i] == False:
90
+ self.model.add_keplerian_from_period(self.P[i], fit=True)
91
+ else:
92
+ self.model.add_keplerian([self.P[i], self.K[i], self.e[i], 0.0, 0.0],
93
+ ['P', 'K', 'e', 'M0', 'omega'], fit=True)
94
+
95
+ def fit_lin(self, adjust_data=True):
96
+ self.model.show_param()
97
+ self.model.fit_lin()
98
+ self.model.show_param()
99
+ if adjust_data:
100
+ for inst in self.s.instruments:
101
+ _s = getattr(self.s, inst)
102
+ _s.vrad -= self.model.get_param(f'lin.offset_inst_{inst}')
103
+ self.s._build_arrays()
104
+
105
+ def fit_angles(self):
106
+ old_param = copy(self.model.fit_param)
107
+ fit_param = [f'kep.{i}.M0' for i in range(self.np)]
108
+ fit_param += [f'kep.{i}.omega' for i in range(self.np)]
109
+ self.model.fit_param = fit_param
110
+ self.model.fit()
111
+ self.model.fit_param = old_param
112
+ self.model.show_param()
113
+
114
+ def fit_all(self, adjust_data=False):
115
+ self.model.fit()
116
+
117
+ newP = np.array([self.model.get_param(f'kep.{i}.P') for i in range(self.np)])
118
+ if self.verbose and not np.allclose(self.P, newP):
119
+ logger.warning(f'periods changed: {self.P} --> {newP}')
120
+
121
+ newK = np.array([self.model.get_param(f'kep.{i}.K') for i in range(self.np)])
122
+ if self.verbose and not np.allclose(self.K, newK):
123
+ logger.warning(f'amplitudes changed: {self.K} --> {newK}')
124
+
125
+ newE = np.array([self.model.get_param(f'kep.{i}.e') for i in range(self.np)])
126
+ if self.verbose and not np.allclose(self.e, newE):
127
+ logger.warning(f'eccentricities changed: {self.e} --> {newE}')
128
+
129
+ if adjust_data:
130
+ for inst in self.s.instruments:
131
+ _s = getattr(self.s, inst)
132
+ _s.vrad -= self.model.get_param(f'lin.offset_inst_{inst}')
133
+ self.s._build_arrays()
134
+
135
+ def __repr__(self):
136
+ return f'{self.star}({self.np} planets, '\
137
+ f'P={list(self.P)}, K={list(self.K)}, e={list(self.e)})'
@@ -12,9 +12,9 @@ from .setup_logger import logger
12
12
  from . import config
13
13
 
14
14
 
15
- def plot(self, ax=None, show_masked=False, time_offset=0, remove_50000=False,
16
- tooltips=True, label=None, N_in_label=False, versus_n=False, show_histogram=False,
17
- **kwargs):
15
+ def plot(self, ax=None, show_masked=False, instrument=None, time_offset=0,
16
+ remove_50000=False, tooltips=True, label=None, N_in_label=False,
17
+ versus_n=False, show_histogram=False, **kwargs):
18
18
  """ Plot the RVs
19
19
 
20
20
  Args:
@@ -22,6 +22,8 @@ def plot(self, ax=None, show_masked=False, time_offset=0, remove_50000=False,
22
22
  Axis to plot to. Defaults to None.
23
23
  show_masked (bool, optional):
24
24
  Show masked points. Defaults to False.
25
+ instrument (str, optional):
26
+ Which instrument to plot. Defaults to None, or plot all instruments.
25
27
  time_offset (int, optional):
26
28
  Value to subtract from time. Defaults to 0.
27
29
  remove_50000 (bool, optional):
@@ -55,16 +57,21 @@ def plot(self, ax=None, show_masked=False, time_offset=0, remove_50000=False,
55
57
  ax, axh = ax
56
58
  fig = ax.figure
57
59
 
58
- kwargs.setdefault('fmt', 'o')
60
+ kwargs.setdefault('marker', 'o')
61
+ kwargs.setdefault('ls', '')
59
62
  kwargs.setdefault('capsize', 0)
60
63
  kwargs.setdefault('ms', 4)
61
64
 
62
65
  if remove_50000:
63
66
  time_offset = 50000
64
67
 
68
+ instruments = self._check_instrument(instrument)
69
+
65
70
  cursors = {}
66
- for inst in self.instruments:
71
+ for inst in instruments:
67
72
  s = self if self._child else getattr(self, inst)
73
+ if s.mask.sum() == 0:
74
+ continue
68
75
 
69
76
  if label is None:
70
77
  _label = f'{inst:10s} ({s.N})' if N_in_label else inst
@@ -162,8 +169,9 @@ def plot(self, ax=None, show_masked=False, time_offset=0, remove_50000=False,
162
169
  return fig, ax
163
170
 
164
171
 
165
- def plot_quantity(self, quantity, ax=None, show_masked=False, time_offset=0,
166
- remove_50000=False, tooltips=False, N_in_label=False, **kwargs):
172
+ def plot_quantity(self, quantity, ax=None, show_masked=False, instrument=None,
173
+ time_offset=0, remove_50000=False, tooltips=False,
174
+ N_in_label=False, **kwargs):
167
175
  if self.N == 0:
168
176
  if self.verbose:
169
177
  logger.error('no data to plot')
@@ -178,14 +186,17 @@ def plot_quantity(self, quantity, ax=None, show_masked=False, time_offset=0,
178
186
  else:
179
187
  fig = ax.figure
180
188
 
181
- kwargs.setdefault('fmt', 'o')
189
+ kwargs.setdefault('marker', 'o')
190
+ kwargs.setdefault('ls', '')
182
191
  kwargs.setdefault('capsize', 0)
183
192
  kwargs.setdefault('ms', 4)
184
193
 
185
194
  if remove_50000:
186
195
  time_offset = 50000
187
196
 
188
- for inst in self.instruments:
197
+ instruments = self._check_instrument(instrument)
198
+
199
+ for inst in instruments:
189
200
  s = self if self._child else getattr(self, inst)
190
201
  label = f'{inst:10s} ({s.N})' if N_in_label else inst
191
202
 
@@ -218,7 +229,7 @@ def plot_quantity(self, quantity, ax=None, show_masked=False, time_offset=0,
218
229
  elif quantity == 'bispan':
219
230
  ax.set_ylabel(f'BIS [{self.units}]')
220
231
  elif quantity == 'rhk':
221
- ax.set_ylabel("$\log$ R'$_{HK}$")
232
+ ax.set_ylabel(r"$\log$ R'$_{HK}$")
222
233
 
223
234
  if remove_50000:
224
235
  ax.set_xlabel('BJD - 2450000 [days]')
@@ -236,7 +247,7 @@ plot_bis = partialmethod(plot_quantity, quantity='bispan')
236
247
  plot_rhk = partialmethod(plot_quantity, quantity='rhk')
237
248
 
238
249
 
239
- def gls(self, ax=None, label=None, fap=True, picker=True, **kwargs):
250
+ def gls(self, ax=None, label=None, fap=True, picker=True, instrument=None, **kwargs):
240
251
  if self.N == 0:
241
252
  if self.verbose:
242
253
  logger.error('no data to compute gls')
@@ -247,7 +258,21 @@ def gls(self, ax=None, label=None, fap=True, picker=True, **kwargs):
247
258
  else:
248
259
  fig = ax.figure
249
260
 
250
- gls = LombScargle(self.mtime, self.mvrad, self.msvrad)
261
+ if instrument is not None:
262
+ instrument = self._check_instrument(instrument)
263
+ if instrument is not None:
264
+ instrument_mask = np.isin(self.instrument_array, instrument)
265
+ t = self.time[instrument_mask & self.mask]
266
+ y = self.vrad[instrument_mask & self.mask]
267
+ e = self.svrad[instrument_mask & self.mask]
268
+ if self.verbose:
269
+ logger.info(f'calculating periodogram for instrument {instrument}')
270
+ else:
271
+ t = self.time[self.mask]
272
+ y = self.vrad[self.mask]
273
+ e = self.svrad[self.mask]
274
+
275
+ self._gls = gls = LombScargle(t, y, e)
251
276
  freq, power = gls.autopower(maximum_frequency=1.0, samples_per_peak=10)
252
277
  ax.semilogx(1/freq, power, picker=picker, label=label, **kwargs)
253
278
 
@@ -92,11 +92,15 @@ def report(self, save=None):
92
92
  if save is True:
93
93
  save = f'report_{"".join(self.star.split())}.pdf'
94
94
 
95
- with PdfPages(save) as pdf:
96
- pdf.attach_note('hello', positionRect=[5, 15, 20, 30])
97
-
98
- if self.verbose:
99
- logger.info(f'saving to {save}')
100
- pdf.savefig(fig)
101
- plt.close('all')
102
- # os.system(f'evince {save} &')
95
+ if save.endswith('.png'):
96
+ fig.savefig(save)
97
+ else:
98
+ with PdfPages(save) as pdf:
99
+ #pdf.attach_note('hello', positionRect=[5, 15, 20, 30])
100
+
101
+ if self.verbose:
102
+ logger.info(f'saving to {save}')
103
+ pdf.savefig(fig)
104
+ # os.system(f'evince {save} &')
105
+
106
+ return fig
@@ -20,17 +20,24 @@ WHERE id = '{star}';
20
20
  """
21
21
 
22
22
  BV_QUERY = """
23
- SELECT B, V from allfluxes
23
+ SELECT B, V FROM allfluxes
24
24
  JOIN ident USING(oidref)
25
25
  WHERE id = '{star}';
26
26
  """
27
27
 
28
28
  IDS_QUERY = """
29
- SELECT ids from ids
29
+ SELECT ids FROM ids
30
30
  JOIN ident USING(oidref)
31
31
  WHERE id = '{star}';
32
32
  """
33
33
 
34
+ OID_QUERY = """
35
+ SELECT basic.OID FROM basic
36
+ JOIN ident ON oidref = oid
37
+ WHERE id = '{star}';
38
+ """
39
+
40
+
34
41
  def run_query(query):
35
42
  url = 'http://simbad.u-strasbg.fr/simbad/sim-tap/sync'
36
43
  response = requests.post(url,
@@ -87,6 +94,10 @@ class simbad:
87
94
  star (str): The name of the star to query simbad
88
95
  """
89
96
  self.star = star
97
+
98
+ # oid = run_query(query=OID_QUERY.format(star=star))
99
+ # self.oid = str(oid.split()[-1])
100
+
90
101
  try:
91
102
  table1 = run_query(query=QUERY.format(star=star))
92
103
  cols, values = parse_table(table1)
@@ -101,6 +112,9 @@ class simbad:
101
112
  raise ValueError(f'simbad query for {star} failed')
102
113
 
103
114
  for col, val in zip(cols, values):
115
+ if col == 'oid':
116
+ setattr(self, col, str(val))
117
+ continue
104
118
  try:
105
119
  setattr(self, col, float(val))
106
120
  except ValueError:
@@ -125,7 +139,6 @@ class simbad:
125
139
  if self.sp_type[:2] in effective_temperatures:
126
140
  self.teff = effective_temperatures[self.sp_type[:2]]
127
141
 
128
-
129
142
  def __repr__(self):
130
143
  V = self.V
131
144
  sp_type = self.sp_type