arvi 0.2.1-py3-none-any.whl → 0.2.3-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of arvi might be problematic.

arvi/__init__.py CHANGED
@@ -1,22 +1,22 @@
-__all__ = ['RV']
+__all__ = ['RV', 'config', 'simbad', 'gaia']
 
 from importlib.metadata import version, PackageNotFoundError
-
-from .config import config
-from .timeseries import RV
-
 try:
     __version__ = version("arvi")
 except PackageNotFoundError:
     # package is not installed
     pass
 
-## OLD
-# # the __getattr__ function is always called twice, so we need this
-# # to only build and return the RV object on the second time
-# _ran_once = False
+from .config import config
+from .simbad_wrapper import simbad
+from .gaia_wrapper import gaia
+
+from .timeseries import RV
 
 def __getattr__(name: str):
+    if not config.fancy_import:
+        raise AttributeError
+
     if name in (
         '_ipython_canary_method_should_not_exist_',
         '_ipython_display_',
@@ -29,15 +29,5 @@ def __getattr__(name: str):
         globals()[name] = RV(name)
         return globals()[name]
     except ValueError as e:
-        raise ImportError(e) from None
-    # raise AttributeError(f"module {__name__!r} has no attribute {name!r}")
-
-    ## OLD
-    # # can't do it any other way :(
-    # global _ran_once
+        raise AttributeError(e)
 
-    # if _ran_once:
-    #     _ran_once = False
-    #     return RV(name)
-    # else:
-    #     _ran_once = True
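
The change above replaces the old double-call `__getattr__` workaround with a config-gated implementation: any unknown attribute of the `arvi` module is treated as a star name and resolved to an `RV` object, which is then cached in the module globals, unless `config.fancy_import` is disabled. A minimal sketch of the resulting usage (the star name is only an example):

    # module-level __getattr__ builds and caches an RV object on demand
    from arvi import HD10180        # roughly equivalent to RV('HD10180')

    # with fancy_import disabled, unknown names raise AttributeError as usual
    from arvi import config
    config.fancy_import = False
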
arvi/binning.py CHANGED
@@ -1,6 +1,6 @@
 import numpy as np
 
-from .setup_logger import logger
+from .setup_logger import setup_logger
 
 ###############################################################################
 # the following is mostly a copy of the scipy implementation of
@@ -390,6 +390,7 @@ def binRV(time, rv, err=None, stat='wmean', tstat='wmean', estat='addquad',
 
 
 def bin_ccf_mask(time, ccf_mask):
+    logger = setup_logger()
     indices = binRV(time, None, binning_indices=True)
     indices = np.r_[indices, time.size]
     bmask = []
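
This release also switches every module from importing a shared `logger` object at import time to calling `setup_logger()` inside each function that logs. `arvi/setup_logger.py` itself is not part of this diff; a minimal sketch of the kind of lazy factory this pattern implies (only the `setup_logger` name comes from the diff, the body below is an assumption):

    import logging

    def setup_logger():
        # fetch (or lazily configure) the package-wide logger, so that
        # importing a module has no logging side effects and handlers
        # are only attached once
        logger = logging.getLogger('arvi')
        if not logger.handlers:
            handler = logging.StreamHandler()
            handler.setFormatter(logging.Formatter('%(levelname)s: %(message)s'))
            logger.addHandler(handler)
            logger.setLevel(logging.INFO)
        return logger
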
arvi/config.py CHANGED
@@ -31,6 +31,8 @@ class config:
         'check_internet': False,
         # make all DACE requests without using a .dacerc file
         'request_as_public': False,
+        # enable from arvi import star_name
+        'fancy_import': True,
         # debug
         'debug': False,
     }
@@ -43,10 +45,16 @@ class config:
             # return {'return_self': 'help!'}
             return {}
 
-        if self.__user_config.has_option('config', name):
-            self.__conf[name] = self.__user_config.get('config', name)
+        try:
+            if self.__user_config.has_option('config', name):
+                value = self.__user_config.get('config', name)
+                value = True if value == 'True' else value
+                value = False if value == 'False' else value
+                self.__conf[name] = value
 
-        return self.__conf[name]
+            return self.__conf[name]
+        except KeyError:
+            raise KeyError(f"unknown config option '{name}'")
 
     def __setattr__(self, name, value):
         if name in config.__setters:
@@ -54,7 +62,7 @@ class config:
         else:
             if 'config' not in self.__user_config:
                 self.__user_config.add_section('config')
-            self.__user_config.set('config', name, value)
+            self.__user_config.set('config', name, str(value))
             save_config(self.__user_config)
             # raise NameError(f"unknown configuration name '{name}'")
 
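
The new accessor code rounds out persistence through `configparser`, which stores everything as strings: `str(value)` on write and the explicit `'True'`/`'False'` conversion on read let boolean options round-trip, and unknown option names now raise a descriptive `KeyError`. A short sketch of the resulting behaviour (assuming the option is persisted rather than handled by one of the in-memory setters):

    from arvi import config

    config.fancy_import = False   # saved to the user config file as 'False'
    config.fancy_import           # read back and converted to the bool False

    config.no_such_option         # KeyError: "unknown config option 'no_such_option'"
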
arvi/dace_wrapper.py CHANGED
@@ -5,13 +5,17 @@ import collections
 from functools import lru_cache
 from itertools import islice
 import numpy as np
-from dace_query import DaceClass
-from dace_query.spectroscopy import SpectroscopyClass, Spectroscopy as default_Spectroscopy
-from .setup_logger import logger
+
+from .setup_logger import setup_logger
 from .utils import create_directory, all_logging_disabled, stdout_disabled, tqdm
 
 
-def load_spectroscopy(user=None) -> SpectroscopyClass:
+def load_spectroscopy(user=None):
+    logger = setup_logger()
+    with all_logging_disabled():
+        from dace_query.spectroscopy import SpectroscopyClass, Spectroscopy as default_Spectroscopy
+        from dace_query import DaceClass
+
     from .config import config
     # requesting as public
     if config.request_as_public:
@@ -39,10 +43,13 @@ def load_spectroscopy(user=None) -> SpectroscopyClass:
             logger.info(f'using credentials for user {user} in ~/.dacerc')
         return SpectroscopyClass(dace_instance=dace)
     # default
+    if not os.path.exists(os.path.expanduser('~/.dacerc')):
+        logger.warning('requesting DACE data as public (no .dacerc file found)')
     return default_Spectroscopy
 
 @lru_cache()
 def get_dace_id(star, verbose=True):
+    logger = setup_logger()
     filters = {"obj_id_catname": {"equal": [star]}}
     try:
         with all_logging_disabled():
@@ -54,6 +61,7 @@ def get_dace_id(star, verbose=True):
         raise ValueError from None
 
 def get_arrays(result, latest_pipeline=True, ESPRESSO_mode='HR11', NIRPS_mode='HE', verbose=True):
+    logger = setup_logger()
     arrays = []
     instruments = [str(i) for i in result.keys()]
 
@@ -153,6 +161,7 @@ def get_observations_from_instrument(star, instrument, user=None, main_id=None,
         dictionary with data from DACE
     """
     Spectroscopy = load_spectroscopy(user)
+
     found_dace_id = False
     try:
         dace_id = get_dace_id(star, verbose=verbose)
@@ -254,8 +263,10 @@ def get_observations_from_instrument(star, instrument, user=None, main_id=None,
     return r
 
 def get_observations(star, instrument=None, user=None, main_id=None, verbose=True):
+    logger = setup_logger()
     if instrument is None:
         Spectroscopy = load_spectroscopy(user)
+
         try:
             with stdout_disabled(), all_logging_disabled():
                 result = Spectroscopy.get_timeseries(target=star,
@@ -438,6 +449,7 @@ def extract_fits(output_directory, filename=None):
 
 
 def do_symlink_filetype(type, raw_files, output_directory, clobber=False, top_level=None, verbose=True):
+    logger = setup_logger()
     terminations = {
         'CCF': '_CCF_A.fits',
         'S1D': '_S1D_A.fits',
@@ -483,6 +495,7 @@ def do_symlink_filetype(type, raw_files, output_directory, clobber=False, top_le
 def do_download_filetype(type, raw_files, output_directory, clobber=False, user=None,
                          verbose=True, chunk_size=20, parallel_limit=30):
     """ Download CCFs / S1Ds / S2Ds from DACE """
+    logger = setup_logger()
     raw_files = np.atleast_1d(raw_files)
 
     create_directory(output_directory)
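
`load_spectroscopy` now imports `dace_query` lazily and silently (inside `all_logging_disabled()`), and warns when falling back to public access because no `~/.dacerc` file exists. Anonymous access can also be requested explicitly through the existing option shown in `arvi/config.py` above:

    from arvi import config

    # make all DACE requests without using a .dacerc file
    config.request_as_public = True
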
arvi/exofop_wrapper.py ADDED
@@ -0,0 +1,62 @@
+import csv
+import requests
+import time
+import importlib.resources as resources
+import numpy as np
+
+from .setup_logger import setup_logger
+
+def get_toi_list(verbose=True):
+    logger = setup_logger()
+    toi_list = resources.files('arvi') / 'data' / 'exofop_toi_list.csv'
+    now = time.time()
+    download = not toi_list.exists() or toi_list.stat().st_mtime < now - 48 * 60 * 60
+    if download:
+        if verbose:
+            logger.info('Downloading exofop TOI list (can take a while)...')
+        r = requests.get('https://exofop.ipac.caltech.edu/tess/download_toi.php?sort=toi&output=csv')
+        with open(toi_list, 'wb') as f:
+            f.write(r.content)
+    return toi_list
+
+class exofop:
+    def __init__(self, star: str, verbose=True, _debug=False):
+        self.star = star
+        self.verbose = verbose
+
+        toi_list = get_toi_list(verbose=verbose)
+        tsv = ('|'.join(i) for i in csv.reader(open(toi_list, encoding='utf8')))
+        self.data = np.genfromtxt(tsv, delimiter='|',
+                                  names=True, encoding='utf8', dtype=None)
+
+
+        try:
+            if self.star.startswith('TIC'):
+                self.tic = self.star
+                w = self.data['TIC_ID'] == int(self.star[3:])
+                self.toi = 'TOI-' + str(int(self.data['TOI'][w][0]))
+            else:
+                toi = self.star.replace('TOI-', '')
+                toi = toi if toi.endswith('.01') else toi + '.01'
+                toi_float = float(toi)
+                if toi_float not in self.data['TOI']:
+                    raise ValueError
+                w = self.data['TOI'] == toi_float
+                self.tic = 'TIC' + str(int(self.data['TIC_ID'][w][0]))
+                self.toi = self.star
+        except ValueError:
+            raise ValueError(f'{self.star} not found in exofop TOI list')
+        else:
+            self.ra = str(self.data['RA'][w][0])
+            self.dec = str(self.data['Dec'][w][0])
+
+            self.epoch = float(self.data['Epoch_BJD'][w][0])
+            self.period = float(self.data['Period_days'][w][0])
+            if self.period == 0.0:
+                self.period = np.nan
+            self.duration = float(self.data['Duration_hours'][w][0])
+            self.depth = float(self.data['Depth_ppm'][w][0])
+
+
+    def __repr__(self):
+        return f'{self.star} (TIC={self.tic}, epoch={self.epoch:.3f}, period={self.period:.3f})'
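
The new `exofop` class resolves a TOI or TIC identifier against ExoFOP's TOI list, which is cached under `arvi/data` and re-downloaded after 48 hours, and exposes the ephemeris of the `.01` candidate by default. A usage sketch (the identifiers below are made-up examples):

    from arvi.exofop_wrapper import exofop

    toi = exofop('TOI-1234')                 # example identifier
    print(toi.tic, toi.epoch, toi.period)    # TIC id, Epoch_BJD, Period_days

    tic = exofop('TIC123456789')             # TIC identifiers also work
    print(tic.toi)
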
arvi/extra_data.py CHANGED
@@ -3,8 +3,7 @@ from glob import glob
 import json
 
 from numpy import full
-from .setup_logger import logger
-from . import timeseries
+from .setup_logger import setup_logger
 
 refs = {
     'HD86226': 'Teske et al. 2020 (AJ, 160, 2)'
@@ -12,16 +11,21 @@ refs = {
 
 def get_extra_data(star, instrument=None, path=None, verbose=True,
                    check_for_kms=True):
+    from . import timeseries
+    logger = setup_logger()
     if path is None:
         path = os.path.dirname(__file__)
         path = os.path.join(path, 'data', 'extra')
+        metadata = json.load(open(os.path.join(path, 'metadata.json'), 'r'))
+        # print(metadata)
+    else:
+        metadata = {}
 
-    metadata = json.load(open(os.path.join(path, 'metadata.json'), 'r'))
-    # print(metadata)
-
-    files = glob(os.path.join(path, star.replace(' ', '') + '*.rdb'))
+    files = glob(os.path.join(path, star + '*.rdb'))
+    files += glob(os.path.join(path, star.replace(' ', '') + '*.rdb'))
     files = [f for f in files if os.path.isfile(f)]
-    files = [f for f in files if not os.path.basename(f).endswith('.zip')]
+    files = [f for f in files if not f.endswith('_actin.rdb')]
+    files = list(set(files))
 
     if len(files) == 0:
         raise FileNotFoundError
arvi/gaia_wrapper.py CHANGED
@@ -73,7 +73,7 @@ class gaia:
         plx (float): parallax
         radial_velocity (float): radial velocity
     """
-    def __init__(self, star:str, simbad=None):
+    def __init__(self, star:str, simbad=None, _debug=False):
         """
         Args:
             star (str): The name of the star to query simbad
@@ -83,6 +83,8 @@ class gaia:
         if simbad is None:
             from .simbad_wrapper import simbad as Simbad
             simbad = Simbad(star)
+        if _debug:
+            print(simbad)
 
         ra = simbad.ra
         dec = simbad.dec
@@ -95,10 +97,14 @@ class gaia:
         try:
             if star in translate:
                 table = run_query(query=QUERY_ID.format(id=translate[star]))
-            elif hasattr(simbad, 'gaia_id'):
+            elif hasattr(simbad, 'gaia_id') and simbad.gaia_id is not None:
                 table = run_query(query=QUERY_ID.format(id=simbad.gaia_id))
             else:
                 table = run_query(query=QUERY.format(**args))
+
+            if _debug:
+                print('table:', table)
+
             results = parse_csv(table)[0]
         except IndexError:
             raise ValueError(f'Gaia query for {star} failed')
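
Together with the new `__all__` in `arvi/__init__.py`, the `simbad` and `gaia` wrappers are now importable directly from the package, and the Gaia lookup only reuses `simbad.gaia_id` when it is actually set. The new private `_debug` flag prints the intermediate SIMBAD object and the raw query table, for example (the star name is only an example):

    from arvi import gaia

    g = gaia('HD10180', _debug=True)   # prints the simbad object and query table
    print(g.plx, g.radial_velocity)
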
@@ -1,7 +1,7 @@
 import os, sys
 import numpy as np
 
-from .setup_logger import logger
+from .setup_logger import setup_logger
 from .utils import ESPRESSO_ADC_issues, ESPRESSO_cryostat_issues
 
 
@@ -27,6 +27,7 @@ ESPRESSO_technical_intervention = 58665
 
 def divide_ESPRESSO(self):
     """ Split ESPRESSO data into separate sub ESP18 and ESP19 subsets """
+    logger = setup_logger()
     if self._check_instrument('ESPRESSO', strict=False) is None:
         return
     if 'ESPRESSO18' in self.instruments and 'ESPRESSO19' in self.instruments:
@@ -64,6 +65,7 @@ def divide_ESPRESSO(self):
 
 def divide_HARPS(self):
     """ Split HARPS data into separate sub HARPS03 and HARPS15 subsets """
+    logger = setup_logger()
     if self._check_instrument('HARPS', strict=False) is None:
         return
     if 'HARPS03' in self.instruments and 'HARPS15' in self.instruments:
@@ -100,6 +102,7 @@ def divide_HARPS(self):
 
 
 def check(self, instrument):
+    logger = setup_logger()
     instruments = self._check_instrument(instrument)
     if instruments is None:
         if self.verbose:
@@ -118,6 +121,7 @@ def HARPS_commissioning(self, mask=True, plot=True):
         plot (bool, optional):
             Whether to plot the masked points.
     """
+    logger = setup_logger()
     if check(self, 'HARPS') is None:
         return
 
@@ -149,6 +153,7 @@ def HARPS_fiber_commissioning(self, mask=True, plot=True):
         plot (bool, optional):
             Whether to plot the masked points.
     """
+    logger = setup_logger()
     if check(self, 'HARPS') is None:
         return
 
@@ -182,6 +187,7 @@ def ADC_issues(self, mask=True, plot=True, check_headers=False):
         check_headers (bool, optional):
             Whether to (double-)check the headers for missing/zero keywords.
     """
+    logger = setup_logger()
     instruments = self._check_instrument('ESPRESSO')
 
     if instruments is None:
@@ -225,6 +231,7 @@ def blue_cryostat_issues(self, mask=True, plot=True):
         mask (bool, optional): Whether to mask out the points.
         plot (bool, optional): Whether to plot the masked points.
     """
+    logger = setup_logger()
     instruments = self._check_instrument('ESPRESSO')
 
     if instruments is None:
@@ -259,6 +266,7 @@ def qc_scired_issues(self, plot=False, **kwargs):
     Args:
         plot (bool, optional): Whether to plot the masked points.
     """
+    logger = setup_logger()
     from .headers import get_headers
 
     instruments = self._check_instrument('ESPRESSO')
@@ -299,38 +307,40 @@ def qc_scired_issues(self, plot=False, **kwargs):
     return affected
 
 
-def known_issues(self, mask=True, plot=False, **kwargs):
-    """ Identify and optionally mask known instrumental issues.
-
-    Args:
-        mask (bool, optional): Whether to mask out the points.
-        plot (bool, optional): Whether to plot the masked points.
-    """
-    try:
-        adc = ADC_issues(self, mask, plot, **kwargs)
-    except IndexError:
-        logger.error('are the data binned? cannot proceed to mask these points...')
-
-    try:
-        cryostat = blue_cryostat_issues(self, mask, plot)
-    except IndexError:
-        logger.error('are the data binned? cannot proceed to mask these points...')
-
-    try:
-        harps_comm = HARPS_commissioning(self, mask, plot)
-    except IndexError:
-        logger.error('are the data binned? cannot proceed to mask these points...')
-
-    try:
-        harps_fibers = HARPS_fiber_commissioning(self, mask, plot)
-    except IndexError:
-        logger.error('are the data binned? cannot proceed to mask these points...')
-
-    # if None in (adc, cryostat, harps_comm, harps_fibers):
-    #     return
-
-    try:
-        # return adc | cryostat
-        return np.logical_or.reduce((adc, cryostat, harps_comm, harps_fibers))
-    except UnboundLocalError:
-        return
+class ISSUES:
+    def known_issues(self, mask=True, plot=False, **kwargs):
+        """ Identify and optionally mask known instrumental issues.
+
+        Args:
+            mask (bool, optional): Whether to mask out the points.
+            plot (bool, optional): Whether to plot the masked points.
+        """
+        logger = setup_logger()
+        try:
+            adc = ADC_issues(self, mask, plot, **kwargs)
+        except IndexError:
+            logger.error('are the data binned? cannot proceed to mask these points...')
+
+        try:
+            cryostat = blue_cryostat_issues(self, mask, plot)
+        except IndexError:
+            logger.error('are the data binned? cannot proceed to mask these points...')
+
+        try:
+            harps_comm = HARPS_commissioning(self, mask, plot)
+        except IndexError:
+            logger.error('are the data binned? cannot proceed to mask these points...')
+
+        try:
+            harps_fibers = HARPS_fiber_commissioning(self, mask, plot)
+        except IndexError:
+            logger.error('are the data binned? cannot proceed to mask these points...')
+
+        # if None in (adc, cryostat, harps_comm, harps_fibers):
+        #     return
+
+        try:
+            # return adc | cryostat
+            return np.logical_or.reduce((adc, cryostat, harps_comm, harps_fibers))
+        except UnboundLocalError:
+            return
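
`known_issues` moves from a module-level function into an `ISSUES` class, presumably so it can be attached to the RV object as a regular method; its logic is unchanged, with each helper returning a boolean mask and the results OR-ed together. Assuming `RV` mixes in `ISSUES` (not shown in this diff), the call would look like:

    from arvi import RV

    s = RV('HD10180')                             # example star
    bad = s.known_issues(mask=True, plot=False)   # combined mask, or None if
                                                  # no individual check succeeded
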
arvi/plots.py CHANGED
@@ -5,7 +5,7 @@ import numpy as np
 
 from astropy.timeseries import LombScargle
 
-from .setup_logger import logger
+from .setup_logger import setup_logger
 from .config import config
 from .stats import wmean
 
@@ -172,6 +172,7 @@ def plot(self, ax=None, show_masked=False, instrument=None, time_offset=0,
         Figure: the figure
         Axes: the axis
     """
+    logger = setup_logger()
     if self.N == 0:
         if self.verbose:
             logger.error('no data to plot')
@@ -230,13 +231,13 @@ def plot(self, ax=None, show_masked=False, instrument=None, time_offset=0,
                 _label = label
 
             if versus_n:
-                container = ax.errorbar(np.arange(1, s.mtime.size + 1), s.mvrad, s.msvrad,
-                                        label=_label, picker=True, marker=next(markers), zorder=next(zorders),
-                                        **kwargs)
+                x = np.arange(1, s.mtime.size + 1)
             else:
-                container = ax.errorbar(s.mtime - time_offset, s.mvrad, s.msvrad,
-                                        label=_label, picker=True, marker=next(markers), zorder=next(zorders),
-                                        **kwargs)
+                x = s.mtime - time_offset
+
+            container = ax.errorbar(x, s.mvrad, s.msvrad, label=_label,
+                                    picker=True, marker=next(markers), zorder=next(zorders), **kwargs)
+
 
             containers[inst] = list(container)
 
@@ -357,8 +358,6 @@ def plot(self, ax=None, show_masked=False, instrument=None, time_offset=0,
         plt.connect('pick_event', on_pick_point)
 
 
-
-
     if show_histogram:
         axh.legend()
 
@@ -408,6 +407,7 @@ def plot(self, ax=None, show_masked=False, instrument=None, time_offset=0,
 def plot_quantity(self, quantity, ax=None, show_masked=False, instrument=None,
                   time_offset=0, remove_50000=False, tooltips=False, show_legend=True,
                   N_in_label=False, **kwargs):
+    logger = setup_logger()
     if self.N == 0:
         if self.verbose:
             logger.error('no data to plot')
@@ -533,6 +533,7 @@ def gls(self, ax=None, label=None, instrument=None,
         Whether to adjust (subtract) the weighted means of each instrument.
         Default is `config.adjust_means_gls`.
     """
+    logger = setup_logger()
     if self.N == 0:
         if self.verbose:
             logger.error('no data to compute gls')
@@ -585,10 +586,15 @@ def gls(self, ax=None, label=None, instrument=None,
     maximum_frequency = kwargs.pop('maximum_frequency', 1.0)
     minimum_frequency = kwargs.pop('minimum_frequency', None)
     samples_per_peak = kwargs.pop('samples_per_peak', 10)
+    kw = {
+        'maximum_frequency': maximum_frequency,
+        'minimum_frequency': minimum_frequency,
+        'samples_per_peak': samples_per_peak
+    }
+
+    freq, power = gls.autopower(**kw)
 
-    freq, power = gls.autopower(maximum_frequency=maximum_frequency,
-                                minimum_frequency=minimum_frequency,
-                                samples_per_peak=samples_per_peak)
+    show_peak_fap = kwargs.pop('show_peak_fap', False)
 
     if ax is None:
         fig, ax = plt.subplots(1, 1, constrained_layout=True)
@@ -607,14 +613,22 @@ def gls(self, ax=None, label=None, instrument=None,
     if isinstance(fap, float):
         fap_level = fap
 
-    fap = gls.false_alarm_level(fap_level, method=fap_method)
+    fap = gls.false_alarm_level(fap_level, method=fap_method, **kw)
 
-    if fap > 0.05 and fap_method == 'baluev':
-        logger.warning('FAP is high (>5%), the analytical estimate may be underestimated. Using the bootstrap method instead.')
-        fap = gls.false_alarm_level(fap_level, method='bootstrap')
+    # if fap > 0.05 and fap_method == 'baluev':
+    #     logger.warning('FAP is high (>5%), the analytical estimate may be underestimated. Using the bootstrap method instead.')
+    #     fap = gls.false_alarm_level(fap_level, method='bootstrap')
 
     ax.axhline(fap, color='k', alpha=0.2, zorder=-1)
 
+    if show_peak_fap:
+        peak_per = 1/freq[np.argmax(power)]
+        peak_power = np.max(power)
+        peak_fap = gls.false_alarm_probability(peak_power, method=fap_method, **kw)
+        ax.plot(peak_per, peak_power, 'o', color='r', zorder=1)
+        ax.annotate(f'{peak_per:1.3f} days\nFAP: {peak_fap:1.1e}', (peak_per, peak_power),
+                    va='top', textcoords='offset points', xytext=(10, 0), zorder=1)
+
     ax.set(xlabel='Period [days]', ylabel='Normalized power', ylim=(0, None))
     ax.minorticks_on()
 
@@ -681,7 +695,7 @@ def gls_quantity(self, quantity, ax=None, instrument=None,
         Whether to adjust (subtract) the weighted means of each instrument.
         Default is `config.adjust_means_gls`.
     """
-
+    logger = setup_logger()
     if not hasattr(self, quantity):
         if self.verbose:
             logger.error(f"cannot find '{quantity}' attribute")
@@ -743,10 +757,17 @@ def gls_quantity(self, quantity, ax=None, instrument=None,
     else:
         fig = ax.figure
 
-    spp = kwargs.get('samples_per_peak', 5)
+    maximum_frequency = kwargs.pop('maximum_frequency', 1.0)
+    minimum_frequency = kwargs.pop('minimum_frequency', None)
+    samples_per_peak = kwargs.pop('samples_per_peak', 10)
+    kw = {
+        'maximum_frequency': maximum_frequency,
+        'minimum_frequency': minimum_frequency,
+        'samples_per_peak': samples_per_peak
+    }
 
     gls = LombScargle(t, y, ye)
-    freq, power = gls.autopower(maximum_frequency=1.0, samples_per_peak=spp)
+    freq, power = gls.autopower(**kw)
 
     ax.semilogx(1/freq, power, picker=picker, **kwargs)
 
@@ -755,11 +776,11 @@ def gls_quantity(self, quantity, ax=None, instrument=None,
     if isinstance(fap, float):
         fap_level = fap
 
-    fap = gls.false_alarm_level(fap_level, method=fap_method)
+    fap = gls.false_alarm_level(fap_level, method=fap_method, **kw)
 
-    if fap > 0.05 and fap_method == 'baluev':
-        logger.warning('FAP is high (>5%), the analytical estimate may be underestimated. Using the bootstrap method instead.')
-        fap = gls.false_alarm_level(fap_level, method='bootstrap')
+    # if fap > 0.05 and fap_method == 'baluev':
+    #     logger.warning('FAP is high (>5%), the analytical estimate may be underestimated. Using the bootstrap method instead.')
+    #     fap = gls.false_alarm_level(fap_level, method='bootstrap', **kw)
 
     ax.axhline(fap, color='k', alpha=0.2, zorder=-1)
 
@@ -794,6 +815,8 @@ def window_function(self, ax1=None, ax2=None, instrument=None, crosshair=False,
         crosshair (bool):
             If True, a crosshair will be drawn on the plot.
     """
+    logger = setup_logger()
+
     if self.N == 0:
         if self.verbose:
             logger.error('no data to compute window function')
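
Summing up the plots.py changes: in `gls` and `gls_quantity` the frequency-grid keywords are now collected into a `kw` dict passed both to `autopower` and to `false_alarm_level`, so the FAP threshold is evaluated on the same grid as the periodogram; the automatic bootstrap fallback for high FAPs is commented out; and the new `show_peak_fap` option marks the highest peak and annotates it with its period and false-alarm probability. A usage sketch, assuming `gls` is exposed as a method of the RV object as its `self` argument suggests (the star name is only an example):

    from arvi import RV

    s = RV('HD10180')
    s.gls(show_peak_fap=True, maximum_frequency=1.0, samples_per_peak=10)
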