arvi-0.2.8-py3-none-any.whl → arvi-0.2.9-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
arvi/dace_wrapper.py CHANGED
@@ -10,7 +10,7 @@ from .setup_logger import setup_logger
 from .utils import create_directory, all_logging_disabled, stdout_disabled, timer, tqdm
 
 
-def load_spectroscopy(user=None):
+def load_spectroscopy(user=None, verbose=True):
     logger = setup_logger()
     with all_logging_disabled():
         from dace_query.spectroscopy import SpectroscopyClass, Spectroscopy as default_Spectroscopy
@@ -19,7 +19,8 @@ def load_spectroscopy(user=None):
     from .config import config
     # requesting as public
     if config.request_as_public:
-        logger.warning('requesting DACE data as public')
+        if verbose:
+            logger.warning('requesting DACE data as public')
         with all_logging_disabled():
             dace = DaceClass(dace_rc_config_path='none')
         return SpectroscopyClass(dace_instance=dace)
@@ -168,7 +169,7 @@ def get_observations_from_instrument(star, instrument, user=None, main_id=None,
         dict:
             dictionary with data from DACE
     """
-    Spectroscopy = load_spectroscopy(user)
+    Spectroscopy = load_spectroscopy(user, verbose)
    found_dace_id = False
    with timer('dace_id query'):
        try:
@@ -283,7 +284,7 @@ def get_observations_from_instrument(star, instrument, user=None, main_id=None,
 def get_observations(star, instrument=None, user=None, main_id=None, verbose=True):
     logger = setup_logger()
     if instrument is None:
-        Spectroscopy = load_spectroscopy(user)
+        Spectroscopy = load_spectroscopy(user, verbose)
 
         try:
             with stdout_disabled(), all_logging_disabled():
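
The new `verbose` flag is threaded from `get_observations` into `load_spectroscopy`, so the "requesting DACE data as public" warning can now be silenced. A minimal usage sketch built only from the names visible in this diff; whether `config.request_as_public` is meant to be set directly like this is an assumption:

```python
# Sketch, not arvi documentation: fetch observations without DACE credentials
# and without the "requesting DACE data as public" warning.
from arvi.config import config
from arvi.dace_wrapper import get_observations

config.request_as_public = True   # assumed to be a settable flag, as its name suggests
data = get_observations('HD 10700', instrument='ESPRESSO', verbose=False)
# verbose=False is forwarded to load_spectroscopy(user, verbose),
# so the public-access warning shown in the hunk above is skipped.
```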
arvi/plots.py CHANGED
@@ -1,6 +1,7 @@
 from functools import partialmethod, wraps
 from itertools import cycle
 
+import matplotlib.pyplot as plt
 import numpy as np
 
 from astropy.timeseries import LombScargle
@@ -9,9 +10,6 @@ from .setup_logger import setup_logger
 from .config import config
 from .stats import wmean
 
-from .utils import lazy_import
-plt = lazy_import('matplotlib.pyplot')
-
 
 def plot_settings(func):
     @wraps(func)
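
plots.py now imports `matplotlib.pyplot` eagerly instead of going through `arvi.utils.lazy_import`. For reference, a lazy module import can be built from the standard library alone; this sketch follows the `importlib` documentation recipe and is not necessarily what the removed `lazy_import` helper did:

```python
# Standard-library lazy import recipe (importlib docs), shown only to illustrate
# the pattern that plots.py stops using in this release.
import importlib.util
import sys

def lazy_import(name):
    spec = importlib.util.find_spec(name)
    loader = importlib.util.LazyLoader(spec.loader)
    spec.loader = loader
    module = importlib.util.module_from_spec(spec)
    sys.modules[name] = module
    loader.exec_module(module)   # module body runs on first attribute access, not here
    return module

plt = lazy_import('matplotlib.pyplot')   # nothing imported yet
_ = plt.figure                           # first attribute access triggers the real import
```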
arvi/timeseries.py CHANGED
@@ -6,6 +6,8 @@ from glob import glob
 import warnings
 from copy import deepcopy
 from datetime import datetime, timezone
+
+# import lazy_loader as lazy
 import numpy as np
 
 from .setup_logger import setup_logger
@@ -24,10 +26,11 @@ from .HZ import getHZ_period
 from .instrument_specific import ISSUES
 from .reports import REPORTS
 from .utils import sanitize_path, strtobool, there_is_internet, timer, chdir
-from .utils import lazy_import
+# from .utils import lazy_import
 
-units = lazy_import('astropy.units')
-# from astropy import units
+# units = lazy_import('astropy.units')
+# units = lazy.load('astropy.units')
+from astropy import units
 
 class ExtraFields:
     @property
@@ -577,7 +580,8 @@ class RV(ISSUES, REPORTS):
             # --> not just in rhk and rhk_err...
             if data[arr].dtype == float and (bad := data[arr] == -99999).any():
                 data[arr][bad] = np.nan
-
+            if data[arr].dtype == float and (bad := data[arr] == -99).any():
+                data[arr][bad] = np.nan
             setattr(s, arr, data[arr][ind])
             s._quantities.append(arr)
 
@@ -1912,6 +1916,8 @@ class RV(ISSUES, REPORTS):
 
         # create copy of self to be returned
         snew = deepcopy(self)
+        # store original object
+        snew._unbinned = deepcopy(self)
 
         all_bad_quantities = []
 
@@ -2101,23 +2107,32 @@ class RV(ISSUES, REPORTS):
         if config.return_self:
             return self
 
-    def detrend(self, degree=1):
-        """ Detrend the RVs of all instruments """
+    def detrend(self, degree: int=1):
+        """
+        Detrend the RVs of all instruments using a polynomial of degree `degree`
+        """
         instrument_indices = np.unique_inverse(self.instrument_array).inverse_indices
-        def fun(p, t, degree, ninstruments, just_model=False, index=None):
+        instrument_indices_masked = np.unique_inverse(self.instrument_array[self.mask]).inverse_indices
+
+        def fun(p, t, degree, ninstruments, just_model=False, index=None, masked=True):
             polyp, offsets = p[:degree], p[-ninstruments:]
             polyp = np.r_[polyp, 0.0]
             if index is None:
-                model = offsets[instrument_indices] + np.polyval(polyp, t)
+                if masked:
+                    model = offsets[instrument_indices_masked] + np.polyval(polyp, t)
+                else:
+                    model = offsets[instrument_indices] + np.polyval(polyp, t)
             else:
                 model = offsets[index] + np.polyval(polyp, t)
             if just_model:
                 return model
             return self.mvrad - model
+
         coef = np.polyfit(self.mtime, self.mvrad, degree)
         x0 = np.append(coef, [0.0] * (len(self.instruments) - 1))
-        print(x0)
+        # print(x0)
         fun(x0, self.mtime, degree, len(self.instruments))
+
         from scipy.optimize import leastsq
         xbest, _ = leastsq(fun, x0, args=(self.mtime, degree, len(self.instruments)))
 
@@ -2127,12 +2142,13 @@ class RV(ISSUES, REPORTS):
         self.plot(ax=ax)
         for i, inst in enumerate(self.instruments):
             s = getattr(self, inst)
-            ax.plot(s.time, fun(xbest, s.time, degree, len(self.instruments), just_model=True, index=i),
+            ax.plot(s.time,
+                    fun(xbest, s.time, degree, len(self.instruments), just_model=True, index=i, masked=False),
                     color=f'C{i}')
         ax.set_title('original', loc='left', fontsize=10)
         ax.set_title(f'coefficients: {xbest[:degree]}', loc='right', fontsize=10)
 
-        self.add_to_vrad(-fun(xbest, self.time, degree, len(self.instruments), just_model=True))
+        self.add_to_vrad(-fun(xbest, self.time, degree, len(self.instruments), just_model=True, masked=False))
         ax = fig.add_subplot(2, 1, 2)
         self.plot(ax=ax)
         ax.set_title('detrended', loc='left', fontsize=10)
@@ -2141,7 +2157,7 @@ class RV(ISSUES, REPORTS):
         # axs[1].errorbar(self.mtime, fun(xbest, self.mtime, degree, len(self.instruments)), self.msvrad, fmt='o')
 
         return
-
+
 
 
 
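
`detrend` fits a single polynomial trend shared by all instruments plus one offset per instrument, then subtracts the trend from the RVs. A self-contained sketch of the same idea on synthetic data (plain NumPy/SciPy, not arvi code; all names below are made up):

```python
# Polynomial trend + per-instrument offsets, fit by least squares on fake data.
import numpy as np
from scipy.optimize import leastsq

rng = np.random.default_rng(0)
t = np.sort(rng.uniform(0, 100, 80))
inst = rng.integers(0, 2, size=t.size)            # two mock "instruments"
rv = 0.03 * t + np.array([0.0, 5.0])[inst] + rng.normal(0, 0.5, t.size)

degree, ninst = 1, 2

def residuals(p, t, rv, inst):
    polyp, offsets = p[:degree], p[-ninst:]
    model = offsets[inst] + np.polyval(np.r_[polyp, 0.0], t)   # no constant term: offsets absorb it
    return rv - model

x0 = np.append(np.polyfit(t, rv, degree)[:degree], np.zeros(ninst))
xbest, _ = leastsq(residuals, x0, args=(t, rv, inst))
detrended = rv - np.polyval(np.r_[xbest[:degree], 0.0], t)     # what add_to_vrad(-trend) does in spirit
print(xbest[:degree])                                          # recovered slope, close to 0.03
```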
@@ -2301,24 +2317,28 @@ class RV(ISSUES, REPORTS):
         self.units = new_units
 
 
-    def put_at_systemic_velocity(self):
+    def put_at_systemic_velocity(self, factor=1.0):
         """
-        For instruments in which mean(RV) < ptp(RV), "move" RVs to the systemic
-        velocity from simbad. This is useful if some instruments are centered
-        at zero while others are not, and instead of calling `.adjust_means()`,
-        but it only works when the systemic velocity is smaller than ptp(RV).
+        For instruments in which mean(RV) < `factor` * ptp(RV), "move" RVs to
+        the systemic velocity from simbad. This is useful if some instruments
+        are centered at zero while others are not, and instead of calling
+        `.adjust_means()`, but it only works when the systemic velocity is
+        smaller than `factor` * ptp(RV).
         """
         changed = False
         for inst in self.instruments:
+            changed_inst = False
             s = getattr(self, inst)
             if s.mask.any():
-                if np.abs(s.mvrad.mean()) < np.ptp(s.mvrad):
+                if np.abs(s.mvrad.mean()) < factor * np.ptp(s.mvrad):
                     s.vrad += self.simbad.rvz_radvel * 1e3
-                    changed = True
+                    changed = changed_inst = True
             else: # all observations are masked, use non-masked arrays
-                if np.abs(s.vrad.mean()) < np.ptp(s.vrad):
+                if np.abs(s.vrad.mean()) < factor * np.ptp(s.vrad):
                     s.vrad += self.simbad.rvz_radvel * 1e3
-                    changed = True
+                    changed = changed_inst = True
+            if changed_inst and self.verbose:
+                logger.info(f"putting {inst} RVs at systemic velocity")
         if changed:
             self._build_arrays()
 
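
The `factor` argument only rescales the existing test: an instrument is shifted to the SIMBAD systemic velocity when |mean(RV)| < factor * ptp(RV). A quick numeric illustration with plain NumPy (the arrays below are invented):

```python
# Illustration of the mean/ptp test used by put_at_systemic_velocity.
import numpy as np

rv_centered = np.array([-3.0, 1.0, 2.0, -1.5])   # instrument already centered near zero
rv_absolute = rv_centered + 35_000.0             # instrument at ~35 km/s (values in m/s)

factor = 1.0
for rv in (rv_centered, rv_absolute):
    shift = np.abs(rv.mean()) < factor * np.ptp(rv)   # True -> add the systemic velocity
    print(shift)
# True for the centered series, False for the one already near the systemic velocity;
# raising `factor` relaxes the test.
```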
@@ -2341,33 +2361,66 @@ class RV(ISSUES, REPORTS):
         self._build_arrays()
 
 
-    def save(self, directory=None, instrument=None, full=False, postfix=None,
-             save_masked=False, save_nans=True):
-        """ Save the observations in .rdb files.
+    def save(self, directory=None, instrument=None, format='rdb',
+             indicators=False, join_instruments=False, postfix=None,
+             save_masked=False, save_nans=True, **kwargs):
+        """ Save the observations in .rdb or .csv files.
 
         Args:
             directory (str, optional):
                 Directory where to save the .rdb files.
             instrument (str, optional):
                 Instrument for which to save observations.
-            full (bool, optional):
-                Save just RVs and errors (False) or more indicators (True).
+            format (str, optional):
+                Format to use ('rdb' or 'csv').
+            indicators (bool, str, list[str], optional):
+                Save only RVs and errors (False) or more indicators. If True,
+                use a default list, if `str`, use an existing list, if list[str]
+                provide a sequence of specific indicators.
+            join_instruments (bool, optional):
+                Join all instruments in a single file.
             postfix (str, optional):
                 Postfix to add to the filenames ([star]_[instrument]_[postfix].rdb).
+            save_masked (bool, optional)
+                If True, also save masked observations (those for which
+                self.mask == True)
             save_nans (bool, optional)
                 Whether to save NaN values in the indicators, if they exist. If
                 False, the full observation which contains NaN values is not saved.
         """
+        if format not in ('rdb', 'csv'):
+            logger.error(f"format must be 'rdb' or 'csv', got '{format}'")
+            return
+
         star_name = self.star.replace(' ', '')
 
-        if directory is None:
-            directory = '.'
-        else:
+        if directory is not None:
             os.makedirs(directory, exist_ok=True)
 
+        indicator_sets = {
+            "default": [
+                "fwhm", "fwhm_err",
+                "bispan", "bispan_err",
+                "contrast", "contrast_err",
+                "rhk", "rhk_err",
+                "berv",
+            ],
+            "CORALIE": [
+                "fwhm", "fwhm_err",
+                "bispan", "bispan_err",
+                "contrast", "contrast_err",
+                "haindex", "haindex_err",
+                "berv",
+            ],
+        }
+
+        if 'full' in kwargs:
+            logger.warning('argument `full` is deprecated, use `indicators` instead')
+            indicators = kwargs['full']
+
         files = []
 
-        for inst in self.instruments:
+        for _i, inst in enumerate(self.instruments):
             if instrument is not None:
                 if instrument not in inst:
                     continue
@@ -2377,75 +2430,95 @@ class RV(ISSUES, REPORTS):
             if not _s.mask.any(): # all observations are masked, don't save
                 continue
 
-            if full:
-                if save_masked:
-                    arrays = [
-                        _s.time, _s.vrad, _s.svrad,
-                        _s.fwhm, _s.fwhm_err,
-                        _s.bispan, _s.bispan_err,
-                        _s.contrast, _s.contrast_err,
-                        _s.rhk, _s.rhk_err,
-                        _s.berv,
-                    ]
-                else:
-                    arrays = [
-                        _s.mtime, _s.mvrad, _s.msvrad,
-                        _s.fwhm[_s.mask], _s.fwhm_err[_s.mask],
-                        _s.bispan[_s.mask], _s.bispan_err[_s.mask],
-                        _s.contrast[_s.mask], _s.contrast_err[_s.mask],
-                        _s.rhk[_s.mask], _s.rhk_err[_s.mask],
-                        _s.berv[_s.mask],
-                    ]
-                if not save_nans:
-                    raise NotImplementedError
-                    # if np.isnan(d).any():
-                    #     # remove observations where any of the indicators are # NaN
-                    #     nan_mask = np.isnan(d[:, 3:]).any(axis=1)
-                    #     d = d[~nan_mask]
-                    #     if self.verbose:
-                    #         logger.warning(f'masking {nan_mask.sum()} observations with NaN in indicators')
-
-                header = '\t'.join(['rjd', 'vrad', 'svrad',
-                                    'fwhm', 'sig_fwhm',
-                                    'bispan', 'sig_bispan',
-                                    'contrast', 'sig_contrast',
-                                    'rhk', 'sig_rhk',
-                                    'berv',
-                                    ])
-                header += '\n'
-                header += '\t'.join(['-' * len(c) for c in header.strip().split('\t')])
+            if save_masked:
+                arrays = [_s.time, _s.vrad, _s.svrad]
+                if join_instruments:
+                    arrays += [_s.instrument_array]
+            else:
+                arrays = [_s.mtime, _s.mvrad, _s.msvrad]
+                if join_instruments:
+                    arrays += [_s.instrument_array[_s.mask]]
+
+            if indicators in (False, None):
+                indicator_names = []
+            else:
+                if indicators is True:
+                    indicator_names = indicator_sets["default"]
+                elif isinstance(indicators, str):
+                    try:
+                        indicator_names = indicator_sets[indicators]
+                    except KeyError:
+                        logger.error(f"unknown indicator set '{indicators}'")
+                        logger.error(f"available: {list(indicator_sets.keys())}")
+                        return
+                elif isinstance(indicators, list) and all(isinstance(i, str) for i in indicators):
+                    indicator_names = indicators
 
+            if save_masked:
+                arrays += [getattr(_s, ind) for ind in indicator_names]
             else:
-                if save_masked:
-                    arrays = [_s.time, _s.vrad, _s.svrad]
-                else:
-                    arrays = [_s.mtime, _s.mvrad, _s.msvrad]
+                arrays += [getattr(_s, ind)[_s.mask] for ind in indicator_names]
+
+            d = np.stack(arrays, axis=1)
+            if not save_nans:
+                # raise NotImplementedError
+                if np.isnan(d).any():
+                    # remove observations where any of the indicators are # NaN
+                    nan_mask = np.isnan(d[:, 3:]).any(axis=1)
+                    d = d[~nan_mask]
+                    if self.verbose:
+                        msg = f'{inst}: masking {nan_mask.sum()} observations with NaN in indicators'
+                        logger.warning(msg)
 
-            # d = np.stack(arrays, axis=1)
-            header = 'rjd\tvrad\tsvrad\n---\t----\t-----'
+            cols = ['rjd', 'vrad', 'svrad']
+            cols += ['inst'] if join_instruments else []
+            cols += indicator_names
+
+            if format == 'rdb':
+                header = '\t'.join(cols)
+                header += '\n'
+                header += '\t'.join(['-' * len(c) for c in header.strip().split('\t')])
+            else:
+                header = ','.join(cols)
 
-            file = f'{star_name}_{inst}.rdb'
-            if postfix is not None:
-                file = f'{star_name}_{inst}_{postfix}.rdb'
+            if join_instruments:
+                file = f'{star_name}.{format}'
+                if postfix is not None:
+                    file = f'{star_name}_{postfix}.{format}'
+            else:
+                file = f'{star_name}_{inst}.{format}'
+                if postfix is not None:
+                    file = f'{star_name}_{inst}_{postfix}.{format}'
 
+            if directory is not None:
+                file = os.path.join(directory, file)
             files.append(file)
-            file = os.path.join(directory, file)
 
             N = len(arrays[0])
-            with open(file, 'w') as f:
-                f.write(header + '\n')
+            with open(file, 'a' if join_instruments and _i != 0 else 'w') as f:
+                if join_instruments and _i != 0:
+                    pass
+                else:
+                    f.write(header + '\n')
+
                 for i in range(N):
                     for j, a in enumerate(arrays):
                         f.write(str(a[i]))
                         if j < len(arrays) - 1:
-                            f.write('\t')
+                            f.write('\t' if format == 'rdb' else ',')
                     f.write('\n')
 
             # np.savetxt(file, d, header=header, delimiter='\t', comments='', fmt='%f')
 
-            if self.verbose:
+            if self.verbose and not join_instruments:
                 logger.info(f'saving to {file}')
 
+        if self.verbose and join_instruments:
+            logger.info(f'saving to {files[0]}')
+
+        if join_instruments:
+            files = [files[0]]
+
         return files
 
     def checksum(self, write_to=None):
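
The reworked `save` replaces the old `full=` switch with `format`, `indicators` and `join_instruments`; passing `full=` still works through `**kwargs` but logs a deprecation warning. A usage sketch with the keywords taken from this diff; the `RV('HD 10700')` construction is an assumed entry point, not something shown here:

```python
# Sketch of the new save() options; only the keyword names are taken from the diff.
from arvi import RV   # assumed top-level import of the RV class

s = RV('HD 10700')

s.save(directory='output')                          # one .rdb per instrument, RVs and errors only
s.save(directory='output', format='csv',
       indicators=True, join_instruments=True)      # single CSV, 'inst' column, default indicators
s.save(directory='output', indicators='CORALIE')    # named indicator set (haindex instead of rhk)
s.save(full=True)                                   # deprecated spelling, mapped to indicators=True
```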
arvi/utils.py CHANGED
@@ -2,6 +2,7 @@ import os
 import sys
 import time
 from contextlib import contextmanager
+
 try:
     from unittest.mock import patch
 except ImportError:
@@ -98,8 +99,9 @@ def sanitize_path(path):
     path = path.replace('*', '_')
     return path
 
-def pretty_print_table(rows, line_between_rows=True, logger=None):
-    """
+def pretty_print_table(rows, line_between_rows=True, string=False,
+                       markdown=False, latex=False, logger=None):
+    r"""
     Example Output
     ┌──────┬─────────────┬────┬───────┐
     │ True │ short       │ 77 │ catty │
@@ -110,25 +112,76 @@ def pretty_print_table(rows, line_between_rows=True, logger=None):
     └──────┴─────────────┴────┴───────┘
     """
     _print = logger.info if logger else print
+    if string:
+        def _print(x, s):
+            s += x + '\n'
+            return s
+    else:
+        if logger:
+            def _print(x, _):
+                logger.info(x)
+        else:
+            def _print(x, _):
+                print(x)
+
+    if latex or markdown:
+        line_between_rows = False
+
+    s = ''
 
     # find the max length of each column
     max_col_lens = list(map(max, zip(*[(len(str(cell)) for cell in row) for row in rows])))
 
+    if markdown:
+        bar_char = '|'
+    else:
+        bar_char = r'│'
+
     # print the table's top border
-    _print('┌' + '┬'.join('─' * (n + 2) for n in max_col_lens) + '┐')
+    if markdown:
+        pass
+    elif latex:
+        s = _print(r'\begin{table*}', s)
+        # s = _print(r'\centering', s)
+        s = _print(r'\begin{tabular}' + '{' + ' c ' * len(rows[0]) + '}', s)
+    else:
+        s = _print(r'┌' + r'┬'.join(r'─' * (n + 2) for n in max_col_lens) + r'┐', s)
 
-    rows_separator = '├' + '┼'.join('─' * (n + 2) for n in max_col_lens) + '┤'
+    if markdown:
+        header_separator = bar_char + bar_char.join('-' * (n + 2) for n in max_col_lens) + bar_char
 
-    row_fstring = ' '.join("{: <%s}" % n for n in max_col_lens)
+    rows_separator = r'├' + r'┼'.join(r'─' * (n + 2) for n in max_col_lens) + r'┤'
+
+    if latex:
+        row_fstring = ' & '.join("{: <%s}" % n for n in max_col_lens)
+    else:
+        row_fstring = bar_char.center(3).join("{: <%s}" % n for n in max_col_lens)
 
     for i, row in enumerate(rows):
-        _print('│ ' + row_fstring.format(*map(str, row)) + ' │')
+        if markdown and i == 1:
+            s = _print(header_separator, s)
+
+        if latex:
+            s = _print(row_fstring.format(*map(str, row)) + r' \\', s)
+        else:
+            s = _print(bar_char + ' ' + row_fstring.format(*map(str, row)) + ' ' + bar_char, s)
 
+
         if line_between_rows and i < len(rows) - 1:
-            _print(rows_separator)
+            s = _print(rows_separator, s)
+
 
     # print the table's bottom border
-    _print('└' + '┴'.join('─' * (n + 2) for n in max_col_lens) + '┘')
+    if markdown:
+        pass
+    elif latex:
+        s = _print(r'\end{tabular}', s)
+        s = _print(r'\end{table*}', s)
+    else:
+        s = _print(r'└' + r'┴'.join(r'─' * (n + 2) for n in max_col_lens) + r'┘', s)
+
+    if string:
+        return s
 
 
 def strtobool(val):
@@ -238,3 +291,28 @@ def get_object_fast(file):
         value = f.read(20)
     return value.decode().split("'")[1].strip()
 
+
+def get_simbad_oid(self):
+    import requests
+    if isinstance(self, str):
+        star = self
+    else:
+        star = self.star
+    oid = requests.post('https://simbad.cds.unistra.fr/simbad/sim-tap/sync',
+                        data=dict(format='text', request='doQuery', lang='adql', phase='run',
+                                  query=f"SELECT basic.OID FROM basic JOIN ident ON oidref = oid WHERE id = '{star}';"))
+    oid = oid.text.split()[-1]
+    return oid
+
+
+
+# from https://stackoverflow.com/questions/37765197/darken-or-lighten-a-color-in-matplotlib
+def adjust_lightness(color, amount=0.5):
+    import matplotlib.colors as mc
+    import colorsys
+    try:
+        c = mc.cnames[color]
+    except KeyError:
+        c = color
+    c = colorsys.rgb_to_hls(*mc.to_rgb(c))
+    return colorsys.hls_to_rgb(c[0], max(0, min(1, amount * c[1])), c[2])
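
`pretty_print_table` can now emit Markdown or LaTeX and, with `string=True`, return the table instead of printing it. A small usage sketch; the example rows are invented, the keyword names come from the diff:

```python
# Usage sketch for the extended pretty_print_table.
from arvi.utils import pretty_print_table

rows = [
    ['instrument', 'N', 'rms (m/s)'],
    ['ESPRESSO', 120, 1.3],
    ['HARPS', 84, 2.1],
]

pretty_print_table(rows)                                    # box-drawing table to stdout
md = pretty_print_table(rows, markdown=True, string=True)   # Markdown table, returned as a string
tex = pretty_print_table(rows, latex=True, string=True)     # LaTeX tabular, returned as a string
print(md)
```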
{arvi-0.2.8.dist-info → arvi-0.2.9.dist-info}/METADATA RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: arvi
-Version: 0.2.8
+Version: 0.2.9
 Summary: The Automated RV Inspector
 Author-email: João Faria <joao.faria@unige.ch>
 License: MIT
{arvi-0.2.8.dist-info → arvi-0.2.9.dist-info}/RECORD RENAMED
@@ -4,7 +4,7 @@ arvi/ariadne_wrapper.py,sha256=YvilopJa9T4NwPcj3Nah_U8smSeSAU5-HYZMb_GJ-BQ,2232
 arvi/berv.py,sha256=eKnpuPC1w45UrUEyFRbs9F9j3bXz3kxYzNXbnRgvFQM,17596
 arvi/binning.py,sha256=NK9y9bUrdyWCbh79LkcRABHG-n5MtlETMHMvLj1z-OM,15437
 arvi/config.py,sha256=JkHSwF-EEqwwbcc8thGgbFc9udDZPjQH-9XFjqDepBY,2337
-arvi/dace_wrapper.py,sha256=zgA8cT5eqYvHVMBfBO8aqua24FDfDilnDdPA3AajVew,25896
+arvi/dace_wrapper.py,sha256=08hNZMCt2Kd9x8BvJHZHml4cDCcbBsY1lBheuXk9yGk,25952
 arvi/exofop_wrapper.py,sha256=8S7UEcrBAgANIweMV0-CvaWaVTPgGVo8vQQk_KRa0nU,2414
 arvi/extra_data.py,sha256=Xi65pI5kkzqlMmHGl9xFoumtH699611pJJ5PV-a_IfU,3397
 arvi/gaia_wrapper.py,sha256=HTuigIduin3raWfSC7QYuQxDk2dEXYH_4egRkzzg7Xw,4379
@@ -13,7 +13,7 @@ arvi/instrument_specific.py,sha256=ycLhtT3oeNtSREm9bmWICaT8uureYcl3NFzbdDYRMVY,1
 arvi/kima_wrapper.py,sha256=GrAZWkDCg8ukhW41M1VTadSbab0GBa6BIzjtAtvjk58,3891
 arvi/lbl_wrapper.py,sha256=_ViGVkpakvuBR_xhu9XJRV5EKHpj5Go6jBZGJZMIS2Y,11850
 arvi/nasaexo_wrapper.py,sha256=mWt7eHgSZe4MBKCmUvMPTyUPGuiwGTqKugNBvmjOg9s,7306
-arvi/plots.py,sha256=fHc6ScATCzvM4KQ77TYfHYmY6HSZ4N4oMYsLEUvxJpU,35279
+arvi/plots.py,sha256=U4VUNyIx4h_rEFd7ZWgBcawUcIGcURES0A4VXIBKp3U,35240
 arvi/programs.py,sha256=M8o8hXr6W22dMiIX3Nxz4pgb8lsJXApDlq7HStyTfqs,9047
 arvi/reports.py,sha256=CKmtg5rewMyT26gbWeoZDYrL0z5Sbb6cTJry0HWk_rs,7445
 arvi/setup_logger.py,sha256=dHzO2gPjw6CaKWpYZd2f83z09tmxgi--qpp7k1jROjI,615
@@ -22,17 +22,17 @@ arvi/sophie_wrapper.py,sha256=KUeWccXud5_Lrx72S1HSemHIZRdjd2oLvqyofwsL0QQ,3440
 arvi/spectra.py,sha256=ebF1ocodTastLx0CyqLSpE8EZNDXBF8riyfxMr3L6H0,7491
 arvi/stats.py,sha256=ilzzGL9ew-SyVa9eEdrYCpD3DliOAwhoNUg9LIlHjzU,2583
 arvi/stellar.py,sha256=GQ7yweuBRnfkJ0M5eWjvLd8uvGq_by81PbXfidBvWis,4918
-arvi/timeseries.py,sha256=HUsJE5F6-urGNjt5Q-SX0IeREM_vo9M8WdXZ2WR24b8,97260
+arvi/timeseries.py,sha256=9-EjmhMFMo8IJ_Erqf5SnEeKi4J2x0Pt518V30jQ8bw,100162
 arvi/translations.py,sha256=PUSrn4zvYO2MqGzUxlFGwev_tBkgJaJrIYs6NKHzbWo,951
-arvi/utils.py,sha256=EY4hdwGcTUZg_tPT3yQ7ShLIVm9dAfmJC0c7toAVSKI,7221
+arvi/utils.py,sha256=x_zDTW1vp672CZe-m9-KXo5IVk-JKOs2wme_ta4t8MU,9402
 arvi/data/info.svg,sha256=0IMI6W-eFoTD8acnury79WJJakpBwLa4qKS4JWpsXiI,489
 arvi/data/obs_affected_ADC_issues.dat,sha256=tn93uOL0eCTYhireqp1wG-_c3CbxPA7C-Rf-pejVY8M,10853
 arvi/data/obs_affected_blue_cryostat_issues.dat,sha256=z4AK17xfz8tGTDv1FjRvQFnio4XA6PNNfDXuicewHk4,1771
 arvi/data/extra/HD86226_PFS1.rdb,sha256=vfAozbrKHM_j8dYkCBJsuHyD01KEM1asghe2KInwVao,3475
 arvi/data/extra/HD86226_PFS2.rdb,sha256=F2P7dB6gVyzCglUjNheB0hIHVClC5RmARrGwbrY1cfo,4114
 arvi/data/extra/metadata.json,sha256=C69hIw6CohyES6BI9vDWjxwSz7N4VOYX0PCgjXtYFmU,178
-arvi-0.2.8.dist-info/licenses/LICENSE,sha256=6JfQgl7SpM55t0EHMFNMnNh-AdkpGW25MwMiTnhdWQg,1068
-arvi-0.2.8.dist-info/METADATA,sha256=f3ma5e17xovCE6ImErgCAkYANL6wRvaXbqN65xVZExw,1932
-arvi-0.2.8.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-arvi-0.2.8.dist-info/top_level.txt,sha256=4EeiKDVLD45ztuflTGfQ3TU8GVjJg5Y95xS5XjI-utU,5
-arvi-0.2.8.dist-info/RECORD,,
+arvi-0.2.9.dist-info/licenses/LICENSE,sha256=6JfQgl7SpM55t0EHMFNMnNh-AdkpGW25MwMiTnhdWQg,1068
+arvi-0.2.9.dist-info/METADATA,sha256=KLtdToTvrCF-TT0P04dctgqKRHH4eTOL47hspjj8qaU,1932
+arvi-0.2.9.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+arvi-0.2.9.dist-info/top_level.txt,sha256=4EeiKDVLD45ztuflTGfQ3TU8GVjJg5Y95xS5XjI-utU,5
+arvi-0.2.9.dist-info/RECORD,,