timewise 1.0.0a6-py3-none-any.whl → 1.0.0a7-py3-none-any.whl

ampel/timewise/alert/TimewiseAlertSupplier.py CHANGED
@@ -16,11 +16,12 @@ import pandas as pd
 from bson import encode
 
 from ampel.alert.AmpelAlert import AmpelAlert
+from ampel.base.AmpelABC import AmpelABC
 from ampel.alert.BaseAlertSupplier import BaseAlertSupplier
 from ampel.view.ReadOnlyDict import ReadOnlyDict
 
 
-class TimewiseAlertSupplier(BaseAlertSupplier):
+class TimewiseAlertSupplier(BaseAlertSupplier, AmpelABC):
     """
     Iterable class that, for each transient name provided by the underlying alert_loader
     returns a PhotoAlert instance.
ampel/timewise/alert/load/TimewiseFileLoader.py CHANGED
@@ -10,14 +10,16 @@ from typing import Dict, get_args
 
 import numpy as np
 import pandas as pd
-from astropy.table import Table, vstack
+from astropy.table import vstack
 from ampel.abstract.AbsAlertLoader import AbsAlertLoader
+from ampel.base.AmpelABC import AmpelABC
 from timewise.tables import TableType
 from timewise.config import TimewiseConfig
 from timewise.types import TaskID
+from timewise.util.path import expand
 
 
-class TimewiseFileLoader(AbsAlertLoader[Dict]):
+class TimewiseFileLoader(AbsAlertLoader[Dict], AmpelABC):
     """
     Load alerts from one or more files.
     """
@@ -33,8 +35,10 @@ class TimewiseFileLoader(AbsAlertLoader[Dict]):
     def __init__(self, **kwargs) -> None:
         super().__init__(**kwargs)
 
-        self.logger.info(f"loading timewise config file {self.timewise_config_file}")
-        timewise_config = TimewiseConfig.from_yaml(self.timewise_config_file)
+        expanded_config_file = expand(self.timewise_config_file)
+
+        self.logger.info(f"loading timewise config file {expanded_config_file}")
+        timewise_config = TimewiseConfig.from_yaml(expanded_config_file)
         dl = timewise_config.download.build_downloader()
         self._timewise_backend = dl.backend
 
ampel/timewise/ingest/TiCompilerOptions.py CHANGED
@@ -9,9 +9,10 @@
 from typing import Any
 
 from ampel.model.ingest.CompilerOptions import CompilerOptions
+from ampel.base.AmpelABC import AmpelABC
 
 
-class TiCompilerOptions(CompilerOptions):
+class TiCompilerOptions(CompilerOptions, AmpelABC):
     stock: dict[str, Any] = {"tag": "TIMEWISE"}
     t0: dict[str, Any] = {"tag": "TIMEWISE"}
     t1: dict[str, Any] = {"tag": "TIMEWISE"}
ampel/timewise/t1/T1HDBSCAN.py CHANGED
@@ -10,8 +10,6 @@ from typing import Iterable, Sequence
 
 import numpy as np
 from numpy import typing as npt
-from ampel.base.AuxUnitRegister import AuxUnitRegister
-from astropy.coordinates.angle_utilities import angular_separation, position_angle
 from sklearn.cluster import HDBSCAN
 from pymongo import MongoClient
 
@@ -20,11 +18,21 @@ from ampel.struct.T1CombineResult import T1CombineResult
 from ampel.types import DataPointId
 from ampel.abstract.AbsT1CombineUnit import AbsT1CombineUnit
 from ampel.model.UnitModel import UnitModel
+from ampel.base.AuxUnitRegister import AuxUnitRegister
 
 from ampel.timewise.util.pdutil import datapoints_to_dataframe
 from ampel.timewise.util.AuxDiagnosticPlotter import AuxDiagnosticPlotter
 from timewise.process import keys
 
+from importlib.util import find_spec
+
+if find_spec("astropy.coordinates.angle_utilities"):
+    # astropy < v6.0.0
+    from astropy.coordinates.angle_utilities import angular_separation, position_angle
+else:
+    # astropy >= v6.0.0
+    from astropy.coordinates.angles import angular_separation, position_angle
+
 
 class T1HDBSCAN(AbsT1CombineUnit):
     input_mongo_db_name: str
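
Note: astropy moved angular_separation and position_angle out of astropy.coordinates.angle_utilities in v6.0.0; the find_spec guard above simply picks whichever location exists. A minimal sketch of calling the shimmed function, not taken from the package itself (plain radian floats; values illustrative):

    import numpy as np
    from importlib.util import find_spec

    # same guard as in T1HDBSCAN above
    if find_spec("astropy.coordinates.angle_utilities"):
        from astropy.coordinates.angle_utilities import angular_separation
    else:
        from astropy.coordinates.angles import angular_separation

    # two points one degree apart in longitude on the equator
    sep = angular_separation(0.0, 0.0, np.radians(1.0), 0.0)
    print(np.degrees(sep))  # ~1.0
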
ampel/timewise/t1/TimewiseFilter.py CHANGED
@@ -36,12 +36,12 @@ class TimewiseFilter(AbsAlertFilter):
         visits, counts = np.unique(visit_map, return_counts=True)
         visit_passed = counts >= self.det_per_visit
         if not all(visit_passed):
-            self.logger.info(None, extra={"min_det_per_visit": min(counts).item()})
+            self.logger.debug(None, extra={"min_det_per_visit": min(counts).item()})
             return None
 
         # enough visits
         if not len(visits) >= self.n_visits:
-            self.logger.info(None, extra={"n_visits": len(visits)})
+            self.logger.debug(None, extra={"n_visits": len(visits)})
             return None
 
         return True
ampel/timewise/t2/T2StackVisits.py CHANGED
@@ -6,6 +6,7 @@
 # Date: 24.09.2025
 # Last Modified Date: 24.09.2025
 # Last Modified By: Jannis Necker <jannis.necker@gmail.com>
+from typing import Literal
 from scipy import stats
 
 from ampel.abstract.AbsLightCurveT2Unit import AbsLightCurveT2Unit
@@ -32,6 +33,16 @@ class T2StackVisits(AbsLightCurveT2Unit):
     # threshold above which to exclude outliers
     outlier_threshold: float = 5
 
+    # methods to calculate mean and std
+    mean_name: Literal["mean", "median"] = "median"
+    std_name: Literal["std", "sdom", "sdom-1"] = "sdom-1"
+    correction_name: Literal["debias", "tdist", "none"] = "tdist"
+
+    # see timewise.process.stacking
+    calculate_pvalues: bool = False
+    use_single_exposure_errors: bool = True
+    median_zeropoint_per_visit: bool = True
+
     def process(self, light_curve: LightCurve) -> UBson | UnitResult:
         columns = [
             "ra",
@@ -53,4 +64,10 @@ class T2StackVisits(AbsLightCurveT2Unit):
             outlier_threshold=self.outlier_threshold,
             outlier_quantile=self.outlier_quantile,
             clean_outliers=self.clean_outliers,
+            mean_name=self.mean_name,
+            std_name=self.std_name,
+            correction_name=self.correction_name,
+            calculate_pvalues=self.calculate_pvalues,
+            use_single_exposure_errors=self.use_single_exposure_errors,
+            median_zeropoint_per_visit=self.median_zeropoint_per_visit,
         ).to_dict(orient="records")
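
Note: in an AMPEL job these fields end up in the T2 unit's config block. A sketch of the new options as a plain Python dict (the config structure is an assumption; field names and defaults are from this diff):

    t2_config = {
        "outlier_threshold": 5,
        "mean_name": "median",       # or "mean"
        "std_name": "sdom-1",        # "std", "sdom", or "sdom-1"
        "correction_name": "tdist",  # "debias", "tdist", or "none"
        "calculate_pvalues": True,   # adds the kstest_norm_pvalue columns
        "use_single_exposure_errors": True,
        "median_zeropoint_per_visit": True,
    }
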
ampel/timewise/util/AuxDiagnosticPlotter.py CHANGED
@@ -5,13 +5,14 @@ from numpy import typing as npt
 
 from ampel.plot.create import create_plot_record
 from ampel.base.AmpelBaseModel import AmpelBaseModel
+from ampel.base.AmpelABC import AmpelABC
 from ampel.model.PlotProperties import PlotProperties
 from ampel.content.NewSVGRecord import NewSVGRecord
 
 from timewise.plot.diagnostic import DiagnosticPlotter
 
 
-class AuxDiagnosticPlotter(AmpelBaseModel):
+class AuxDiagnosticPlotter(AmpelBaseModel, AmpelABC):
     plot_properties: PlotProperties
     cutout: Literal["sdss", "panstarrs"] = DiagnosticPlotter.model_fields[
         "cutout"
timewise/__init__.py CHANGED
@@ -1 +1 @@
-__version__ = "1.0.0a6"
+__version__ = "1.0.0a7"
timewise/backend/filesystem.py CHANGED
@@ -6,6 +6,7 @@ from astropy.table import Table
 
 from .base import Backend
 from ..types import TaskID
+from ..util.path import expand
 
 
 logger = logging.getLogger(__name__)
@@ -15,17 +16,21 @@ class FileSystemBackend(Backend):
     type: Literal["filesystem"] = "filesystem"
     base_path: Path
 
+    @property
+    def expanded_path(self) -> Path:
+        return expand(self.base_path)
+
     # ----------------------------
     # Helpers for paths
     # ----------------------------
     def _meta_path(self, task: TaskID) -> Path:
-        return self.base_path / f"{task}.meta.json"
+        return self.expanded_path / f"{task}.meta.json"
 
     def _marker_path(self, task: TaskID) -> Path:
-        return self.base_path / f"{task}.ok"
+        return self.expanded_path / f"{task}.ok"
 
     def _data_path(self, task: TaskID) -> Path:
-        return self.base_path / f"{task}.fits"
+        return self.expanded_path / f"{task}.fits"
 
     # ----------------------------
     # Metadata
timewise/chunking.py CHANGED
@@ -1,20 +1,28 @@
-from typing import Iterator
+import logging
+from functools import cached_property
 from pathlib import Path
+from typing import Iterator
+
 import numpy as np
-from numpy import typing as npt
 import pandas as pd
-import logging
+from numpy import typing as npt
 
 logger = logging.getLogger(__name__)
 
 
 class Chunk:
     def __init__(
-        self, chunk_id: int, indices: npt.ArrayLike, row_indices: npt.ArrayLike
+        self, chunk_id: int, input_csv, row_indices: npt.NDArray[np.int_]
     ):
         self.chunk_id = chunk_id
-        self.indices = indices
         self.row_numbers = row_indices
+        self.input_csv = input_csv
+
+    @cached_property
+    def indices(self) -> pd.Index:
+        start = min(self.row_numbers)
+        stop = max(self.row_numbers) + 1
+        return pd.read_csv(self.input_csv, skiprows=start, nrows=stop - start).index
 
 
 class Chunker:
@@ -45,6 +53,5 @@ class Chunker:
             raise IndexError(f"Invalid chunk_id {chunk_id}")
         start = chunk_id * self.chunk_size
         stop = min(start + self.chunk_size, self._n_rows)
-        indices = pd.read_csv(self.input_csv, skiprows=start, nrows=stop - start).index
         logger.debug(f"chunk {chunk_id}: from {start} to {stop}")
-        return Chunk(chunk_id, indices, np.arange(start=start, stop=stop))
+        return Chunk(chunk_id, self.input_csv, np.arange(start=start, stop=stop))
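
Note: Chunk.indices is now computed lazily. Constructing a Chunk only stores the CSV path and row numbers; the file is read on first access to .indices and memoized by functools.cached_property. A usage sketch (file name illustrative):

    import numpy as np
    from timewise.chunking import Chunk

    chunk = Chunk(chunk_id=0, input_csv="targets.csv", row_indices=np.arange(1000))
    idx = chunk.indices  # first access reads the CSV slice
    idx = chunk.indices  # cached: no second read
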
timewise/io/config.py CHANGED
@@ -7,6 +7,7 @@ from .download import Downloader
 from ..query import QueryType
 from ..backend import BackendType
 from ..types import TYPE_MAP
+from ..util.path import expand
 
 
 class DownloadConfig(BaseModel):
@@ -19,15 +20,19 @@ class DownloadConfig(BaseModel):
 
     service_url: str = "https://irsa.ipac.caltech.edu/TAP"
 
+    @property
+    def expanded_input_csv(self) -> Path:
+        return expand(self.input_csv)
+
     @model_validator(mode="after")
     def validate_input_csv_columns(self) -> "DownloadConfig":
         """Ensure that the input CSV contains all columns required by queries."""
         # only validate if the CSV actually exists
-        if not self.input_csv.exists():
-            raise ValueError(f"CSV file does not exist: {self.input_csv}")
+        if not self.expanded_input_csv.exists():
+            raise ValueError(f"CSV file does not exist: {self.expanded_input_csv}")
 
         # read just the header and first 10 lines
-        input_table = pd.read_csv(self.input_csv, nrows=10)
+        input_table = pd.read_csv(self.expanded_input_csv, nrows=10)
 
         missing_columns = set()
         wrong_dtype = set()
@@ -41,7 +46,7 @@ class DownloadConfig(BaseModel):
             except Exception:
                 wrong_dtype.add(col)
 
-        msg = f"CSV file {self.input_csv}: "
+        msg = f"CSV file {self.expanded_input_csv}: "
         if missing_columns:
             raise KeyError(msg + f"Missing required columns: {sorted(missing_columns)}")
         if wrong_dtype:
@@ -55,7 +60,7 @@ class DownloadConfig(BaseModel):
     def build_downloader(self) -> Downloader:
         return Downloader(
             service_url=self.service_url,
-            input_csv=self.input_csv,
+            input_csv=self.expanded_input_csv,
             chunk_size=self.chunk_size,
             backend=self.backend,
             queries=self.queries,
timewise/plot/lightcurve.py CHANGED
@@ -49,6 +49,9 @@ def plot_lightcurve(
         markeredgecolor="k",
         ecolor="k",
         capsize=2,
+        zorder=3,
+        barsabove=True,
+        elinewidth=.5
     )
     ax.scatter(
         stacked_lightcurve[keys.MEAN + "_mjd"][ul_mask_stacked],
@@ -75,6 +78,7 @@ def plot_lightcurve(
             c=colors[b],
             markersize=4,
             alpha=0.3,
+            zorder=2
         )
 
         single_ul_m = m & ul_mask_raw
@@ -90,6 +94,7 @@ def plot_lightcurve(
             alpha=0.3,
             s=1,
             label=label,
+            zorder=1,
         )
 
     except KeyError as e:
timewise/process/keys.py CHANGED
@@ -1,10 +1,14 @@
+# keys for stacked lightcurve processing
 MEAN = "mean"
 MEDIAN = "median"
 RMS = "rms"
 UPPER_LIMIT = "ul"
 NPOINTS = "npoints"
 ZEROPOINT_EXT = "zeropoint"
-FLUX_EXT = "flux"
 FLUX_DENSITY_EXT = "fluxdensity"
+KSTEST_NORM_EXT = "kstest_norm_pvalue"
+
+# keys of the single exposure lightcurve
+FLUX_EXT = "flux"
 MAG_EXT = "mpro"
 ERROR_EXT = "sig"
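
Note: these constants are concatenated into the column names of the stacked lightcurve (see the stacking.py changes below), for example:

    from timewise.process import keys

    b = "w1"  # WISE band prefix, as in MAGNITUDE_ZEROPOINTS
    f"{b}{keys.FLUX_EXT}{keys.RMS}"              # -> "w1fluxrms"
    f"{b}{keys.FLUX_EXT}{keys.KSTEST_NORM_EXT}"  # -> "w1fluxkstest_norm_pvalue"
    f"{b}{keys.MEAN}{keys.FLUX_DENSITY_EXT}"     # -> "w1meanfluxdensity"
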
timewise/process/stacking.py CHANGED
@@ -1,7 +1,8 @@
 import logging
-from typing import cast, Dict, Any
+from typing import cast, Dict, Any, Literal
 
 from scipy import stats
+from scipy.special import gamma
 import numpy as np
 from numpy import typing as npt
 import pandas as pd
@@ -20,6 +21,26 @@ MAGNITUDE_ZEROPOINTS: Dict[str, float] = {"w1": 20.752, "w2": 19.596}
 FLUX_ZEROPOINTS = {"w1": 309.54, "w2": 171.787}
 
 
+def std_debias_factor(n: npt.NDArray[np.int64]) -> npt.NDArray[np.float64]:
+    # Cureton, The American Statistician 22, 22 (1968). https://www.jstor.org/stable/2681876
+    return gamma((n - 1) / 2) / np.sqrt(2 / (n - 1)) / gamma(n / 2)
+
+
+def t_distribution_correction(n: npt.NDArray[np.int64]) -> npt.NDArray[np.float64]:
+    return stats.t.interval(0.68, df=n - 1)[1]  # type: ignore
+
+
+def no_correction(n: npt.NDArray[np.int64]) -> npt.NDArray[np.float64]:
+    return np.ones_like(n, dtype=float)
+
+
+CORRECTION_FUNCTIONS = {
+    "debias": std_debias_factor,
+    "tdist": t_distribution_correction,
+    "none": no_correction,
+}
+
+
 def calculate_epochs(
     f: pd.Series,
     e: pd.Series,
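
Note: all three correction factors tend to 1 for large n and matter most for visits with very few detections. A quick numeric check, mirroring the functions above (a sketch, not part of the package):

    import numpy as np
    from scipy import stats
    from scipy.special import gamma

    n = np.array([2, 3, 5, 10, 50])

    # "debias" (1/c4): inflates the sample std to an unbiased estimate
    print((gamma((n - 1) / 2) / np.sqrt(2 / (n - 1)) / gamma(n / 2)).round(3))
    # [1.253 1.128 1.064 1.028 1.005]

    # "tdist": 68% Student-t interval half-width, ~1.8 at n=2, -> ~1 for large n
    print(stats.t.interval(0.68, df=n - 1)[1].round(3))
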
@@ -29,6 +50,11 @@ def calculate_epochs(
     outlier_threshold: float,
     outlier_quantile: float,
     outlier_mask: npt.NDArray[np.bool_] | None = None,
+    mean_name: Literal["mean", "median"] = "median",
+    std_name: Literal["std", "sdom", "sdom-1"] = "sdom-1",
+    correction_name: Literal["tdist", "debias", "none"] = "tdist",
+    calculate_pvalues: bool = False,
+    use_single_exposure_errors: bool = True,
 ) -> tuple[
     npt.NDArray[np.float64],
     npt.NDArray[np.float64],
@@ -36,6 +62,7 @@
     npt.NDArray[np.bool_],
     npt.NDArray[np.bool_],
     npt.NDArray[np.int64],
+    npt.NDArray[np.float64],
 ]:
     """
     Calculates the visits within a raw lightcurve.
@@ -50,8 +77,23 @@
     :type counts: np.array
     :param remove_outliers: whether to remove outliers
     :type remove_outliers: bool
+    :param outlier_threshold: threshold to identify outliers
+    :type outlier_threshold: float
+    :param outlier_quantile: quantile that the outlier_threshold is multiplied with
+    :type outlier_quantile: float
     :param outlier_mask: the outlier mask
-    :type outlier_mask: np.array
+    :type outlier_mask: np.array, optional
+    :param mean_name: name of the numpy function to calculate the mean, defaults to "median"
+    :type mean_name: str, optional
+    :param std_name: name of the function to calculate the stacked error, defaults to "sdom-1"
+    :type std_name: str, optional
+    :param correction_name: name of the correction function to apply to the standard deviation, defaults to "tdist"
+    :type correction_name: str, optional
+    :param calculate_pvalues: if true, calculate KS-test p-values to check consistency with a normal distribution per visit
+    :type calculate_pvalues: bool
+    :param use_single_exposure_errors:
+        if true, use the maximum of the RMS and the combined single exposure measurement errors as the final uncertainty
+    :type use_single_exposure_errors: bool
     :return: the epoch
     :rtype: float
     """
@@ -64,6 +106,7 @@
             np.array([]),
             np.array([]),
             np.array([]),
+            np.array([]),
         )
 
     u_lims = pd.isna(e)
@@ -97,6 +140,14 @@
     n_loops = 0
 
     # recalculate uncertainty and median as long as no outliers left
+    mean_function = np.mean if mean_name == "mean" else np.median
+
+    # select function to use to correct standard deviation with
+    bias_correction_function = CORRECTION_FUNCTIONS[correction_name]
+
+    one_points_mask = None
+    visits_at_least_two_point = []
+
     while n_remaining_outlier > 0:
         # make a mask of values to use
         use_mask = ~outlier_mask & use_mask_ul & ~nan_mask  # type: ignore[operator]
@@ -109,7 +160,7 @@
         visits_zero_points = np.unique(visit_mask[zero_points_mask[visit_mask]])
         median[visits_at_least_one_point] = np.array(
             [
-                np.median(f[(visit_mask == i) & use_mask])
+                mean_function(f[(visit_mask == i) & use_mask])
                 for i in visits_at_least_one_point
             ]
         )
@@ -134,10 +185,20 @@
         one_points_mask = n_points <= 1
         # calculate standard deviation
         std = np.zeros_like(counts, dtype=float)
+        extra_factor = (
+            1
+            if std_name == "std"
+            else 1 / n_points[~one_points_mask]
+            if std_name == "sdom"
+            else 1 / (n_points[~one_points_mask] - 1)
+        )
         std[~one_points_mask] = (
-            np.sqrt(mean_deviation[~one_points_mask])
-            / (n_points[~one_points_mask] - 1)
-            * stats.t.interval(0.68, df=n_points[~one_points_mask] - 1)[1]
+            np.sqrt(
+                mean_deviation[~one_points_mask]
+                / (n_points[~one_points_mask] - 1)
+                * extra_factor
+            )
+            * bias_correction_function(n_points[~one_points_mask])
             # for visits with small number of detections we have to correct according to the t distribution
         )
         std[one_points_mask] = -np.inf
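
Note: the three std_name options differ only in the extra factor under the square root. Writing S for mean_deviation (the summed squared deviation within a visit, judging by the (n - 1) normalization), the uncorrected spreads are, as a sketch:

    import numpy as np

    x = np.array([10.0, 11.0, 9.0, 10.5])  # fluxes in one visit (illustrative)
    n, m = len(x), np.median(x)
    S = np.sum((x - m) ** 2)

    sd_std = np.sqrt(S / (n - 1))         # std_name="std": sample std
    sd_sdom = np.sqrt(S / (n - 1) / n)    # std_name="sdom": std of the mean
    sd_sdom1 = np.sqrt(S / (n - 1) ** 2)  # std_name="sdom-1"

each then multiplied by the chosen bias correction factor.
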
@@ -155,8 +216,14 @@
             single_exp_measurement_errors[n_points > 0] / n_points[n_points > 0]
         )
         e_meas[zero_points_mask] = np.nan
-        # take the maximum value of the measured single exposure errors and the standard deviation
-        u = np.maximum(std, e_meas)
+
+        if use_single_exposure_errors:
+            # take the maximum value of the measured single exposure errors and the standard deviation
+            u = np.maximum(std, e_meas)
+        else:
+            # take the rms as the measurement uncertainty except if there is only one single exposure
+            u = std
+            u[one_points_mask] = e_meas[one_points_mask]
 
         # Estimate the spread of the flux.
         # To be robust against outliers, do that with quantiles instead of std
@@ -187,7 +254,29 @@
         if n_loops > 20:
             raise Exception(f"{n_loops}!")
 
-    return median, u, bin_ulim_bool, outlier_mask, use_mask, n_points
+    # -------------------- calculate std for crosscheck -------------------- #
+    if calculate_pvalues:
+        npstd = np.zeros_like(counts, dtype=float)
+        npstd[~one_points_mask] = np.array(  # type: ignore[operator]
+            [np.std(f[(visit_mask == i) & use_mask]) for i in visits_at_least_two_point]
+        )
+        npstd[one_points_mask] = np.nan
+
+        # ---------------- calculate compatibility with gaussian ---------------- #
+        pvalues = np.ones_like(counts, dtype=float)
+        pvalues[~one_points_mask] = np.array(  # type: ignore[operator]
+            [
+                stats.kstest(
+                    f[(visit_mask == i) & use_mask],
+                    stats.norm(median[i], npstd[i]).cdf,  # type: ignore
+                ).pvalue
+                for i in visits_at_least_two_point
+            ]
+        )
+    else:
+        pvalues = np.full_like(counts, -999)
+
+    return median, u, bin_ulim_bool, outlier_mask, use_mask, n_points, pvalues
 
 
 def stack_visits(
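
Note: the consistency check above is the standard one-sample Kolmogorov-Smirnov test from scipy; a sanity check on synthetic data (values illustrative):

    import numpy as np
    from scipy import stats

    rng = np.random.default_rng(0)
    x = rng.normal(loc=10.0, scale=2.0, size=30)

    # mirror the per-visit check: compare against a normal with the
    # sample's own location and scale
    p = stats.kstest(x, stats.norm(np.median(x), np.std(x)).cdf).pvalue
    print(p)  # large p: consistent with a normal distribution
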
@@ -195,6 +284,12 @@ def stack_visits(
     outlier_threshold: float,
     outlier_quantile: float,
     clean_outliers: bool = True,
+    mean_name: Literal["mean", "median"] = "median",
+    std_name: Literal["std", "sdom", "sdom-1"] = "sdom-1",
+    correction_name: Literal["tdist", "debias", "none"] = "tdist",
+    calculate_pvalues: bool = False,
+    use_single_exposure_errors: bool = True,
+    median_zeropoint_per_visit: bool = True,
 ):
     """
     Combine the data by visits of the satellite of one region in the sky.
@@ -207,6 +302,27 @@
 
     :param lightcurve: the raw lightcurve
     :type lightcurve: pandas.DataFrame
+    :param outlier_threshold: threshold to identify outliers
+    :type outlier_threshold: float
+    :param outlier_quantile: quantile that the outlier_threshold is multiplied with
+    :type outlier_quantile: float
+    :param clean_outliers:
+        if True, remove outliers that are outlier_threshold x outlier_quantile away from the mean per visit,
+        default is True
+    :type clean_outliers: bool
+    :param mean_name: name of the numpy function to calculate the mean, defaults to "median"
+    :type mean_name: str, optional
+    :param std_name: name of the function to calculate the stacked error, defaults to "sdom-1"
+    :type std_name: str, optional
+    :param correction_name: name of the correction function to apply to the standard deviation, defaults to "tdist"
+    :type correction_name: str, optional
+    :param calculate_pvalues: if true, calculate KS-test p-values to check consistency with a normal distribution per visit
+    :type calculate_pvalues: bool
+    :param use_single_exposure_errors:
+        if true, use the maximum of the RMS and the combined single exposure measurement errors as the final uncertainty
+    :type use_single_exposure_errors: bool
+    :param median_zeropoint_per_visit: if true, use the median zeropoint per visit instead of the individual exposure ones
+    :type median_zeropoint_per_visit: bool
     :return: the stacked lightcurve
    :rtype: pandas.DataFrame
    """
@@ -236,15 +352,22 @@
         remove_outliers = lum_ext == keys.FLUX_EXT and clean_outliers
         outlier_mask = outlier_masks.get(keys.FLUX_EXT, None)
 
-        mean, u, bin_ulim_bool, outlier_mask, use_mask, n_points = calculate_epochs(
-            f,
-            e,
-            visit_map,
-            counts,
-            remove_outliers=remove_outliers,
-            outlier_mask=outlier_mask,
-            outlier_quantile=outlier_quantile,
-            outlier_threshold=outlier_threshold,
+        mean, u, bin_ulim_bool, outlier_mask, use_mask, n_points, p_values = (
+            calculate_epochs(
+                f,
+                e,
+                visit_map,
+                counts,
+                remove_outliers=remove_outliers,
+                outlier_mask=outlier_mask,
+                outlier_quantile=outlier_quantile,
+                outlier_threshold=outlier_threshold,
+                mean_name=mean_name,
+                std_name=std_name,
+                correction_name=correction_name,
+                calculate_pvalues=calculate_pvalues,
+                use_single_exposure_errors=use_single_exposure_errors,
+            )
         )
         n_outliers = np.sum(outlier_mask)
 
@@ -257,6 +380,7 @@
         stacked_data[f"{b}{lum_ext}{keys.RMS}"] = u
         stacked_data[f"{b}{lum_ext}{keys.UPPER_LIMIT}"] = bin_ulim_bool
         stacked_data[f"{b}{lum_ext}{keys.NPOINTS}"] = n_points
+        stacked_data[f"{b}{lum_ext}{keys.KSTEST_NORM_EXT}"] = p_values
 
         outlier_masks[lum_ext] = outlier_mask
         use_masks[lum_ext] = use_mask
@@ -289,20 +413,27 @@
         # if the visit only has upper limits then use the fall-back zeropoint
         zps_median[bin_ulim_bools[keys.FLUX_EXT]] = MAGNITUDE_ZEROPOINTS[b]
 
+        if median_zeropoint_per_visit:
+            use_zp = zps_median[visit_map]
+        else:
+            zp_nan_mask = np.isnan(zps)
+            zps[zp_nan_mask] = zps_median[visit_map[zp_nan_mask]]
+            use_zp = zps
+
         # --------------- calculate flux density from instrument flux ---------------- #
         # get the instrument flux [digital numbers], i.e. source count
         inst_fluxes_e = lightcurve[f"{b}{keys.ERROR_EXT}{keys.FLUX_EXT}"]
 
         # calculate the proportionality constant between flux density and source count
         mag_zp = FLUX_ZEROPOINTS[b] * 1e3  # in mJy
-        flux_dens_const = mag_zp * 10 ** (-zps_median / 2.5)
+        flux_dens_const = mag_zp * 10 ** (-use_zp / 2.5)
 
         # calculate flux densities from instrument counts
-        flux_densities = inst_fluxes * flux_dens_const[visit_map]
-        flux_densities_e = inst_fluxes_e * flux_dens_const[visit_map]
+        flux_densities = inst_fluxes * flux_dens_const
+        flux_densities_e = inst_fluxes_e * flux_dens_const
 
         # bin flux densities
-        mean_fd, u_fd, ul_fd, outlier_mask_fd, use_mask_fd, n_points_fd = (
+        mean_fd, u_fd, ul_fd, outlier_mask_fd, use_mask_fd, n_points_fd, p_values_fd = (
             calculate_epochs(
                 flux_densities,
                 flux_densities_e,
@@ -312,11 +443,17 @@
                 outlier_mask=outlier_masks[keys.FLUX_EXT],
                 outlier_threshold=outlier_threshold,
                 outlier_quantile=outlier_quantile,
+                mean_name=mean_name,
+                std_name=std_name,
+                correction_name=correction_name,
+                calculate_pvalues=calculate_pvalues,
+                use_single_exposure_errors=use_single_exposure_errors,
             )
         )
         stacked_data[f"{b}{keys.MEAN}{keys.FLUX_DENSITY_EXT}"] = mean_fd
         stacked_data[f"{b}{keys.FLUX_DENSITY_EXT}{keys.RMS}"] = u_fd
         stacked_data[f"{b}{keys.FLUX_DENSITY_EXT}{keys.UPPER_LIMIT}"] = ul_fd
         stacked_data[f"{b}{keys.FLUX_DENSITY_EXT}{keys.NPOINTS}"] = n_points_fd
+        stacked_data[f"{b}{keys.FLUX_DENSITY_EXT}{keys.KSTEST_NORM_EXT}"] = p_values_fd
 
     return pd.DataFrame(stacked_data)
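
Note: the flux density conversion follows directly from the zeropoints above: flux_density = F0 * 10 ** (-zp / 2.5) * counts, with F0 from FLUX_ZEROPOINTS in mJy and zp the per-visit (or per-exposure) magnitude zeropoint. A quick check (counts value illustrative):

    # W1: F0 = 309.54 Jy = 309.54e3 mJy, fall-back zeropoint zp = 20.752 mag
    F0_mJy = 309.54 * 1e3
    zp = 20.752
    counts = 150.0  # instrument flux in digital numbers
    print(F0_mJy * 10 ** (-zp / 2.5) * counts)  # ~0.23 mJy
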
timewise/util/path.py ADDED
@@ -0,0 +1,10 @@
+from pathlib import Path
+import os
+
+
+def expand(path: Path | str) -> Path:
+    """
+    Fully expand and resolve the Path with the given environment variables.
+    """
+    path = Path(path)
+    return Path(os.path.expandvars(path)).expanduser().resolve()
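
Note: a quick usage sketch of the new helper (environment variable and paths illustrative):

    import os
    from timewise.util.path import expand

    os.environ["TIMEWISE_DATA"] = "/data/wise"
    print(expand("$TIMEWISE_DATA/lightcurves"))  # /data/wise/lightcurves
    print(expand("~/timewise.yml"))              # e.g. /home/<user>/timewise.yml

Because of .resolve(), the result is always an absolute path with symlinks resolved.
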
timewise-1.0.0a7.dist-info/METADATA CHANGED
@@ -1,23 +1,26 @@
 Metadata-Version: 2.4
 Name: timewise
-Version: 1.0.0a6
+Version: 1.0.0a7
 Summary: Download WISE infrared data for many objects and process them with AMPEL
 License: MIT
 License-File: LICENSE
 Author: Jannis Necker
 Author-email: jannis.necker@gmail.com
-Requires-Python: >=3.11,<3.12
+Requires-Python: >=3.11,<3.14
 Classifier: License :: OSI Approved :: MIT License
 Classifier: Programming Language :: Python :: 3
 Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
+Classifier: Programming Language :: Python :: 3.13
 Provides-Extra: ampel
 Provides-Extra: dev
 Provides-Extra: docs
-Requires-Dist: ampel-alerts (==0.10.3a5) ; extra == "ampel"
-Requires-Dist: ampel-core (>=0.10.4.post0,<0.11.0) ; extra == "ampel"
-Requires-Dist: ampel-photometry (>=0.10.1,<0.11.0) ; extra == "ampel"
+Requires-Dist: ampel-alerts (==0.10.3a6) ; extra == "ampel"
+Requires-Dist: ampel-core (==0.10.6a17) ; extra == "ampel"
+Requires-Dist: ampel-interface (==0.10.5a8) ; extra == "ampel"
+Requires-Dist: ampel-photometry (==0.10.2a1) ; extra == "ampel"
 Requires-Dist: ampel-plot (>=0.9.1,<0.10.0) ; extra == "ampel"
-Requires-Dist: astropy (>=5.1,<6.0.0)
+Requires-Dist: astropy (>=5.1,<8.0.0)
 Requires-Dist: autodoc_pydantic[erdantic] (>=2.2.0,<3.0.0) ; extra == "docs"
 Requires-Dist: backoff (>=2.1.2,<3.0.0)
 Requires-Dist: coveralls (>=3.3.1,<4.0.0) ; extra == "dev"
timewise-1.0.0a7.dist-info/RECORD CHANGED
@@ -1,36 +1,35 @@
-ampel/timewise/alert/TimewiseAlertSupplier.py,sha256=RmAWJVOfObaI1x0gt9agRt45WrVj1nhQchaMQ9QgUIA,4127
-ampel/timewise/alert/load/TimewiseFileLoader.py,sha256=ChjOpQpyGTA-H_fjnvQGwa37IFfvfJanPmnVKORVElo,4271
-ampel/timewise/ingest/TiCompilerOptions.py,sha256=xzGOOWAFoFsazocXLsTqtaHIqt_WUXFaacLUoBVKlbo,758
+ampel/timewise/alert/TimewiseAlertSupplier.py,sha256=HLeQBCLrmVKxqvPc4E2TVj241KWXD9hHK7MJA3EUWJw,4178
+ampel/timewise/alert/load/TimewiseFileLoader.py,sha256=jmLppFEJwjQCSOMUtr-cSP9UbtT0VzqyAf4VDJjUf3w,4409
+ampel/timewise/ingest/TiCompilerOptions.py,sha256=j2eUxSkJEKxAjJCd9plxRRCg6Mv8dNHw72UvKyAxUsA,809
 ampel/timewise/ingest/TiDataPointShaper.py,sha256=KbAg-J0DkMaJA9K67xMY5Pe0SIP0LNKsBNAMnTYV9zA,3180
 ampel/timewise/ingest/TiMongoMuxer.py,sha256=YDiqaYiJ1TvyHUMAlNWHBHCpw0tH4Nvf8eyHUxMbzS8,6475
 ampel/timewise/ingest/tags.py,sha256=ggn0Y6QUnjIwMzOlgKmPX1JPhmnvc21nUrORFhaR0uI,564
-ampel/timewise/t1/T1HDBSCAN.py,sha256=vi1PWF0HFAedVJA4rVoR2YOXQ4jcEgEoUICtmg3rZjw,9301
-ampel/timewise/t1/TimewiseFilter.py,sha256=ZSaXiSDKXCusknLJRMhe2Awj_YfkM74JVXeRbiZonmA,1564
-ampel/timewise/t2/T2StackVisits.py,sha256=x7YcVLz_5fOPAfzso4fFWy4Pwn5txPfRT_Hx49PdbTo,1847
-ampel/timewise/util/AuxDiagnosticPlotter.py,sha256=MZP4LWpFgBUPWPxlx8EEFeRc8rhI7FDO_LlbhKtkwrw,1641
+ampel/timewise/t1/T1HDBSCAN.py,sha256=zVhcqQVAjlHgDBd9CJA6rGH3RCrCV84rsIIOWVxODKI,9528
+ampel/timewise/t1/TimewiseFilter.py,sha256=uDjNGsqPOISoFjSH8tX87I3UTW9qGX8rEmkyze8u2ug,1566
+ampel/timewise/t2/T2StackVisits.py,sha256=JBxFZLT-a-8jjenRw45ZVA_FvtcyOBmvMVcpYHyw_Yk,2574
+ampel/timewise/util/AuxDiagnosticPlotter.py,sha256=evvuH5vuEOFkfbkoSQw7ztiEqHos19PP91VQ_SCg_AU,1692
 ampel/timewise/util/pdutil.py,sha256=CJqeYqL2I6A5WnF5S342bwM4omJY-1XwT1XWY2uk3bc,1524
-conf/timewise/ampel.yml,sha256=8NuVSuKqinHT0G66uDPnnw-P7djScKKVHzeFNsH3XZc,360
-timewise/__init__.py,sha256=OhKzin_f9k_6U1rnhXEMzgMu70gbjQImfaWjmZiKhjA,24
+timewise/__init__.py,sha256=E7xUjDtIcRMszsR5_yuDt--zdfFpDV9LyF3s2kNKZCU,24
 timewise/backend/__init__.py,sha256=w79nWfCw8n9g98CkHWJELmb4j9xblWC4DGZOV3_XhH4,134
 timewise/backend/base.py,sha256=dHxRzu2q3uQ0wdGmDxnn-p68Tp19qChue7HMEu56wNA,1080
-timewise/backend/filesystem.py,sha256=GQ4Hrb6_7Q7fKOn6QUl8bqAihAmyeZoTRxElNIwPQ1Y,2465
-timewise/chunking.py,sha256=q7njvTSD84gdcvIk54SC-Ob863MsR79RPec8HS-bm4U,1668
+timewise/backend/filesystem.py,sha256=qFBmiOtfv8b3uhhiZSbH8bRIUrpS4f0P8Zl03VxpgAI,2598
+timewise/chunking.py,sha256=uAfAOhWg1v6VbfHNkhBLSCEa0GehPoIZPftGueVmNRs,1845
 timewise/cli.py,sha256=W8SXlUQo7a5aS4_kW40KTs2UkTLQS12JuhgwkRb23DY,4172
 timewise/config.py,sha256=ZTSokRZMZDqBqVFV9DxvO-47yE9E9xWF48Rcjb0QG10,1044
 timewise/io/__init__.py,sha256=S7vb0glKJnw6ztOlrD-0Wma2bQZ2RwpmXDLFJLKBMVo,35
-timewise/io/config.py,sha256=aizLxt9l4aeWQNsvcemtQdr_fW1vLmpRSofcgA3Bgvk,2239
+timewise/io/config.py,sha256=u_nlfstvuz2_bfepRZo3y43XAqQ0ilReYg9e081CTQg,2410
 timewise/io/download.py,sha256=IMNWur1FWPKJ7SNMTjevZgWqPP_eYLzXr4Pjp3XOeXI,10589
 timewise/io/stable_tap.py,sha256=jukCkBi2d7WACOo_kXTMCppzWUsN-pLVg9EDqHi3qd0,3478
 timewise/plot/__init__.py,sha256=cc00UenWC_8zAkBH-Ylhs3yCF49tAqZ2Al9MfOoXYDI,120
 timewise/plot/diagnostic.py,sha256=GRp-OUwz2yzzDu9qdViFg_e3Mxl5t1IvUJXZHuMKB2U,8276
-timewise/plot/lightcurve.py,sha256=oK0y6RFzv7QSrO1Qqyc1wNghsTILC9QAfIjuoA8i92I,3757
+timewise/plot/lightcurve.py,sha256=K0UvjmvbCSsRKP3Mu9X5L5AVUwqURR76ESJCIMEgE6Q,3924
 timewise/plot/panstarrs.py,sha256=X2ZULm7QT91cp4qociG0fVeI0saGLJwyKzL0141Vqis,8014
 timewise/plot/sdss.py,sha256=cc1zU-4XFkqc8xH5yqCyMsDJf9w_54B_6NeRMjr9Pt8,2622
 timewise/process/__init__.py,sha256=Yk-j1B1MnBuuaM6eFi43TxdWmFKbwFHvDsuQZt4yB_c,70
 timewise/process/config.py,sha256=d4hCvNb8JyJf2-0BTQgJaeOmHGZK_aHEDMb_xyqJgbU,1060
 timewise/process/interface.py,sha256=JOIh1wYuE1s9XdSfBRS7qUXexlr-p5UxdwaSjKlRZxE,5207
-timewise/process/keys.py,sha256=0TEVn-BwfzHGlScU-N8AtxgkhA2mUO0wBu4_ol_ylH4,197
-timewise/process/stacking.py,sha256=5qk9SYTfDFNFsnw9KAGYW-RndGtdmENKEiIqD6mrJ6I,12707
+timewise/process/keys.py,sha256=MGnTHM4UGUSyX7BaCzoT_dg81NEAiLCxMLBRz3Kq8yc,319
+timewise/process/stacking.py,sha256=vVyrhrBRpqkptNryxNxWwEvXeQLH2GYNUtUxWggYfTk,18997
 timewise/process/template.yml,sha256=U_xKmygDl3E-viTgZEI8pQIJwWduB52SdI2X9vy61Yo,1037
 timewise/query/__init__.py,sha256=1OA_FlLI7O0aIDOXHpBKMOyMvYLCd0kQVkzoqbxouyE,242
 timewise/query/base.py,sha256=GjAoQZpaxVHcpCgPco1y5bpR0Jtd_sp_bYMI26WpAK0,1262
@@ -43,9 +42,10 @@ timewise/types.py,sha256=MB-aqpyos1aCS58QWBas34WcwFHiHOkrDj_d9_ZxuVc,667
 timewise/util/backoff.py,sha256=bU5yhsBO4U53XEPJ_32tgql9rq_Rzrv7w32nVQYHr64,272
 timewise/util/csv_utils.py,sha256=5i3Jd4c58-doPs1N_hyYZ8Uc2nvuk9nGwgPNZoNrlu0,298
 timewise/util/error_threading.py,sha256=uyV1Ri-wf87lpa17Xlp520B1V8DWHh3v9Mk97QrPmv0,2264
+timewise/util/path.py,sha256=-CcpYdjidg5FcZnRyQqrK_EKjc-7USYbJ8gtKatGl6k,254
 timewise/util/visits.py,sha256=4ZXwH7OXmp98tUPZplvuOtykB048kZoYXyuRTwQxT7w,998
-timewise-1.0.0a6.dist-info/METADATA,sha256=2m1fvHTkYBVmko9pENN3GjqWTHpmU9Dgq1Ut54sqmK8,10146
-timewise-1.0.0a6.dist-info/WHEEL,sha256=zp0Cn7JsFoX2ATtOhtaFYIiE2rmFAD4OcMhtUki8W3U,88
-timewise-1.0.0a6.dist-info/entry_points.txt,sha256=mYh1HsUFbV7KT8kxiGqVtR3Pk0oEk6Bd-2c5FsYVhG4,45
-timewise-1.0.0a6.dist-info/licenses/LICENSE,sha256=sVoNJWiTlH-NarJx0wdsob468Pg3JE6vIIgll4lCa3E,1070
-timewise-1.0.0a6.dist-info/RECORD,,
+timewise-1.0.0a7.dist-info/METADATA,sha256=r8fAKUybkR-ACivlwDuMi2_6p7ey6Ia_qycCNsyxYIs,10294
+timewise-1.0.0a7.dist-info/WHEEL,sha256=zp0Cn7JsFoX2ATtOhtaFYIiE2rmFAD4OcMhtUki8W3U,88
+timewise-1.0.0a7.dist-info/entry_points.txt,sha256=mYh1HsUFbV7KT8kxiGqVtR3Pk0oEk6Bd-2c5FsYVhG4,45
+timewise-1.0.0a7.dist-info/licenses/LICENSE,sha256=sVoNJWiTlH-NarJx0wdsob468Pg3JE6vIIgll4lCa3E,1070
+timewise-1.0.0a7.dist-info/RECORD,,
conf/timewise/ampel.yml DELETED
@@ -1,10 +0,0 @@
-unit:
-- ampel.timewise.alert.load.TimewiseFileLoader
-- ampel.timewise.alert.TimewiseAlertSupplier
-- ampel.timewise.ingest.TiDataPointShaper
-- ampel.timewise.ingest.TiMongoMuxer
-- ampel.timewise.ingest.TiCompilerOptions
-- ampel.timewise.t2.T2StackVisits
-- ampel.timewise.t1.T1HDBSCAN
-- ampel.timewise.util.AuxDiagnosticPlotter
-- ampel.timewise.t1.TimewiseFilter