timewise-1.0.0a6-py3-none-any.whl → timewise-1.0.0a8-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ampel/timewise/alert/TimewiseAlertSupplier.py +2 -1
- ampel/timewise/alert/load/TimewiseFileLoader.py +8 -4
- ampel/timewise/ingest/TiCompilerOptions.py +2 -1
- ampel/timewise/ingest/TiMongoMuxer.py +4 -6
- ampel/timewise/t1/T1HDBSCAN.py +10 -2
- ampel/timewise/t1/TimewiseFilter.py +2 -2
- ampel/timewise/t2/T2StackVisits.py +17 -0
- ampel/timewise/util/AuxDiagnosticPlotter.py +2 -1
- timewise/__init__.py +1 -1
- timewise/backend/filesystem.py +8 -3
- timewise/chunking.py +31 -7
- timewise/io/config.py +10 -5
- timewise/io/download.py +1 -16
- timewise/io/stable_tap.py +17 -0
- timewise/plot/lightcurve.py +5 -0
- timewise/process/keys.py +5 -1
- timewise/process/stacking.py +159 -22
- timewise/util/path.py +10 -0
- {timewise-1.0.0a6.dist-info → timewise-1.0.0a8.dist-info}/METADATA +12 -9
- {timewise-1.0.0a6.dist-info → timewise-1.0.0a8.dist-info}/RECORD +23 -23
- conf/timewise/ampel.yml +0 -10
- {timewise-1.0.0a6.dist-info → timewise-1.0.0a8.dist-info}/WHEEL +0 -0
- {timewise-1.0.0a6.dist-info → timewise-1.0.0a8.dist-info}/entry_points.txt +0 -0
- {timewise-1.0.0a6.dist-info → timewise-1.0.0a8.dist-info}/licenses/LICENSE +0 -0
ampel/timewise/alert/TimewiseAlertSupplier.py
CHANGED

@@ -16,11 +16,12 @@ import pandas as pd
 from bson import encode
 
 from ampel.alert.AmpelAlert import AmpelAlert
+from ampel.base.AmpelABC import AmpelABC
 from ampel.alert.BaseAlertSupplier import BaseAlertSupplier
 from ampel.view.ReadOnlyDict import ReadOnlyDict
 
 
-class TimewiseAlertSupplier(BaseAlertSupplier):
+class TimewiseAlertSupplier(BaseAlertSupplier, AmpelABC):
     """
     Iterable class that, for each transient name provided by the underlying alert_loader
     returns a PhotoAlert instance.

ampel/timewise/alert/load/TimewiseFileLoader.py
CHANGED

@@ -10,14 +10,16 @@ from typing import Dict, get_args
 
 import numpy as np
 import pandas as pd
-from astropy.table import
+from astropy.table import vstack
 from ampel.abstract.AbsAlertLoader import AbsAlertLoader
+from ampel.base.AmpelABC import AmpelABC
 from timewise.tables import TableType
 from timewise.config import TimewiseConfig
 from timewise.types import TaskID
+from timewise.util.path import expand
 
 
-class TimewiseFileLoader(AbsAlertLoader[Dict]):
+class TimewiseFileLoader(AbsAlertLoader[Dict], AmpelABC):
     """
     Load alerts from one of more files.
     """
@@ -33,8 +35,10 @@ class TimewiseFileLoader(AbsAlertLoader[Dict]):
     def __init__(self, **kwargs) -> None:
         super().__init__(**kwargs)
 
-
-
+        expanded_config_file = expand(self.timewise_config_file)
+
+        self.logger.info(f"loading timewise config file {expanded_config_file}")
+        timewise_config = TimewiseConfig.from_yaml(expanded_config_file)
         dl = timewise_config.download.build_downloader()
         self._timewise_backend = dl.backend
 
ampel/timewise/ingest/TiCompilerOptions.py
CHANGED

@@ -9,9 +9,10 @@
 from typing import Any
 
 from ampel.model.ingest.CompilerOptions import CompilerOptions
+from ampel.base.AmpelABC import AmpelABC
 
 
-class TiCompilerOptions(CompilerOptions):
+class TiCompilerOptions(CompilerOptions, AmpelABC):
     stock: dict[str, Any] = {"tag": "TIMEWISE"}
     t0: dict[str, Any] = {"tag": "TIMEWISE"}
     t1: dict[str, Any] = {"tag": "TIMEWISE"}

ampel/timewise/ingest/TiMongoMuxer.py
CHANGED

@@ -32,7 +32,7 @@ class TiMongoMuxer(AbsT0Muxer):
     """
 
     # Standard projection used when checking DB for existing PPS/ULS
-    projection = {
+    projection: dict[str, int] = {
         "_id": 0,
         "id": 1,
         "tag": 1,
@@ -51,6 +51,8 @@ class TiMongoMuxer(AbsT0Muxer):
         "body.dec": 1,
     }
 
+    unique_key: list[str] = ["mjd", "ra", "dec"]
+
     def __init__(self, **kwargs) -> None:
         super().__init__(**kwargs)
 
@@ -116,11 +118,7 @@ class TiMongoMuxer(AbsT0Muxer):
         # jd alone is not enough for matching pps because each time is associated with
         # two filters! Also, if there can be multiple sources within the same frame which
         # leads to duplicate MJD and FID. Check position in addition.
-        key = (
-            dp["body"]["mjd"],
-            dp["body"]["ra"],
-            dp["body"]["dec"],
-        )
+        key = tuple(dp["body"][k] for k in self.unique_key)
 
         if target := unique_dps_ids.get(key):
             # insert id in order

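The muxer's duplicate check now derives its key from the new class-level `unique_key` attribute instead of a hard-coded tuple, presumably so the matched fields can be overridden. A minimal sketch of the equivalence (the datapoint dict is illustrative, not taken from this diff):

```python
dp = {"body": {"mjd": 58000.5, "ra": 150.1, "dec": 2.2}}
unique_key = ["mjd", "ra", "dec"]  # the new class-level default

# generic form now used by the muxer ...
key = tuple(dp["body"][k] for k in unique_key)
# ... yields the same key as the old hard-coded tuple
assert key == (dp["body"]["mjd"], dp["body"]["ra"], dp["body"]["dec"])
```
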
ampel/timewise/t1/T1HDBSCAN.py
CHANGED
@@ -10,8 +10,6 @@ from typing import Iterable, Sequence
 
 import numpy as np
 from numpy import typing as npt
-from ampel.base.AuxUnitRegister import AuxUnitRegister
-from astropy.coordinates.angle_utilities import angular_separation, position_angle
 from sklearn.cluster import HDBSCAN
 from pymongo import MongoClient
 
@@ -20,11 +18,21 @@ from ampel.struct.T1CombineResult import T1CombineResult
 from ampel.types import DataPointId
 from ampel.abstract.AbsT1CombineUnit import AbsT1CombineUnit
 from ampel.model.UnitModel import UnitModel
+from ampel.base.AuxUnitRegister import AuxUnitRegister
 
 from ampel.timewise.util.pdutil import datapoints_to_dataframe
 from ampel.timewise.util.AuxDiagnosticPlotter import AuxDiagnosticPlotter
 from timewise.process import keys
 
+from importlib.util import find_spec
+
+if find_spec("astropy.coordinates.angle_utilities"):
+    # astropy < v6.0.0
+    from astropy.coordinates.angle_utilities import angular_separation, position_angle
+else:
+    # astropy >= v6.0.0
+    from astropy.coordinates.angles import angular_separation, position_angle
+
 
 class T1HDBSCAN(AbsT1CombineUnit):
     input_mongo_db_name: str

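The astropy imports are now gated on module availability rather than pinned to one location: `importlib.util.find_spec` returns a `ModuleSpec` while the deprecated `astropy.coordinates.angle_utilities` module can still be located (astropy < 6.0.0) and `None` otherwise. A self-contained sketch of the same pattern, probing a stdlib module instead:

```python
from importlib.util import find_spec

# find_spec locates a module without executing it; note that probing a
# submodule like "pkg.sub" still imports the parent package "pkg".
if find_spec("tomllib"):  # in the stdlib since Python 3.11
    import tomllib as toml_reader
else:
    import tomli as toml_reader  # hypothetical fallback for older interpreters
```
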
ampel/timewise/t1/TimewiseFilter.py
CHANGED

@@ -36,12 +36,12 @@ class TimewiseFilter(AbsAlertFilter):
         visits, counts = np.unique(visit_map, return_counts=True)
         visit_passed = counts >= self.det_per_visit
         if not all(visit_passed):
-            self.logger.
+            self.logger.debug(None, extra={"min_det_per_visit": min(counts).item()})
             return None
 
         # enough visits
         if not len(visits) >= self.n_visits:
-            self.logger.
+            self.logger.debug(None, extra={"n_visits": len(visits)})
             return None
 
         return True

ampel/timewise/t2/T2StackVisits.py
CHANGED

@@ -6,6 +6,7 @@
 # Date: 24.09.2025
 # Last Modified Date: 24.09.2025
 # Last Modified By: Jannis Necker <jannis.necker@gmail.com>
+from typing import Literal
 from scipy import stats
 
 from ampel.abstract.AbsLightCurveT2Unit import AbsLightCurveT2Unit
@@ -32,6 +33,16 @@ class T2StackVisits(AbsLightCurveT2Unit):
     # threshold above which to exclude outliers
     outlier_threshold: float = 5
 
+    # methods to calculate mean and std
+    mean_name: Literal["mean", "median"] = "median"
+    std_name: Literal["std", "sdom", "sdom-1"] = "sdom-1"
+    correction_name: Literal["debias", "tdist", "none"] = "tdist"
+
+    # see timewise.process.stacking
+    calculate_pvalues: bool = False
+    use_single_exposure_errors: bool = True
+    median_zeropoint_per_visit: bool = True
+
     def process(self, light_curve: LightCurve) -> UBson | UnitResult:
         columns = [
             "ra",
@@ -53,4 +64,10 @@
             outlier_threshold=self.outlier_threshold,
             outlier_quantile=self.outlier_quantile,
             clean_outliers=self.clean_outliers,
+            mean_name=self.mean_name,
+            std_name=self.std_name,
+            correction_name=self.correction_name,
+            calculate_pvalues=self.calculate_pvalues,
+            use_single_exposure_errors=self.use_single_exposure_errors,
+            median_zeropoint_per_visit=self.median_zeropoint_per_visit,
         ).to_dict(orient="records")

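The new unit options mirror the extended signature of `timewise.process.stacking.stack_visits` (diffed below) and are forwarded to it unchanged. A hedged sketch of the equivalent direct call; `raw_lightcurve` and the outlier settings are placeholders, not values from this diff:

```python
from timewise.process.stacking import stack_visits

stacked = stack_visits(
    raw_lightcurve,          # assumed: a single-exposure lightcurve DataFrame
    outlier_threshold=5,
    outlier_quantile=0.75,   # hypothetical value
    clean_outliers=True,
    mean_name="median",
    std_name="sdom-1",
    correction_name="tdist",
    calculate_pvalues=False,
    use_single_exposure_errors=True,
    median_zeropoint_per_visit=True,
)
```
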
ampel/timewise/util/AuxDiagnosticPlotter.py
CHANGED

@@ -5,13 +5,14 @@ from numpy import typing as npt
 
 from ampel.plot.create import create_plot_record
 from ampel.base.AmpelBaseModel import AmpelBaseModel
+from ampel.base.AmpelABC import AmpelABC
 from ampel.model.PlotProperties import PlotProperties
 from ampel.content.NewSVGRecord import NewSVGRecord
 
 from timewise.plot.diagnostic import DiagnosticPlotter
 
 
-class AuxDiagnosticPlotter(AmpelBaseModel):
+class AuxDiagnosticPlotter(AmpelBaseModel, AmpelABC):
     plot_properties: PlotProperties
     cutout: Literal["sdss", "panstarrs"] = DiagnosticPlotter.model_fields[
         "cutout"

timewise/__init__.py
CHANGED
@@ -1 +1 @@
-__version__ = "1.0.0a6"
+__version__ = "1.0.0a8"

timewise/backend/filesystem.py
CHANGED
@@ -6,6 +6,7 @@ from astropy.table import Table
 
 from .base import Backend
 from ..types import TaskID
+from ..util.path import expand
 
 
 logger = logging.getLogger(__name__)
@@ -15,17 +16,21 @@ class FileSystemBackend(Backend):
     type: Literal["filesystem"] = "filesystem"
     base_path: Path
 
+    @property
+    def expanded_path(self) -> Path:
+        return expand(self.base_path)
+
     # ----------------------------
     # Helpers for paths
     # ----------------------------
     def _meta_path(self, task: TaskID) -> Path:
-        return self.base_path / f"{task}.meta.json"
+        return self.expanded_path / f"{task}.meta.json"
 
     def _marker_path(self, task: TaskID) -> Path:
-        return self.base_path / f"{task}.ok"
+        return self.expanded_path / f"{task}.ok"
 
     def _data_path(self, task: TaskID) -> Path:
-        return self.base_path / f"{task}.fits"
+        return self.expanded_path / f"{task}.fits"
 
     # ----------------------------
     # Metadata

timewise/chunking.py
CHANGED
@@ -1,20 +1,45 @@
-
+import logging
+from functools import cached_property
 from pathlib import Path
+from typing import Iterator, Sequence, cast
+
 import numpy as np
-from numpy import typing as npt
 import pandas as pd
-import logging
+from numpy import typing as npt
 
 logger = logging.getLogger(__name__)
 
 
 class Chunk:
     def __init__(
-        self, chunk_id: int,
+        self, chunk_id: int, input_csv, row_indices: npt.NDArray[np.int_]
     ):
         self.chunk_id = chunk_id
-        self.indices = indices
         self.row_numbers = row_indices
+        self.input_csv = input_csv
+
+    @cached_property
+    def indices(self) -> pd.Index:
+        start = min(self.row_numbers)
+        stop = max(self.row_numbers) + 1
+        return pd.read_csv(self.input_csv, skiprows=start, nrows=stop - start).index
+
+    @property
+    def data(self) -> pd.DataFrame:
+        start = (
+            min(cast(Sequence[int], self.row_numbers)) + 1
+        )  # plus one to always skip header line
+        nrows = (
+            max(cast(Sequence[int], self.row_numbers)) - start + 2
+        )  # plus one: skip header, plus one:
+
+        columns = list(pd.read_csv(self.input_csv, nrows=0).columns)
+        return pd.read_csv(
+            filepath_or_buffer=self.input_csv,
+            skiprows=start,
+            nrows=nrows,
+            names=columns,
+        )
 
 
 class Chunker:
@@ -45,6 +70,5 @@ class Chunker:
             raise IndexError(f"Invalid chunk_id {chunk_id}")
         start = chunk_id * self.chunk_size
         stop = min(start + self.chunk_size, self._n_rows)
-        indices = pd.read_csv(self.input_csv, skiprows=start, nrows=stop - start).index
         logger.debug(f"chunk {chunk_id}: from {start} to {stop}")
-        return Chunk(chunk_id,
+        return Chunk(chunk_id, self.input_csv, np.arange(start=start, stop=stop))

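`Chunk` now re-reads its own slice of the input CSV on demand (`indices` and `data`) instead of being handed a pre-read index, which is why the constructor takes `input_csv`. The `skiprows`/`nrows` arithmetic in `data` has to account for the header occupying the first physical line of the file; a small self-contained check, assuming 0-based data-row numbers:

```python
import numpy as np

row_numbers = np.arange(start=200, stop=300)  # data rows 200..299

start = min(row_numbers) + 1           # header + rows 0..199 -> skip 201 lines
nrows = max(row_numbers) - start + 2   # 299 - 201 + 2 = 100 rows to read

assert (start, nrows) == (201, 100)
# pd.read_csv(..., skiprows=start, nrows=nrows, names=columns) then yields
# exactly rows 200..299; names= re-attaches the header that skiprows dropped.
```
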
timewise/io/config.py
CHANGED
@@ -7,6 +7,7 @@ from .download import Downloader
 from ..query import QueryType
 from ..backend import BackendType
 from ..types import TYPE_MAP
+from ..util.path import expand
 
 
 class DownloadConfig(BaseModel):
@@ -19,15 +20,19 @@ class DownloadConfig(BaseModel):
 
     service_url: str = "https://irsa.ipac.caltech.edu/TAP"
 
+    @property
+    def expanded_input_csv(self) -> Path:
+        return expand(self.input_csv)
+
     @model_validator(mode="after")
     def validate_input_csv_columns(self) -> "DownloadConfig":
         """Ensure that the input CSV contains all columns required by queries."""
         # only validate if the CSV actually exists
-        if not self.input_csv.exists():
-            raise ValueError(f"CSV file does not exist: {self.input_csv}")
+        if not self.expanded_input_csv.exists():
+            raise ValueError(f"CSV file does not exist: {self.expanded_input_csv}")
 
         # read just the header and first 10 lines
-        input_table = pd.read_csv(self.input_csv, nrows=10)
+        input_table = pd.read_csv(self.expanded_input_csv, nrows=10)
 
         missing_columns = set()
         wrong_dtype = set()
@@ -41,7 +46,7 @@
         except Exception:
             wrong_dtype.add(col)
 
-        msg = f"CSV file {self.input_csv}: "
+        msg = f"CSV file {self.expanded_input_csv}: "
         if missing_columns:
             raise KeyError(msg + f"Missing required columns: {sorted(missing_columns)}")
         if wrong_dtype:
@@ -55,7 +60,7 @@
     def build_downloader(self) -> Downloader:
         return Downloader(
             service_url=self.service_url,
-            input_csv=self.input_csv,
+            input_csv=self.expanded_input_csv,
             chunk_size=self.chunk_size,
             backend=self.backend,
             queries=self.queries,

timewise/io/download.py
CHANGED
@@ -106,25 +106,10 @@ class Downloader:
     # ----------------------------
     # TAP submission and download
     # ----------------------------
-    def get_chunk_data(self, chunk: Chunk) -> pd.DataFrame:
-        start = (
-            min(cast(Sequence[int], chunk.row_numbers)) + 1
-        )  # plus one to always skip header line
-        nrows = (
-            max(cast(Sequence[int], chunk.row_numbers)) - start + 2
-        )  # plus one: skip header, plus one:
-
-        columns = list(pd.read_csv(self.input_csv, nrows=0).columns)
-        return pd.read_csv(
-            filepath_or_buffer=self.input_csv,
-            skiprows=start,
-            nrows=nrows,
-            names=columns,
-        )
 
     def submit_tap_job(self, query: Query, chunk: Chunk) -> TAPJobMeta:
         adql = query.adql
-        chunk_df = self.get_chunk_data(chunk)
+        chunk_df = chunk.data
 
         assert all(chunk_df.index.isin(chunk.indices)), (
             "Some inputs loaded from wrong chunk!"

timewise/io/stable_tap.py
CHANGED
@@ -3,6 +3,8 @@ import backoff
 import pyvo as vo
 from xml.etree import ElementTree
 
+import requests
+
 from timewise.util.backoff import backoff_hndlr
 
 
@@ -20,6 +22,12 @@ class StableAsyncTAPJob(vo.dal.AsyncTAPJob):
         self.submit_response = None
 
     @classmethod
+    @backoff.on_exception(
+        backoff.expo,
+        requests.exceptions.HTTPError,
+        max_tries=5,
+        on_backoff=backoff_hndlr,
+    )
     def create(
         cls,
         baseurl,
@@ -89,6 +97,15 @@
     def phase(self):
         return super(StableAsyncTAPJob, self).phase
 
+    @backoff.on_exception(
+        backoff.expo,
+        vo.dal.DALServiceError,
+        max_tries=50,
+        on_backoff=backoff_hndlr,
+    )
+    def _update(self, *args, **kwargs):
+        return super(StableAsyncTAPJob, self)._update(*args, **kwargs)
+
 
 class StableTAPService(vo.dal.TAPService):
     """

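`create` and `_update` are now wrapped with the `backoff` package: when the listed exception is raised, the call is retried with exponentially growing waits until it succeeds or `max_tries` attempts are used up, and the handler is invoked before each wait. A minimal self-contained sketch of the decorator's behaviour (`hndlr` stands in for `timewise.util.backoff.backoff_hndlr`):

```python
import backoff

attempts = {"n": 0}


def hndlr(details):
    # backoff passes a details dict including the attempt count and next wait
    print(f"retry {details['tries']} after waiting {details['wait']:.1f}s")


@backoff.on_exception(backoff.expo, RuntimeError, max_tries=5, on_backoff=hndlr)
def flaky():
    attempts["n"] += 1
    if attempts["n"] < 3:
        raise RuntimeError("transient failure")
    return "ok"


print(flaky())  # backs off twice, then succeeds on the third attempt
```
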
timewise/plot/lightcurve.py
CHANGED
@@ -49,6 +49,9 @@ def plot_lightcurve(
         markeredgecolor="k",
         ecolor="k",
         capsize=2,
+        zorder=3,
+        barsabove=True,
+        elinewidth=.5
     )
     ax.scatter(
         stacked_lightcurve[keys.MEAN + "_mjd"][ul_mask_stacked],
@@ -75,6 +78,7 @@
         c=colors[b],
         markersize=4,
         alpha=0.3,
+        zorder=2
     )
 
     single_ul_m = m & ul_mask_raw
@@ -90,6 +94,7 @@
         alpha=0.3,
         s=1,
         label=label,
+        zorder=1,
     )
 
     except KeyError as e:

timewise/process/keys.py
CHANGED
@@ -1,10 +1,14 @@
+# keys for stacked lightcurve processing
 MEAN = "mean"
 MEDIAN = "median"
 RMS = "rms"
 UPPER_LIMIT = "ul"
 NPOINTS = "npoints"
 ZEROPOINT_EXT = "zeropoint"
-FLUX_EXT = "flux"
 FLUX_DENSITY_EXT = "fluxdensity"
+KSTEST_NORM_EXT = "kstest_norm_pvalue"
+
+# keys of the single exposure lightcurve
+FLUX_EXT = "flux"
 MAG_EXT = "mpro"
 ERROR_EXT = "sig"

timewise/process/stacking.py
CHANGED
@@ -1,7 +1,8 @@
 import logging
-from typing import cast, Dict, Any
+from typing import cast, Dict, Any, Literal
 
 from scipy import stats
+from scipy.special import gamma
 import numpy as np
 from numpy import typing as npt
 import pandas as pd
@@ -20,6 +21,26 @@ MAGNITUDE_ZEROPOINTS: Dict[str, float] = {"w1": 20.752, "w2": 19.596}
 FLUX_ZEROPOINTS = {"w1": 309.54, "w2": 171.787}
 
 
+def std_debias_factor(n: npt.NDArray[np.int64]) -> npt.NDArray[np.float64]:
+    # Cureton The American Statistician 22, 22–22 (1968). https://www.jstor.org/stable/2681876
+    return gamma((n - 1) / 2) / np.sqrt(2 / (n - 1)) / gamma(n / 2)
+
+
+def t_distribution_correction(n: npt.NDArray[np.int64]) -> npt.NDArray[np.float64]:
+    return stats.t.interval(0.68, df=n - 1)[1]  # type: ignore
+
+
+def no_correction(n: npt.NDArray[np.int64]) -> npt.NDArray[np.float64]:
+    return np.ones_like(n, dtype=float)
+
+
+CORRECTION_FUNCTIONS = {
+    "debias": std_debias_factor,
+    "tdist": t_distribution_correction,
+    "none": no_correction,
+}
+
+
 def calculate_epochs(
     f: pd.Series,
     e: pd.Series,

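The three correction options address the fact that the sample standard deviation systematically underestimates the true scatter when a visit contains only a few detections: for a normal sample, E[s] = c4(n)·σ, so `std_debias_factor` (which equals 1/c4(n)) removes that bias, while `t_distribution_correction` instead widens the error to the upper bound of the 68% interval of a Student's t distribution with n − 1 degrees of freedom. A quick numerical check of the small-n behaviour (a sketch; printed values rounded):

```python
import numpy as np
from scipy import stats
from scipy.special import gamma


def std_debias_factor(n):
    # 1 / c4(n); for n = 2 this is sqrt(pi / 2) ~ 1.2533
    return gamma((n - 1) / 2) / np.sqrt(2 / (n - 1)) / gamma(n / 2)


n = np.array([2, 5, 10, 100])
print(std_debias_factor(n))                 # -> [1.2533 1.0638 1.0281 1.0025]
print(stats.t.interval(0.68, df=n - 1)[1])  # "tdist": largest for small n,
                                            # approaching the normal value for large n
```
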
@@ -29,6 +50,11 @@
     outlier_threshold: float,
     outlier_quantile: float,
     outlier_mask: npt.NDArray[np.bool_] | None = None,
+    mean_name: Literal["mean", "median"] = "median",
+    std_name: Literal["std", "sdom", "sdom-1"] = "sdom-1",
+    correction_name: Literal["tdist", "debias", "none"] = "tdist",
+    calculate_pvalues: bool = False,
+    use_single_exposure_errors: bool = True,
 ) -> tuple[
     npt.NDArray[np.float64],
     npt.NDArray[np.float64],
@@ -36,6 +62,7 @@
     npt.NDArray[np.bool_],
     npt.NDArray[np.bool_],
     npt.NDArray[np.int64],
+    npt.NDArray[np.float64],
 ]:
     """
     Calculates the visits within a raw lightcurve.
@@ -50,8 +77,23 @@
     :type counts: np.array
     :param remove_outliers: whether to remove outliers
     :type remove_outliers: bool
+    :param outlier_threshold: threshold to identify outliers
+    :type outlier_threshold: float
+    :param outlier_quantile: quantile that the outlier_threshold is multiplied with
+    :type outlier_quantile: float
     :param outlier_mask: the outlier mask
-    :type outlier_mask: np.array
+    :type outlier_mask: np.array, optional
+    :param mean_name: name of the numpy function to calculate the mean, defaults to "mean"
+    :type mean_name: str, optional
+    :param std_name: name of the function to calculate the stacked error, defaults to "std"
+    :type std_name: str, optional
+    :param correction_name: name of the correction function to apply to the standard deviation, defaults to "debias"
+    :type correction_name: str, optional
+    :param calculate_pvalues: if true, calculate ks-test p-values to check consistency with normal distribution per visit
+    :type calculate_pvalues: bool
+    :param use_single_exposure_errors:
+        if true, use the maximum of the RMS and the combined single exposure measurements as the final uncertainty
+    :type use_single_exposure_errors: bool
     :return: the epoch
     :rtype: float
     """
@@ -64,6 +106,7 @@
             np.array([]),
             np.array([]),
             np.array([]),
+            np.array([]),
         )
 
     u_lims = pd.isna(e)
@@ -97,6 +140,14 @@
     n_loops = 0
 
     # recalculate uncertainty and median as long as no outliers left
+    mean_function = np.mean if mean_name == "mean" else np.median
+
+    # select function to use to correct standard deviation with
+    bias_correction_function = CORRECTION_FUNCTIONS[correction_name]
+
+    one_points_mask = None
+    visits_at_least_two_point = []
+
     while n_remaining_outlier > 0:
         # make a mask of values to use
         use_mask = ~outlier_mask & use_mask_ul & ~nan_mask  # type: ignore[operator]
@@ -109,7 +160,7 @@
         visits_zero_points = np.unique(visit_mask[zero_points_mask[visit_mask]])
         median[visits_at_least_one_point] = np.array(
             [
-                np.median(f[(visit_mask == i) & use_mask])
+                mean_function(f[(visit_mask == i) & use_mask])
                 for i in visits_at_least_one_point
             ]
         )
@@ -134,10 +185,20 @@
         one_points_mask = n_points <= 1
         # calculate standard deviation
         std = np.zeros_like(counts, dtype=float)
+        extra_factor = (
+            1
+            if std_name == "std"
+            else 1 / n_points[~one_points_mask]
+            if std_name == "sdom"
+            else 1 / (n_points[~one_points_mask] - 1)
+        )
         std[~one_points_mask] = (
-            np.sqrt(
-
-
+            np.sqrt(
+                mean_deviation[~one_points_mask]
+                / (n_points[~one_points_mask] - 1)
+                * extra_factor
+            )
+            * bias_correction_function(n_points[~one_points_mask])
             # for visits with small number of detections we have to correct according to the t distribution
         )
         std[one_points_mask] = -np.inf
@@ -155,8 +216,14 @@
             single_exp_measurement_errors[n_points > 0] / n_points[n_points > 0]
         )
         e_meas[zero_points_mask] = np.nan
-        # take the maximum value of the measured single exposure errors and the standard deviation
-        u = np.maximum(std, e_meas)
+
+        if use_single_exposure_errors:
+            # take the maximum value of the measured single exposure errors and the standard deviation
+            u = np.maximum(std, e_meas)
+        else:
+            # take the rms as the measurement uncertainty except if there is only one single exposure
+            u = std
+            u[one_points_mask] = e_meas[one_points_mask]
 
         # Estimate the spread of the flux.
         # To be robust against outliers, do that with quantiles instead of std
@@ -187,7 +254,29 @@
         if n_loops > 20:
             raise Exception(f"{n_loops}!")
 
-    return median, u, bin_ulim_bool, outlier_mask, use_mask, n_points
+    # -------------------- calculate std for crosscheck -------------------- #
+    if calculate_pvalues:
+        npstd = np.zeros_like(counts, dtype=float)
+        npstd[~one_points_mask] = np.array(  # type: ignore[operator]
+            [np.std(f[(visit_mask == i) & use_mask]) for i in visits_at_least_two_point]
+        )
+        npstd[one_points_mask] = np.nan
+
+        # ---------------- calculate compatibility with gaussian ---------------- #
+        pvalues = np.ones_like(counts, dtype=float)
+        pvalues[~one_points_mask] = np.array(  # type: ignore[operator]
+            [
+                stats.kstest(
+                    f[(visit_mask == i) & use_mask],
+                    stats.norm(median[i], npstd[i]).cdf,  # type: ignore
+                ).pvalue
+                for i in visits_at_least_two_point
+            ]
+        )
+    else:
+        pvalues = np.full_like(counts, -999)
+
+    return median, u, bin_ulim_bool, outlier_mask, use_mask, n_points, pvalues
@@ -195,6 +284,12 @@ def stack_visits(
     outlier_threshold: float,
     outlier_quantile: float,
     clean_outliers: bool = True,
+    mean_name: Literal["mean", "median"] = "median",
+    std_name: Literal["std", "sdom", "sdom-1"] = "sdom-1",
+    correction_name: Literal["tdist", "debias", "none"] = "tdist",
+    calculate_pvalues: bool = False,
+    use_single_exposure_errors: bool = True,
+    median_zeropoint_per_visit: bool = True,
 ):
     """
     Combine the data by visits of the satellite of one region in the sky.
@@ -207,6 +302,27 @@
 
     :param lightcurve: the raw lightcurve
     :type lightcurve: pandas.DataFrame
+    :param outlier_threshold: threshold to identify outliers
+    :type outlier_threshold: float
+    :param outlier_quantile: quantile that the outlier_threshold is multiplied with
+    :type outlier_quantile: float
+    :param clean_outliers:
+        if True, remove outliers that are outlier_threshold x outlier_quantile far away from the mean per visit,
+        default is True
+    :type clean_outliers: bool
+    :param mean_name: name of the numpy function to calculate the mean, defaults to "mean"
+    :type mean_name: str, optional
+    :param std_name: name of the function to calculate the stacked error, defaults to "std"
+    :type std_name: str, optional
+    :param correction_name: name of the correction function to apply to the standard deviation, defaults to "debias"
+    :type correction_name: str, optional
+    :param calculate_pvalues: if true, calculate ks-test p-values to check consistency with normal distribution per visit
+    :type calculate_pvalues: bool
+    :param use_single_exposure_errors:
+        if true, use the maximum of the RMS and the combined single exposure measurements as the final uncertainty
+    :type use_single_exposure_errors: bool
+    :param median_zeropoint_per_visit: if true, use the median zeropoint per visit instead of the individual exposure ones
+    :type median_zeropoint_per_visit: bool
     :return: the stacked lightcurve
     :rtype: pandas.DataFrame
     """
@@ -236,15 +352,22 @@
     remove_outliers = lum_ext == keys.FLUX_EXT and clean_outliers
     outlier_mask = outlier_masks.get(keys.FLUX_EXT, None)
 
-    mean, u, bin_ulim_bool, outlier_mask, use_mask, n_points = calculate_epochs(
-        f,
-        e,
-        visit_map,
-        counts,
-        remove_outliers=remove_outliers,
-        outlier_mask=outlier_mask,
-        outlier_quantile=outlier_quantile,
-        outlier_threshold=outlier_threshold,
+    mean, u, bin_ulim_bool, outlier_mask, use_mask, n_points, p_values = (
+        calculate_epochs(
+            f,
+            e,
+            visit_map,
+            counts,
+            remove_outliers=remove_outliers,
+            outlier_mask=outlier_mask,
+            outlier_quantile=outlier_quantile,
+            outlier_threshold=outlier_threshold,
+            mean_name=mean_name,
+            std_name=std_name,
+            correction_name=correction_name,
+            calculate_pvalues=calculate_pvalues,
+            use_single_exposure_errors=use_single_exposure_errors,
+        )
     )
     n_outliers = np.sum(outlier_mask)
 
@@ -257,6 +380,7 @@
         stacked_data[f"{b}{lum_ext}{keys.RMS}"] = u
         stacked_data[f"{b}{lum_ext}{keys.UPPER_LIMIT}"] = bin_ulim_bool
         stacked_data[f"{b}{lum_ext}{keys.NPOINTS}"] = n_points
+        stacked_data[f"{b}{lum_ext}{keys.KSTEST_NORM_EXT}"] = p_values
 
         outlier_masks[lum_ext] = outlier_mask
         use_masks[lum_ext] = use_mask
@@ -289,20 +413,27 @@
     # if the visit only has upper limits then use the fall-back zeropoint
     zps_median[bin_ulim_bools[keys.FLUX_EXT]] = MAGNITUDE_ZEROPOINTS[b]
 
+    if median_zeropoint_per_visit:
+        use_zp = zps_median[visit_map]
+    else:
+        zp_nan_mask = np.isnan(zps)
+        zps[zp_nan_mask] = zps_median[visit_map[zp_nan_mask]]
+        use_zp = zps
+
     # --------------- calculate flux density from instrument flux ---------------- #
     # get the instrument flux [digital numbers], i.e. source count
     inst_fluxes_e = lightcurve[f"{b}{keys.ERROR_EXT}{keys.FLUX_EXT}"]
 
     # calculate the proportionality constant between flux density and source count
     mag_zp = FLUX_ZEROPOINTS[b] * 1e3  # in mJy
-    flux_dens_const = mag_zp * 10 ** (-zps_median[visit_map] / 2.5)
+    flux_dens_const = mag_zp * 10 ** (-use_zp / 2.5)
 
     # calculate flux densities from instrument counts
-    flux_densities = inst_fluxes * flux_dens_const
-    flux_densities_e = inst_fluxes_e * flux_dens_const
+    flux_densities = inst_fluxes * flux_dens_const
+    flux_densities_e = inst_fluxes_e * flux_dens_const
 
     # bin flux densities
-    mean_fd, u_fd, ul_fd, outlier_mask_fd, use_mask_fd, n_points_fd = (
+    mean_fd, u_fd, ul_fd, outlier_mask_fd, use_mask_fd, n_points_fd, p_values_fd = (
         calculate_epochs(
             flux_densities,
             flux_densities_e,
@@ -312,11 +443,17 @@
             outlier_mask=outlier_masks[keys.FLUX_EXT],
             outlier_threshold=outlier_threshold,
             outlier_quantile=outlier_quantile,
+            mean_name=mean_name,
+            std_name=std_name,
+            correction_name=correction_name,
+            calculate_pvalues=calculate_pvalues,
+            use_single_exposure_errors=use_single_exposure_errors,
         )
     )
     stacked_data[f"{b}{keys.MEAN}{keys.FLUX_DENSITY_EXT}"] = mean_fd
     stacked_data[f"{b}{keys.FLUX_DENSITY_EXT}{keys.RMS}"] = u_fd
     stacked_data[f"{b}{keys.FLUX_DENSITY_EXT}{keys.UPPER_LIMIT}"] = ul_fd
    stacked_data[f"{b}{keys.FLUX_DENSITY_EXT}{keys.NPOINTS}"] = n_points_fd
+    stacked_data[f"{b}{keys.FLUX_DENSITY_EXT}{keys.KSTEST_NORM_EXT}"] = p_values_fd
 
     return pd.DataFrame(stacked_data)

timewise/util/path.py
ADDED
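The diff view does not include the body of the new module. Judging from its call sites (`expand(self.base_path)`, `expand(self.input_csv)`, `expand(self.timewise_config_file)`), it normalises user-supplied paths; the following is only a plausible reconstruction, not the actual source:

```python
import os
from pathlib import Path


def expand(path) -> Path:
    # Hypothetical reconstruction: resolve environment variables and "~".
    return Path(os.path.expandvars(str(path))).expanduser()
```
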
{timewise-1.0.0a6.dist-info → timewise-1.0.0a8.dist-info}/METADATA
CHANGED

@@ -1,23 +1,26 @@
 Metadata-Version: 2.4
 Name: timewise
-Version: 1.0.0a6
+Version: 1.0.0a8
 Summary: Download WISE infrared data for many objects and process them with AMPEL
 License: MIT
 License-File: LICENSE
 Author: Jannis Necker
 Author-email: jannis.necker@gmail.com
-Requires-Python: >=3.11,<3.
+Requires-Python: >=3.11,<3.14
 Classifier: License :: OSI Approved :: MIT License
 Classifier: Programming Language :: Python :: 3
 Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
+Classifier: Programming Language :: Python :: 3.13
 Provides-Extra: ampel
 Provides-Extra: dev
 Provides-Extra: docs
-Requires-Dist: ampel-alerts (==0.10.
-Requires-Dist: ampel-core (
-Requires-Dist: ampel-
+Requires-Dist: ampel-alerts (==0.10.3a6) ; extra == "ampel"
+Requires-Dist: ampel-core (==0.10.6a17) ; extra == "ampel"
+Requires-Dist: ampel-interface (==0.10.5a8) ; extra == "ampel"
+Requires-Dist: ampel-photometry (==0.10.2a1) ; extra == "ampel"
 Requires-Dist: ampel-plot (>=0.9.1,<0.10.0) ; extra == "ampel"
-Requires-Dist: astropy (>=5.1,<
+Requires-Dist: astropy (>=5.1,<8.0.0)
 Requires-Dist: autodoc_pydantic[erdantic] (>=2.2.0,<3.0.0) ; extra == "docs"
 Requires-Dist: backoff (>=2.1.2,<3.0.0)
 Requires-Dist: coveralls (>=3.3.1,<4.0.0) ; extra == "dev"
@@ -60,7 +63,7 @@ Description-Content-Type: text/markdown
 This package downloads WISE data for positions on the sky and stacks single-exposure photometry per visit
 
 ## Prerequisites
-Python version 3.11.
+Python version 3.11, 3.12 or 3.13.
 
 If you want to not only download individual exposure photometry but also stack detections per visit (see below),
 you must have access to a running [MongoDB](https://www.mongodb.com/)*.
@@ -74,12 +77,12 @@ to get the MongoDB community edition. </sub>
 ### If you use timewise only for downloading
 The package can be installed via `pip` (but make sure to install the v1 pre-release):
 ```bash
-pip install --pre timewise==1.0.0a6
+pip install --pre timewise==1.0.0a8
 ```
 ### If you use timewise also for stacking individual exposures
 You must install with the `ampel` extra:
 ```bash
-pip install --pre 'timewise[ampel]==1.0.0a6'
+pip install --pre 'timewise[ampel]==1.0.0a8'
 ```
 To tell AMPEL which modules, aka units, to use, build the corresponding configuration file:
 ```bash

{timewise-1.0.0a6.dist-info → timewise-1.0.0a8.dist-info}/RECORD
CHANGED

@@ -1,36 +1,35 @@
-ampel/timewise/alert/TimewiseAlertSupplier.py,sha256=
-ampel/timewise/alert/load/TimewiseFileLoader.py,sha256=
-ampel/timewise/ingest/TiCompilerOptions.py,sha256=
+ampel/timewise/alert/TimewiseAlertSupplier.py,sha256=HLeQBCLrmVKxqvPc4E2TVj241KWXD9hHK7MJA3EUWJw,4178
+ampel/timewise/alert/load/TimewiseFileLoader.py,sha256=jmLppFEJwjQCSOMUtr-cSP9UbtT0VzqyAf4VDJjUf3w,4409
+ampel/timewise/ingest/TiCompilerOptions.py,sha256=j2eUxSkJEKxAjJCd9plxRRCg6Mv8dNHw72UvKyAxUsA,809
 ampel/timewise/ingest/TiDataPointShaper.py,sha256=KbAg-J0DkMaJA9K67xMY5Pe0SIP0LNKsBNAMnTYV9zA,3180
-ampel/timewise/ingest/TiMongoMuxer.py,sha256=
+ampel/timewise/ingest/TiMongoMuxer.py,sha256=Rg6yGI7AoPZhyXUsYmGD8ZiivyaLT-EJjG-U4J4yh-w,6467
 ampel/timewise/ingest/tags.py,sha256=ggn0Y6QUnjIwMzOlgKmPX1JPhmnvc21nUrORFhaR0uI,564
-ampel/timewise/t1/T1HDBSCAN.py,sha256=
-ampel/timewise/t1/TimewiseFilter.py,sha256=
-ampel/timewise/t2/T2StackVisits.py,sha256=
-ampel/timewise/util/AuxDiagnosticPlotter.py,sha256=
+ampel/timewise/t1/T1HDBSCAN.py,sha256=zVhcqQVAjlHgDBd9CJA6rGH3RCrCV84rsIIOWVxODKI,9528
+ampel/timewise/t1/TimewiseFilter.py,sha256=uDjNGsqPOISoFjSH8tX87I3UTW9qGX8rEmkyze8u2ug,1566
+ampel/timewise/t2/T2StackVisits.py,sha256=JBxFZLT-a-8jjenRw45ZVA_FvtcyOBmvMVcpYHyw_Yk,2574
+ampel/timewise/util/AuxDiagnosticPlotter.py,sha256=evvuH5vuEOFkfbkoSQw7ztiEqHos19PP91VQ_SCg_AU,1692
 ampel/timewise/util/pdutil.py,sha256=CJqeYqL2I6A5WnF5S342bwM4omJY-1XwT1XWY2uk3bc,1524
-
-timewise/__init__.py,sha256=OhKzin_f9k_6U1rnhXEMzgMu70gbjQImfaWjmZiKhjA,24
+timewise/__init__.py,sha256=uSPlLo1iPfJHnwgxEbL4lAcPqtOK8qgwkOr4GpzJSyo,24
 timewise/backend/__init__.py,sha256=w79nWfCw8n9g98CkHWJELmb4j9xblWC4DGZOV3_XhH4,134
 timewise/backend/base.py,sha256=dHxRzu2q3uQ0wdGmDxnn-p68Tp19qChue7HMEu56wNA,1080
-timewise/backend/filesystem.py,sha256=
-timewise/chunking.py,sha256=
+timewise/backend/filesystem.py,sha256=qFBmiOtfv8b3uhhiZSbH8bRIUrpS4f0P8Zl03VxpgAI,2598
+timewise/chunking.py,sha256=eRd3xof-5uHaxo8Pmwo4kr_cdZSfFpgTiPu-xuL-SGA,2404
 timewise/cli.py,sha256=W8SXlUQo7a5aS4_kW40KTs2UkTLQS12JuhgwkRb23DY,4172
 timewise/config.py,sha256=ZTSokRZMZDqBqVFV9DxvO-47yE9E9xWF48Rcjb0QG10,1044
 timewise/io/__init__.py,sha256=S7vb0glKJnw6ztOlrD-0Wma2bQZ2RwpmXDLFJLKBMVo,35
-timewise/io/config.py,sha256=
-timewise/io/download.py,sha256=
-timewise/io/stable_tap.py,sha256=
+timewise/io/config.py,sha256=u_nlfstvuz2_bfepRZo3y43XAqQ0ilReYg9e081CTQg,2410
+timewise/io/download.py,sha256=4uKWIPD61wLk6rPLiX72yjByDFgrRR7ww4LzsDfVugE,10019
+timewise/io/stable_tap.py,sha256=obwzic_xvB_hh-x6S5NT4bW847emabKxp8NH1WgAh-g,3899
 timewise/plot/__init__.py,sha256=cc00UenWC_8zAkBH-Ylhs3yCF49tAqZ2Al9MfOoXYDI,120
 timewise/plot/diagnostic.py,sha256=GRp-OUwz2yzzDu9qdViFg_e3Mxl5t1IvUJXZHuMKB2U,8276
-timewise/plot/lightcurve.py,sha256=
+timewise/plot/lightcurve.py,sha256=K0UvjmvbCSsRKP3Mu9X5L5AVUwqURR76ESJCIMEgE6Q,3924
 timewise/plot/panstarrs.py,sha256=X2ZULm7QT91cp4qociG0fVeI0saGLJwyKzL0141Vqis,8014
 timewise/plot/sdss.py,sha256=cc1zU-4XFkqc8xH5yqCyMsDJf9w_54B_6NeRMjr9Pt8,2622
 timewise/process/__init__.py,sha256=Yk-j1B1MnBuuaM6eFi43TxdWmFKbwFHvDsuQZt4yB_c,70
 timewise/process/config.py,sha256=d4hCvNb8JyJf2-0BTQgJaeOmHGZK_aHEDMb_xyqJgbU,1060
 timewise/process/interface.py,sha256=JOIh1wYuE1s9XdSfBRS7qUXexlr-p5UxdwaSjKlRZxE,5207
-timewise/process/keys.py,sha256=
-timewise/process/stacking.py,sha256=
+timewise/process/keys.py,sha256=MGnTHM4UGUSyX7BaCzoT_dg81NEAiLCxMLBRz3Kq8yc,319
+timewise/process/stacking.py,sha256=vVyrhrBRpqkptNryxNxWwEvXeQLH2GYNUtUxWggYfTk,18997
 timewise/process/template.yml,sha256=U_xKmygDl3E-viTgZEI8pQIJwWduB52SdI2X9vy61Yo,1037
 timewise/query/__init__.py,sha256=1OA_FlLI7O0aIDOXHpBKMOyMvYLCd0kQVkzoqbxouyE,242
 timewise/query/base.py,sha256=GjAoQZpaxVHcpCgPco1y5bpR0Jtd_sp_bYMI26WpAK0,1262
@@ -43,9 +42,10 @@ timewise/types.py,sha256=MB-aqpyos1aCS58QWBas34WcwFHiHOkrDj_d9_ZxuVc,667
 timewise/util/backoff.py,sha256=bU5yhsBO4U53XEPJ_32tgql9rq_Rzrv7w32nVQYHr64,272
 timewise/util/csv_utils.py,sha256=5i3Jd4c58-doPs1N_hyYZ8Uc2nvuk9nGwgPNZoNrlu0,298
 timewise/util/error_threading.py,sha256=uyV1Ri-wf87lpa17Xlp520B1V8DWHh3v9Mk97QrPmv0,2264
+timewise/util/path.py,sha256=-CcpYdjidg5FcZnRyQqrK_EKjc-7USYbJ8gtKatGl6k,254
 timewise/util/visits.py,sha256=4ZXwH7OXmp98tUPZplvuOtykB048kZoYXyuRTwQxT7w,998
-timewise-1.0.
-timewise-1.0.
-timewise-1.0.
-timewise-1.0.
-timewise-1.0.
+timewise-1.0.0a8.dist-info/METADATA,sha256=QnfpFxRvJDiL_a5Z0ElFj6_F1Mylk4bscAqE0SCNjxE,10308
+timewise-1.0.0a8.dist-info/WHEEL,sha256=zp0Cn7JsFoX2ATtOhtaFYIiE2rmFAD4OcMhtUki8W3U,88
+timewise-1.0.0a8.dist-info/entry_points.txt,sha256=mYh1HsUFbV7KT8kxiGqVtR3Pk0oEk6Bd-2c5FsYVhG4,45
+timewise-1.0.0a8.dist-info/licenses/LICENSE,sha256=sVoNJWiTlH-NarJx0wdsob468Pg3JE6vIIgll4lCa3E,1070
+timewise-1.0.0a8.dist-info/RECORD,,

conf/timewise/ampel.yml
DELETED
@@ -1,10 +0,0 @@
-unit:
-- ampel.timewise.alert.load.TimewiseFileLoader
-- ampel.timewise.alert.TimewiseAlertSupplier
-- ampel.timewise.ingest.TiDataPointShaper
-- ampel.timewise.ingest.TiMongoMuxer
-- ampel.timewise.ingest.TiCompilerOptions
-- ampel.timewise.t2.T2StackVisits
-- ampel.timewise.t1.T1HDBSCAN
-- ampel.timewise.util.AuxDiagnosticPlotter
-- ampel.timewise.t1.TimewiseFilter

{timewise-1.0.0a6.dist-info → timewise-1.0.0a8.dist-info}/WHEEL
File without changes

{timewise-1.0.0a6.dist-info → timewise-1.0.0a8.dist-info}/entry_points.txt
File without changes

{timewise-1.0.0a6.dist-info → timewise-1.0.0a8.dist-info}/licenses/LICENSE
File without changes