timewise 1.0.0a1__tar.gz → 1.0.0a5__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {timewise-1.0.0a1 → timewise-1.0.0a5}/PKG-INFO +49 -29
- {timewise-1.0.0a1 → timewise-1.0.0a5}/README.md +43 -23
- timewise-1.0.0a5/ampel/timewise/alert/TimewiseAlertSupplier.py +113 -0
- timewise-1.0.0a5/ampel/timewise/alert/load/TimewiseFileLoader.py +118 -0
- timewise-1.0.0a5/ampel/timewise/ingest/TiCompilerOptions.py +20 -0
- timewise-1.0.0a5/ampel/timewise/ingest/TiDataPointShaper.py +91 -0
- timewise-1.0.0a5/ampel/timewise/ingest/TiMongoMuxer.py +176 -0
- timewise-1.0.0a5/ampel/timewise/ingest/tags.py +15 -0
- timewise-1.0.0a5/ampel/timewise/t1/T1HDBSCAN.py +222 -0
- timewise-1.0.0a5/ampel/timewise/t1/TimewiseFilter.py +47 -0
- timewise-1.0.0a5/ampel/timewise/t2/T2StackVisits.py +56 -0
- timewise-1.0.0a5/ampel/timewise/util/AuxDiagnosticPlotter.py +47 -0
- timewise-1.0.0a5/ampel/timewise/util/pdutil.py +48 -0
- timewise-1.0.0a5/conf/timewise/ampel.yml +10 -0
- {timewise-1.0.0a1 → timewise-1.0.0a5}/pyproject.toml +16 -6
- timewise-1.0.0a5/timewise/__init__.py +1 -0
- timewise-1.0.0a5/timewise/cli.py +130 -0
- {timewise-1.0.0a1 → timewise-1.0.0a5}/timewise/io/download.py +11 -13
- {timewise-1.0.0a1 → timewise-1.0.0a5}/timewise/plot/diagnostic.py +1 -1
- {timewise-1.0.0a1 → timewise-1.0.0a5}/timewise/process/config.py +9 -5
- {timewise-1.0.0a1 → timewise-1.0.0a5}/timewise/process/interface.py +15 -3
- {timewise-1.0.0a1 → timewise-1.0.0a5}/timewise/process/stacking.py +21 -9
- {timewise-1.0.0a1 → timewise-1.0.0a5}/timewise/query/base.py +9 -2
- {timewise-1.0.0a1 → timewise-1.0.0a5}/timewise/util/visits.py +9 -9
- timewise-1.0.0a1/timewise/__init__.py +0 -1
- timewise-1.0.0a1/timewise/cli.py +0 -124
- {timewise-1.0.0a1 → timewise-1.0.0a5}/LICENSE +0 -0
- {timewise-1.0.0a1 → timewise-1.0.0a5}/timewise/backend/__init__.py +0 -0
- {timewise-1.0.0a1 → timewise-1.0.0a5}/timewise/backend/base.py +0 -0
- {timewise-1.0.0a1 → timewise-1.0.0a5}/timewise/backend/filesystem.py +0 -0
- {timewise-1.0.0a1 → timewise-1.0.0a5}/timewise/chunking.py +0 -0
- {timewise-1.0.0a1 → timewise-1.0.0a5}/timewise/config.py +0 -0
- {timewise-1.0.0a1 → timewise-1.0.0a5}/timewise/io/__init__.py +0 -0
- {timewise-1.0.0a1 → timewise-1.0.0a5}/timewise/io/config.py +0 -0
- {timewise-1.0.0a1 → timewise-1.0.0a5}/timewise/io/stable_tap.py +0 -0
- {timewise-1.0.0a1 → timewise-1.0.0a5}/timewise/plot/__init__.py +0 -0
- {timewise-1.0.0a1 → timewise-1.0.0a5}/timewise/plot/lightcurve.py +0 -0
- {timewise-1.0.0a1 → timewise-1.0.0a5}/timewise/plot/panstarrs.py +0 -0
- {timewise-1.0.0a1 → timewise-1.0.0a5}/timewise/plot/sdss.py +0 -0
- {timewise-1.0.0a1 → timewise-1.0.0a5}/timewise/process/__init__.py +0 -0
- {timewise-1.0.0a1 → timewise-1.0.0a5}/timewise/process/keys.py +0 -0
- {timewise-1.0.0a1 → timewise-1.0.0a5}/timewise/process/template.yml +0 -0
- {timewise-1.0.0a1 → timewise-1.0.0a5}/timewise/query/__init__.py +0 -0
- {timewise-1.0.0a1 → timewise-1.0.0a5}/timewise/query/positional.py +0 -0
- {timewise-1.0.0a1 → timewise-1.0.0a5}/timewise/tables/__init__.py +0 -0
- {timewise-1.0.0a1 → timewise-1.0.0a5}/timewise/tables/allwise_p3as_mep.py +0 -0
- {timewise-1.0.0a1 → timewise-1.0.0a5}/timewise/tables/base.py +0 -0
- {timewise-1.0.0a1 → timewise-1.0.0a5}/timewise/tables/neowiser_p1bs_psd.py +0 -0
- {timewise-1.0.0a1 → timewise-1.0.0a5}/timewise/types.py +0 -0
- {timewise-1.0.0a1 → timewise-1.0.0a5}/timewise/util/backoff.py +0 -0
- {timewise-1.0.0a1 → timewise-1.0.0a5}/timewise/util/csv_utils.py +0 -0
- {timewise-1.0.0a1 → timewise-1.0.0a5}/timewise/util/error_threading.py +0 -0
{timewise-1.0.0a1 → timewise-1.0.0a5}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: timewise
-Version: 1.0.0a1
+Version: 1.0.0a5
 Summary: Download WISE infrared data for many objects and process them with AMPEL
 License: MIT
 License-File: LICENSE
@@ -10,12 +10,13 @@ Requires-Python: >=3.11,<3.12
 Classifier: License :: OSI Approved :: MIT License
 Classifier: Programming Language :: Python :: 3
 Classifier: Programming Language :: Python :: 3.11
+Provides-Extra: ampel
 Provides-Extra: dev
 Provides-Extra: docs
-Requires-Dist: ampel-alerts (==0.10.3a5)
-Requires-Dist: ampel-core (>=0.10.4.post0,<0.11.0)
-Requires-Dist: ampel-photometry (>=0.10.1,<0.11.0)
-Requires-Dist: ampel-plot (>=0.9.1,<0.10.0)
+Requires-Dist: ampel-alerts (==0.10.3a5) ; extra == "ampel"
+Requires-Dist: ampel-core (>=0.10.4.post0,<0.11.0) ; extra == "ampel"
+Requires-Dist: ampel-photometry (>=0.10.1,<0.11.0) ; extra == "ampel"
+Requires-Dist: ampel-plot (>=0.9.1,<0.10.0) ; extra == "ampel"
 Requires-Dist: astropy (>=5.1,<6.0.0)
 Requires-Dist: autodoc_pydantic[erdantic] (>=2.2.0,<3.0.0) ; extra == "docs"
 Requires-Dist: backoff (>=2.1.2,<3.0.0)
@@ -36,7 +37,6 @@ Requires-Dist: ruff (>=0.13.0,<0.14.0) ; extra == "dev"
 Requires-Dist: scikit-image (>=0.19.3,<0.22.0)
 Requires-Dist: scikit-learn (>=1.3.0,<2.0.0)
 Requires-Dist: scipy-stubs (>=1.16.2.0,<2.0.0.0) ; extra == "dev"
-Requires-Dist: seaborn (>=0.11.2,<0.14.0)
 Requires-Dist: sphinx-rtd-theme (>=1.3.0,<2.0.0) ; extra == "docs"
 Requires-Dist: tqdm (>=4.64.0,<5.0.0)
 Requires-Dist: typer (>=0.19.2,<0.20.0)
@@ -60,23 +60,33 @@ Description-Content-Type: text/markdown
 This package downloads WISE data for positions on the sky and stacks single-exposure photometry per visit
 
 ## Prerequisites
+Python version 3.11.
 
-
+If you want to not only download individual exposure photometry but also stack detections per visit (see below),
+you must have access to a running [MongoDB](https://www.mongodb.com/)*.
+
+<sub>* On MacOS have alook at the custom `brew` tap
+[here](https://stackoverflow.com/questions/57856809/installing-mongodb-with-homebrew)
+to get the MongoDB community edition. </sub>
 
 ## Installation
-
+
+### If you use timewise only for downloading
+The package can be installed via `pip` (but make sure to install the v1 pre-release):
 ```bash
-pip install timewise
+pip install --pre timewise==1.0.0a5
+```
+### If you use timewise also for stacking individual exposures
+You must install with the `ampel` extra:
+```bash
+pip install --pre timewise[ampel]==1.0.0a5
 ```
-
 To tell AMPEL which modules, aka units, to use, build the corresponding configuration file:
 ```bash
 ampel config build -distributions ampel timewise -stop-on-errors 0 -out <path-to-ampel-config-file>
 ```
 
-##
-
-### Command line interface
+## Command line interface
 
 ```
 Usage: timewise [OPTIONS] COMMAND [ARGS]...
@@ -161,40 +171,50 @@ ampel:
 This configuration file will be the input to all subcommands. Downloading and stacking can be run together or separate.
 
 
-
-Run download, stacking, and export:
+### To only download the data:
 ```bash
-timewise
+timewise download <path-to-config-file>
 ```
+The photometry can be found in FITS files in the working directory specified in the configuration file\
+along with metadata JSON files. These tell `timewise` which quries have already completed (per chunk) so the
+download process can be interrupted and re-started at a later time.
+
+### Stack individual exposure by visits
+As mentioned above, this needs installation with the ampel extra.
+
 
-
-To only download the data:
+To **execute the stacking** after the download:
 ```bash
-timewise
+timewise process <path-to-config-file> <path-to-ampel-config-file>
 ```
 
-
+Make some **diagnostic plots** to check the datapoint selection and binning:
 ```bash
-timewise
+timewise plot <path-to-config-file> <indices-to-plot> <output-directory>
 ```
 
-
-Prepare an AMPEL job file for stacking the single-exposure data:
+As a shortcut, you can also run **download, stacking, and export in one command**:
 ```bash
-timewise
+timewise run-chain <path-to-config-file> <path-to-ampel-config-file> <output-directory>
 ```
-
+
+For more configuration options of the stacking, you can **run AMPEL manually**.
+
+1. Prepare an AMPEL job file for stacking the single-exposure data:
 ```bash
-
+timewise prepare-ampel <path-to-config-file>
 ```
+The result will contain the path to the prepared AMPEL job file.
 
-
-To check the datapoint selection and binning, take a quick look at the data:
+2. Run the AMPEL job
 ```bash
-
+ampel job -config <path-to-ampel-config-file> -schema <path-to-ampel-job-file>
 ```
 
 
+
+
+
 ## Citation
 If you use `timewise` please make sure to cite [Necker et al. A&A 695, A228 (2025)](https://www.aanda.org/articles/aa/abs/2025/03/aa51340-24/aa51340-24.html).
 Additionally, you might want to include a reference to the specific version you are using: [](https://zenodo.org/badge/latestdoi/449677569)
{timewise-1.0.0a1 → timewise-1.0.0a5}/README.md
@@ -10,23 +10,33 @@
 This package downloads WISE data for positions on the sky and stacks single-exposure photometry per visit
 
 ## Prerequisites
+Python version 3.11.
 
-
+If you want to not only download individual exposure photometry but also stack detections per visit (see below),
+you must have access to a running [MongoDB](https://www.mongodb.com/)*.
+
+<sub>* On MacOS have alook at the custom `brew` tap
+[here](https://stackoverflow.com/questions/57856809/installing-mongodb-with-homebrew)
+to get the MongoDB community edition. </sub>
 
 ## Installation
-
+
+### If you use timewise only for downloading
+The package can be installed via `pip` (but make sure to install the v1 pre-release):
 ```bash
-pip install timewise
+pip install --pre timewise==1.0.0a5
+```
+### If you use timewise also for stacking individual exposures
+You must install with the `ampel` extra:
+```bash
+pip install --pre timewise[ampel]==1.0.0a5
 ```
-
 To tell AMPEL which modules, aka units, to use, build the corresponding configuration file:
 ```bash
 ampel config build -distributions ampel timewise -stop-on-errors 0 -out <path-to-ampel-config-file>
 ```
 
-##
-
-### Command line interface
+## Command line interface
 
 ```
 Usage: timewise [OPTIONS] COMMAND [ARGS]...
@@ -111,40 +121,50 @@ ampel:
 This configuration file will be the input to all subcommands. Downloading and stacking can be run together or separate.
 
 
-
-Run download, stacking, and export:
+### To only download the data:
 ```bash
-timewise
+timewise download <path-to-config-file>
 ```
+The photometry can be found in FITS files in the working directory specified in the configuration file\
+along with metadata JSON files. These tell `timewise` which quries have already completed (per chunk) so the
+download process can be interrupted and re-started at a later time.
+
+### Stack individual exposure by visits
+As mentioned above, this needs installation with the ampel extra.
+
 
-
-To only download the data:
+To **execute the stacking** after the download:
 ```bash
-timewise
+timewise process <path-to-config-file> <path-to-ampel-config-file>
 ```
 
-
+Make some **diagnostic plots** to check the datapoint selection and binning:
 ```bash
-timewise
+timewise plot <path-to-config-file> <indices-to-plot> <output-directory>
 ```
 
-
-Prepare an AMPEL job file for stacking the single-exposure data:
+As a shortcut, you can also run **download, stacking, and export in one command**:
 ```bash
-timewise
+timewise run-chain <path-to-config-file> <path-to-ampel-config-file> <output-directory>
 ```
-
+
+For more configuration options of the stacking, you can **run AMPEL manually**.
+
+1. Prepare an AMPEL job file for stacking the single-exposure data:
 ```bash
-
+timewise prepare-ampel <path-to-config-file>
 ```
+The result will contain the path to the prepared AMPEL job file.
 
-
-To check the datapoint selection and binning, take a quick look at the data:
+2. Run the AMPEL job
 ```bash
-
+ampel job -config <path-to-ampel-config-file> -schema <path-to-ampel-job-file>
 ```
 
 
+
+
+
 ## Citation
 If you use `timewise` please make sure to cite [Necker et al. A&A 695, A228 (2025)](https://www.aanda.org/articles/aa/abs/2025/03/aa51340-24/aa51340-24.html).
 Additionally, you might want to include a reference to the specific version you are using: [](https://zenodo.org/badge/latestdoi/449677569)
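The commands above drive everything through the CLI. For scripted use, here is a hypothetical Python sketch of the same read-back flow, built only from the calls visible in the new `TimewiseFileLoader` shown further down in this diff (`TimewiseConfig.from_yaml`, `build_downloader`, `iter_tasks_per_chunk`, `backend.load_data`); the config path is a placeholder and the photometry must already have been downloaded:

```python
# Hypothetical sketch, not part of the package docs: iterate over already-downloaded
# photometry chunk by chunk, mirroring what TimewiseFileLoader does internally.
from timewise.config import TimewiseConfig

config = TimewiseConfig.from_yaml("timewise_config.yml")  # placeholder path
downloader = config.download.build_downloader()

for tasks in downloader.iter_tasks_per_chunk():       # one list of tasks per chunk
    for task in tasks:
        table = downloader.backend.load_data(task)     # photometry table for this task
        print(task, len(table))
```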
timewise-1.0.0a5/ampel/timewise/alert/TimewiseAlertSupplier.py
@@ -0,0 +1,113 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# File: timewise/ampel/timewise/alert/TimewiseAlertSupplier.py
+# License: BSD-3-Clause
+# Author: Jannis Necker <jannis.necker@gmail.com>
+# Date: 16.09.2025
+# Last Modified Date: 16.09.2025
+# Last Modified By: Jannis Necker <jannis.necker@gmail.com>
+
+import sys
+from hashlib import blake2b
+from typing import Literal, List
+
+import pandas as pd
+
+from bson import encode
+
+from ampel.alert.AmpelAlert import AmpelAlert
+from ampel.alert.BaseAlertSupplier import BaseAlertSupplier
+from ampel.view.ReadOnlyDict import ReadOnlyDict
+
+
+class TimewiseAlertSupplier(BaseAlertSupplier):
+    """
+    Iterable class that, for each transient name provided by the underlying alert_loader
+    returns a PhotoAlert instance.
+    """
+
+    stat_pps: int = 0
+    stat_uls: int = 0
+
+    dpid: Literal["hash", "inc"] = "hash"
+    # external_directory: Optional[ str ]
+    # deserialize: None | Literal["avro", "json"]
+
+    bands: List[str] = ["w1", "w2"]
+
+    def __init__(self, **kwargs) -> None:
+        super().__init__(**kwargs)
+        self.counter = 0 if self.dpid == "hash" else 1
+
+    def __next__(self) -> AmpelAlert:
+        """
+        :returns: a dict with a structure that AlertProcessor understands
+        :raises StopIteration: when alert_loader dries out.
+        :raises AttributeError: if alert_loader was not set properly before this method is called
+        """
+        table: pd.DataFrame = self._deserialize(next(self.alert_loader))  # type: ignore
+
+        stock_ids = table["stock_id"].unique()
+        assert len(stock_ids) == 1
+        stock_id = stock_ids[0]
+
+        # make the tables into a list of dictionaries for ampel to understand
+        all_ids = b""
+        pps = []
+
+        # remove the _ep at the end of AllWISE MEP data
+        columns_to_rename = [c for c in table.columns if c.endswith("_ep")]
+        if len(columns_to_rename):
+            rename = {
+                c: c.replace("_ep", "")
+                for c in columns_to_rename
+                if c.replace("_ep", "") not in table.columns
+            }
+            if rename:
+                # in this case only the allwise column eith the _ep extension exists
+                # and we can simply rename the columns
+                table.rename(columns=rename, inplace=True)
+
+            move = {
+                c: c.replace("_ep", "")
+                for c in columns_to_rename
+                if c.replace("_ep", "") in table.columns
+            }
+            if move:
+                # In this case, the columns already exists because the neowise data is present
+                # we have to insert the values form the columns with the _ep extension into the
+                # respective neowise columns
+                for c, nc in move.items():
+                    na_mask = table[nc].isna()
+                    table.loc[na_mask, nc] = table[c][na_mask]
+                pd.options.mode.chained_assignment = None
+                table.drop(columns=[c for c in move], inplace=True)
+                pd.options.mode.chained_assignment = "warn"
+
+        for i, row in table.iterrows():
+            # convert table row to dict, convert data types from numpy to native python
+            # Respect masked fields and convert to None
+            pp = {k: None if pd.isna(v) else v for k, v in row.to_dict().items()}
+            pp_hash = blake2b(encode(pp), digest_size=7).digest()
+            if self.counter:
+                pp["candid"] = self.counter
+                self.counter += 1
+            else:
+                pp["candid"] = int.from_bytes(pp_hash, byteorder=sys.byteorder)
+
+            all_ids += pp_hash
+            pps.append(ReadOnlyDict(pp))
+
+        if not pps:
+            return self.__next__()
+
+        # Update stats
+        self.stat_pps += len(pps)
+
+        return AmpelAlert(
+            id=int.from_bytes(  # alert id
+                blake2b(all_ids, digest_size=7).digest(), byteorder=sys.byteorder
+            ),
+            stock=int(stock_id),  # internal ampel id
+            datapoints=tuple(pps),
+        )
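A minimal standalone sketch of the ID scheme the supplier uses above, assuming pymongo's `bson` is installed: each datapoint dict is BSON-encoded and hashed with `blake2b` (7-byte digest), and the alert ID is the hash of the concatenated datapoint hashes. The example datapoints and their field names are hypothetical.

```python
# Minimal sketch of TimewiseAlertSupplier's hash-based IDs (illustrative values only).
import sys
from hashlib import blake2b
from bson import encode

pps = [
    {"mjd": 58000.1, "w1_flux": 1.23, "table_name": "neowiser_p1bs_psd"},  # hypothetical datapoints
    {"mjd": 58180.6, "w1_flux": 1.19, "table_name": "neowiser_p1bs_psd"},
]

all_ids = b""
for pp in pps:
    pp_hash = blake2b(encode(pp), digest_size=7).digest()          # 7 bytes fits a signed 64-bit int
    pp["candid"] = int.from_bytes(pp_hash, byteorder=sys.byteorder)
    all_ids += pp_hash

alert_id = int.from_bytes(blake2b(all_ids, digest_size=7).digest(), byteorder=sys.byteorder)
print(alert_id, [pp["candid"] for pp in pps])
```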
timewise-1.0.0a5/ampel/timewise/alert/load/TimewiseFileLoader.py
@@ -0,0 +1,118 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# File: timewise/ampel/timewise/alert/load/TimewiseFileLoader.py
+# License: BSD-3-Clause
+# Author: Jannis Necker <jannis.necker@gmail.com>
+# Date: 16.09.2025
+# Last Modified Date: 16.09.2025
+# Last Modified By: Jannis Necker <jannis.necker@gmail.com>
+from typing import Dict, get_args
+
+import numpy as np
+import pandas as pd
+from astropy.table import Table, vstack
+from ampel.abstract.AbsAlertLoader import AbsAlertLoader
+from timewise.tables import TableType
+from timewise.config import TimewiseConfig
+from timewise.types import TaskID
+
+
+class TimewiseFileLoader(AbsAlertLoader[Dict]):
+    """
+    Load alerts from one of more files.
+    """
+
+    # path to timewise download config file
+    timewise_config_file: str
+
+    # column name of id
+    stock_id_column_name: str
+
+    chunks: list[int] | None = None
+
+    def __init__(self, **kwargs) -> None:
+        super().__init__(**kwargs)
+
+        self.logger.info(f"loading timewise config file {self.timewise_config_file}")
+        timewise_config = TimewiseConfig.from_yaml(self.timewise_config_file)
+        dl = timewise_config.download.build_downloader()
+        self._timewise_backend = dl.backend
+
+        # selecting tasks to run
+        _tasks = [tasks for tasks in dl.iter_tasks_per_chunk()]
+        if self.chunks is not None:
+            self._tasks = [_tasks[i] for i in self.chunks]
+        else:
+            self._tasks = _tasks
+        if self.logger:
+            self.logger.info(
+                f"Registering {len(self._tasks)} chunk(s) to load: {self._tasks}"
+            )
+
+        self._table_types = get_args(TableType.__origin__)  # type: ignore
+        self._gen = self.iter_stocks()
+
+    @staticmethod
+    def encode_result(res: pd.DataFrame) -> pd.DataFrame:
+        if isinstance(res, pd.Series):
+            return pd.DataFrame([res])
+        return res
+
+    def find_table_from_task(self, task: TaskID) -> TableType:
+        tables = [
+            t for t in self._table_types if t.model_fields["name"].default in str(task)
+        ]
+        assert len(tables) > 0, f"No matching table found for {task}!"
+        assert len(tables) < 2, f"More than one matching table found for {task}!"
+        self.logger.debug(
+            f"{task} is from table {tables[0].model_fields['name'].default}"
+        )
+        return tables[0]
+
+    def iter_stocks(self):
+        # emit all datapoints per stock id
+        # This way ampel runs not per datapoint but per object
+        backend = self._timewise_backend
+        for tasks in self._tasks:
+            data = []
+            for task in tasks:
+                self.logger.debug(f"reading {task}")
+                idata = backend.load_data(task)
+
+                # add table name
+                idata["table_name"] = (
+                    self.find_table_from_task(task).model_fields["name"].default
+                )
+
+                data.append(idata)
+
+            data = vstack(data).to_pandas()
+
+            # rename stock id column
+            data.rename(columns={self.stock_id_column_name: "stock_id"}, inplace=True)
+
+            # Find the indices for each stock id. This is much faster than making a mask
+            # each loop and accessing the table then. Shown below is a comparison.
+            # The top example is the access provided by pandas which would be
+            # again a factor 3 faster.
+            #
+            # In [45]: %timeit test_df()
+            # 5.62 μs ± 47.2 ns per loop (mean ± std. dev. of 7 runs, 100,000 loops each)
+            #
+            # In [46]: %timeit test_index()
+            # 14.6 μs ± 45 ns per loop (mean ± std. dev. of 7 runs, 100,000 loops each)
+            #
+            # In [47]: %timeit test_mask()
+            # 2.61 ms ± 18 μs per loop (mean ± std. dev. of 7 runs, 100 loops each)
+            data.set_index(data.stock_id, inplace=True)
+
+            # iterate over all stock ids
+            for stock_id in np.unique(data["stock_id"]):
+                selection = data.loc[stock_id]
+                yield self.encode_result(selection)
+
+    def __iter__(self):
+        return self
+
+    def __next__(self) -> pd.DataFrame:  # type: ignore
+        return next(self._gen)
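A small standalone pandas sketch (illustrative data, not part of the package) of the `set_index` + `.loc` grouping pattern that `iter_stocks` uses, including the `encode_result`-style fix-up for single-row selections:

```python
import numpy as np
import pandas as pd

# Toy photometry table; in the loader this comes from vstack(...).to_pandas().
data = pd.DataFrame({
    "stock_id": [1, 1, 2, 2, 2],
    "mjd": [58000.0, 58180.5, 58001.0, 58181.0, 58360.2],
})

# Index by stock_id once; repeated .loc lookups are far cheaper than building a boolean mask per loop.
data.set_index(data.stock_id, inplace=True)

for stock_id in np.unique(data["stock_id"]):
    selection = data.loc[stock_id]
    if isinstance(selection, pd.Series):        # a single matching row comes back as a Series
        selection = pd.DataFrame([selection])
    print(stock_id, len(selection))             # -> "1 2", then "2 3"
```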
timewise-1.0.0a5/ampel/timewise/ingest/TiCompilerOptions.py
@@ -0,0 +1,20 @@
+#!/usr/bin/env python
+# File: Ampel-ZTF/ampel/ztf/ingest/ZiCompilerOptions.py
+# License: BSD-3-Clause
+# Author: valery brinnel <firstname.lastname@gmail.com>
+# Date: 14.05.2021
+# Last Modified Date: 14.05.2021
+# Last Modified By: valery brinnel <firstname.lastname@gmail.com>
+
+from typing import Any
+
+from ampel.model.ingest.CompilerOptions import CompilerOptions
+
+
+class TiCompilerOptions(CompilerOptions):
+    stock: dict[str, Any] = {"tag": "TIMEWISE"}
+    t0: dict[str, Any] = {"tag": "TIMEWISE"}
+    t1: dict[str, Any] = {"tag": "TIMEWISE"}
+    state_t2: dict[str, Any] = {"tag": "TIMEWISE"}
+    point_t2: dict[str, Any] = {"tag": "TIMEWISE"}
+    stock_t2: dict[str, Any] = {"tag": "TIMEWISE"}
timewise-1.0.0a5/ampel/timewise/ingest/TiDataPointShaper.py
@@ -0,0 +1,91 @@
+#!/usr/bin/env python
+# File: timewise/ampel/timewise/ingest/TiDataPointShaper.py
+# License: BSD-3-Clause
+# Author: valery brinnel <firstname.lastname@gmail.com>
+# Date: 14.12.2017
+# Last Modified Date: 19.09.2025
+# Last Modified By: Jannis Necker <jannis.necker@gmail.com>
+
+from collections.abc import Iterable, Sequence
+from typing import Any
+
+from bson import encode
+
+from ampel.abstract.AbsT0Unit import AbsT0Unit
+from ampel.base.AmpelUnit import AmpelUnit
+from ampel.content.DataPoint import DataPoint
+from ampel.types import StockId, Tag
+from ampel.util.hash import hash_payload
+
+from ampel.timewise.ingest.tags import tags
+
+
+class TiDataPointShaperBase(AmpelUnit):
+    """
+    This class 'shapes' datapoints in a format suitable
+    to be saved into the ampel database
+    """
+
+    # JD2017 is used to define upper limits primary IDs
+    JD2017: float = 2457754.5
+    #: Byte width of datapoint ids
+    digest_size: int = 8
+
+    # Mandatory implementation
+    def process(self, arg: Iterable[dict[str, Any]], stock: StockId) -> list[DataPoint]:
+        """
+        :param arg: sequence of unshaped pps
+        IMPORTANT:
+        1) This method *modifies* the input dicts (it removes 'candid' and programpi),
+        even if the unshaped pps are ReadOnlyDict instances
+        2) 'stock' is not set here on purpose since it will conflict with the $addToSet operation
+        """
+
+        ret_list: list[DataPoint] = []
+        popitem = dict.pop
+
+        for photo_dict in arg:
+            # Photopoint
+            assert photo_dict.get("candid"), "photometry points does not have 'candid'!"
+            ret_list.append(
+                {  # type: ignore[typeddict-item]
+                    "id": photo_dict["candid"],
+                    "stock": stock,
+                    "tag": tags[photo_dict["table_name"]],
+                    "body": photo_dict,
+                }
+            )
+
+            popitem(photo_dict, "candid", None)
+
+        return ret_list
+
+    def _create_datapoint(
+        self, stock: StockId, tag: Sequence[Tag], body: dict[str, Any]
+    ) -> DataPoint:
+        """
+        Create a Datapoint from stock, body, and tags, using the hash of the body as id
+        """
+        # ensure that keys are ordered
+        sorted_body = dict(sorted(body.items()))
+        # The following is a comment from the original ampel.ztf.ingest.ZiDataPointShaperBase:
+        # This is not a complete DataPoint as (channel,meta) is missing, set later.
+        # Should these be optional? or added default?
+        return {  # type: ignore
+            "id": hash_payload(encode(sorted_body), size=-self.digest_size * 8),
+            "stock": stock,
+            "tag": [*tags[body["table_name"]], *tag],
+            "body": sorted_body,
+        }
+
+    def ul_identity(self, uld: dict[str, Any]) -> int:
+        """
+        This should not happen
+        """
+        raise NotImplementedError
+
+
+class TiDataPointShaper(TiDataPointShaperBase, AbsT0Unit):
+    def process(self, arg: Any, stock: None | StockId = None) -> list[DataPoint]:
+        assert stock is not None
+        return super().process(arg, stock)
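A short standalone sketch (using `blake2b` rather than ampel's `hash_payload`, and hypothetical field names) of why `_create_datapoint` sorts the body before encoding: the resulting datapoint ID then does not depend on the key order of the incoming dict.

```python
from hashlib import blake2b
from bson import encode

def body_digest(body: dict) -> str:
    # sort keys, BSON-encode, then hash -- the same ordering trick the shaper uses
    return blake2b(encode(dict(sorted(body.items()))), digest_size=8).hexdigest()

a = {"mjd": 58000.1, "w1_flux": 1.23, "table_name": "neowiser_p1bs_psd"}
b = {"table_name": "neowiser_p1bs_psd", "w1_flux": 1.23, "mjd": 58000.1}  # same content, other order

assert body_digest(a) == body_digest(b)   # identical IDs despite different insertion order
```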