timewise 1.0.0a8.tar.gz → 1.0.0a10.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (55)
  1. {timewise-1.0.0a8 → timewise-1.0.0a10}/PKG-INFO +14 -11
  2. {timewise-1.0.0a8 → timewise-1.0.0a10}/README.md +6 -4
  3. {timewise-1.0.0a8 → timewise-1.0.0a10}/ampel/timewise/alert/TimewiseAlertSupplier.py +4 -4
  4. {timewise-1.0.0a8 → timewise-1.0.0a10}/ampel/timewise/alert/load/TimewiseFileLoader.py +11 -1
  5. {timewise-1.0.0a8 → timewise-1.0.0a10}/ampel/timewise/ingest/TiMongoMuxer.py +126 -10
  6. {timewise-1.0.0a8 → timewise-1.0.0a10}/ampel/timewise/t1/T1HDBSCAN.py +2 -1
  7. {timewise-1.0.0a8 → timewise-1.0.0a10}/ampel/timewise/t1/TimewiseFilter.py +1 -1
  8. {timewise-1.0.0a8 → timewise-1.0.0a10}/pyproject.toml +8 -7
  9. timewise-1.0.0a10/timewise/__init__.py +1 -0
  10. {timewise-1.0.0a8 → timewise-1.0.0a10}/timewise/backend/base.py +2 -0
  11. {timewise-1.0.0a8 → timewise-1.0.0a10}/timewise/backend/filesystem.py +3 -0
  12. {timewise-1.0.0a8 → timewise-1.0.0a10}/timewise/chunking.py +2 -4
  13. {timewise-1.0.0a8 → timewise-1.0.0a10}/timewise/cli.py +9 -1
  14. {timewise-1.0.0a8 → timewise-1.0.0a10}/timewise/io/config.py +14 -10
  15. {timewise-1.0.0a8 → timewise-1.0.0a10}/timewise/io/download.py +36 -15
  16. {timewise-1.0.0a8 → timewise-1.0.0a10}/timewise/io/stable_tap.py +37 -10
  17. {timewise-1.0.0a8 → timewise-1.0.0a10}/timewise/plot/sdss.py +1 -3
  18. {timewise-1.0.0a8 → timewise-1.0.0a10}/timewise/process/interface.py +8 -2
  19. {timewise-1.0.0a8 → timewise-1.0.0a10}/timewise/process/stacking.py +1 -1
  20. timewise-1.0.0a10/timewise/query/__init__.py +11 -0
  21. timewise-1.0.0a10/timewise/query/by_allwise_cntr_and_position.py +49 -0
  22. {timewise-1.0.0a8 → timewise-1.0.0a10}/timewise/query/positional.py +0 -1
  23. timewise-1.0.0a10/timewise/tables/__init__.py +11 -0
  24. {timewise-1.0.0a8 → timewise-1.0.0a10}/timewise/tables/allwise_p3as_mep.py +3 -1
  25. timewise-1.0.0a10/timewise/tables/allwise_p3as_psd.py +24 -0
  26. {timewise-1.0.0a8 → timewise-1.0.0a10}/timewise/tables/neowiser_p1bs_psd.py +3 -1
  27. timewise-1.0.0a8/timewise/__init__.py +0 -1
  28. timewise-1.0.0a8/timewise/query/__init__.py +0 -6
  29. timewise-1.0.0a8/timewise/tables/__init__.py +0 -10
  30. timewise-1.0.0a8/timewise/util/backoff.py +0 -12
  31. {timewise-1.0.0a8 → timewise-1.0.0a10}/LICENSE +0 -0
  32. {timewise-1.0.0a8 → timewise-1.0.0a10}/ampel/timewise/ingest/TiCompilerOptions.py +0 -0
  33. {timewise-1.0.0a8 → timewise-1.0.0a10}/ampel/timewise/ingest/TiDataPointShaper.py +0 -0
  34. {timewise-1.0.0a8 → timewise-1.0.0a10}/ampel/timewise/ingest/tags.py +0 -0
  35. {timewise-1.0.0a8 → timewise-1.0.0a10}/ampel/timewise/t2/T2StackVisits.py +0 -0
  36. {timewise-1.0.0a8 → timewise-1.0.0a10}/ampel/timewise/util/AuxDiagnosticPlotter.py +0 -0
  37. {timewise-1.0.0a8 → timewise-1.0.0a10}/ampel/timewise/util/pdutil.py +0 -0
  38. {timewise-1.0.0a8 → timewise-1.0.0a10}/timewise/backend/__init__.py +0 -0
  39. {timewise-1.0.0a8 → timewise-1.0.0a10}/timewise/config.py +0 -0
  40. {timewise-1.0.0a8 → timewise-1.0.0a10}/timewise/io/__init__.py +0 -0
  41. {timewise-1.0.0a8 → timewise-1.0.0a10}/timewise/plot/__init__.py +0 -0
  42. {timewise-1.0.0a8 → timewise-1.0.0a10}/timewise/plot/diagnostic.py +0 -0
  43. {timewise-1.0.0a8 → timewise-1.0.0a10}/timewise/plot/lightcurve.py +0 -0
  44. {timewise-1.0.0a8 → timewise-1.0.0a10}/timewise/plot/panstarrs.py +0 -0
  45. {timewise-1.0.0a8 → timewise-1.0.0a10}/timewise/process/__init__.py +0 -0
  46. {timewise-1.0.0a8 → timewise-1.0.0a10}/timewise/process/config.py +0 -0
  47. {timewise-1.0.0a8 → timewise-1.0.0a10}/timewise/process/keys.py +0 -0
  48. {timewise-1.0.0a8 → timewise-1.0.0a10}/timewise/process/template.yml +0 -0
  49. {timewise-1.0.0a8 → timewise-1.0.0a10}/timewise/query/base.py +0 -0
  50. {timewise-1.0.0a8 → timewise-1.0.0a10}/timewise/tables/base.py +0 -0
  51. {timewise-1.0.0a8 → timewise-1.0.0a10}/timewise/types.py +0 -0
  52. {timewise-1.0.0a8 → timewise-1.0.0a10}/timewise/util/csv_utils.py +0 -0
  53. {timewise-1.0.0a8 → timewise-1.0.0a10}/timewise/util/error_threading.py +0 -0
  54. {timewise-1.0.0a8 → timewise-1.0.0a10}/timewise/util/path.py +0 -0
  55. {timewise-1.0.0a8 → timewise-1.0.0a10}/timewise/util/visits.py +0 -0
--- timewise-1.0.0a8/PKG-INFO
+++ timewise-1.0.0a10/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: timewise
-Version: 1.0.0a8
+Version: 1.0.0a10
 Summary: Download WISE infrared data for many objects and process them with AMPEL
 License: MIT
 License-File: LICENSE
@@ -15,21 +15,22 @@ Classifier: Programming Language :: Python :: 3.13
 Provides-Extra: ampel
 Provides-Extra: dev
 Provides-Extra: docs
-Requires-Dist: ampel-alerts (==0.10.3a6) ; extra == "ampel"
-Requires-Dist: ampel-core (==0.10.6a17) ; extra == "ampel"
+Requires-Dist: ampel-alerts (==0.10.4a0) ; extra == "ampel"
+Requires-Dist: ampel-core (==0.10.6a21) ; extra == "ampel"
 Requires-Dist: ampel-interface (==0.10.5a8) ; extra == "ampel"
 Requires-Dist: ampel-photometry (==0.10.2a1) ; extra == "ampel"
 Requires-Dist: ampel-plot (>=0.9.1,<0.10.0) ; extra == "ampel"
 Requires-Dist: astropy (>=5.1,<8.0.0)
 Requires-Dist: autodoc_pydantic[erdantic] (>=2.2.0,<3.0.0) ; extra == "docs"
 Requires-Dist: backoff (>=2.1.2,<3.0.0)
-Requires-Dist: coveralls (>=3.3.1,<4.0.0) ; extra == "dev"
+Requires-Dist: coveralls (>=4.0.0,<5.0.0) ; extra == "dev"
 Requires-Dist: jupyter[jupyter] (>=1.0.0,<2.0.0)
 Requires-Dist: jupyterlab[jupyter] (>=4.0.6,<5.0.0)
 Requires-Dist: matplotlib (>=3.5.3,<4.0.0)
+Requires-Dist: mongomock (>=4.3.0,<5.0.0) ; extra == "dev"
 Requires-Dist: mypy (>=1.18.2,<2.0.0) ; extra == "dev"
 Requires-Dist: myst-parser (>=1,<3) ; extra == "docs"
-Requires-Dist: numpy (>=1.23.2,<2.0.0)
+Requires-Dist: numpy (>=1.23.2,<3.0.0)
 Requires-Dist: pandas (>=1.4.3,<3.0.0)
 Requires-Dist: pandas-stubs (>=2.3.2.250926,<3.0.0.0) ; extra == "dev"
 Requires-Dist: pydantic (>=2.0.0,<3.0.0)
@@ -37,12 +38,12 @@ Requires-Dist: pytest (>=7.2.2,<8.0.0) ; extra == "dev"
 Requires-Dist: pyvo (>=1.7.0,<2.0.0)
 Requires-Dist: requests (>=2.28.1,<3.0.0)
 Requires-Dist: ruff (>=0.13.0,<0.14.0) ; extra == "dev"
-Requires-Dist: scikit-image (>=0.19.3,<0.22.0)
+Requires-Dist: scikit-image (>=0.26.0,<0.27.0)
 Requires-Dist: scikit-learn (>=1.3.0,<2.0.0)
 Requires-Dist: scipy-stubs (>=1.16.2.0,<2.0.0.0) ; extra == "dev"
 Requires-Dist: sphinx-rtd-theme (>=1.3.0,<2.0.0) ; extra == "docs"
 Requires-Dist: tqdm (>=4.64.0,<5.0.0)
-Requires-Dist: typer (>=0.19.2,<0.20.0)
+Requires-Dist: typer (>=0.19.2,<0.30.0)
 Requires-Dist: types-pyyaml (>=6.0.12.20250915,<7.0.0.0) ; extra == "dev"
 Requires-Dist: types-requests (>=2.32.4.20250913,<3.0.0.0) ; extra == "dev"
 Requires-Dist: urllib3 (>=2.5.0,<3.0.0)
@@ -60,29 +61,31 @@ Description-Content-Type: text/markdown
 ![](timewise.png)
 # Infrared light curves from WISE data
 
-This package downloads WISE data for positions on the sky and stacks single-exposure photometry per visit
+This package downloads WISE data for positions on the sky and stacks single-exposure photometry per visit. It is designed to do so for efficiently for large samples of millions of objects.
 
 ## Prerequisites
 Python version 3.11, 3.12 or 3.13.
 
 If you want to not only download individual exposure photometry but also stack detections per visit (see below),
-you must have access to a running [MongoDB](https://www.mongodb.com/)*.
+you must have access to a running [MongoDB](https://www.mongodb.com/)* **.
 
 <sub>* On MacOS have alook at the custom `brew` tap
 [here](https://github.com/mongodb/homebrew-brew)
 to get the MongoDB community edition. </sub>
 
+<sub>** On some systems this is not straight forward to set up. `timewise` requires it nevertheless as an integral part of the AMPEL system which is used to efficiently schedule and store the stacking of lightcurves. If you do not foresee a big overhead in calculating lightcurves for a sample of O(1000) objects, a more lightweight package might be more applicable. </sub>
+
 ## Installation
 
 ### If you use timewise only for downloading
 The package can be installed via `pip` (but make sure to install the v1 pre-release):
 ```bash
-pip install --pre timewise==1.0.0a8
+pip install --pre timewise==1.0.0a10
 ```
 ### If you use timewise also for stacking individual exposures
 You must install with the `ampel` extra:
 ```bash
-pip install --pre 'timewise[ampel]==1.0.0a8'
+pip install --pre 'timewise[ampel]==1.0.0a10'
 ```
 To tell AMPEL which modules, aka units, to use, build the corresponding configuration file:
 ```bash
--- timewise-1.0.0a8/README.md
+++ timewise-1.0.0a10/README.md
@@ -7,29 +7,31 @@
 ![](timewise.png)
 # Infrared light curves from WISE data
 
-This package downloads WISE data for positions on the sky and stacks single-exposure photometry per visit
+This package downloads WISE data for positions on the sky and stacks single-exposure photometry per visit. It is designed to do so for efficiently for large samples of millions of objects.
 
 ## Prerequisites
 Python version 3.11, 3.12 or 3.13.
 
 If you want to not only download individual exposure photometry but also stack detections per visit (see below),
-you must have access to a running [MongoDB](https://www.mongodb.com/)*.
+you must have access to a running [MongoDB](https://www.mongodb.com/)* **.
 
 <sub>* On MacOS have alook at the custom `brew` tap
 [here](https://github.com/mongodb/homebrew-brew)
 to get the MongoDB community edition. </sub>
 
+<sub>** On some systems this is not straight forward to set up. `timewise` requires it nevertheless as an integral part of the AMPEL system which is used to efficiently schedule and store the stacking of lightcurves. If you do not foresee a big overhead in calculating lightcurves for a sample of O(1000) objects, a more lightweight package might be more applicable. </sub>
+
 ## Installation
 
 ### If you use timewise only for downloading
 The package can be installed via `pip` (but make sure to install the v1 pre-release):
 ```bash
-pip install --pre timewise==1.0.0a8
+pip install --pre timewise==1.0.0a10
 ```
 ### If you use timewise also for stacking individual exposures
 You must install with the `ampel` extra:
 ```bash
-pip install --pre 'timewise[ampel]==1.0.0a8'
+pip install --pre 'timewise[ampel]==1.0.0a10'
 ```
 To tell AMPEL which modules, aka units, to use, build the corresponding configuration file:
 ```bash
--- timewise-1.0.0a8/ampel/timewise/alert/TimewiseAlertSupplier.py
+++ timewise-1.0.0a10/ampel/timewise/alert/TimewiseAlertSupplier.py
@@ -9,7 +9,7 @@
 
 import sys
 from hashlib import blake2b
-from typing import Literal, List
+from typing import Literal, List, Dict, Any
 
 import pandas as pd
 
@@ -71,8 +71,8 @@ class TimewiseAlertSupplier(BaseAlertSupplier, AmpelABC):
 
         move = {
             c: c.replace("_ep", "")
-            for c in columns_to_rename
-            if c.replace("_ep", "") in table.columns
+            for c in table.columns
+            if (c.replace("_ep", "") in table.columns) and (c.endswith("_ep"))
         }
         if move:
             # In this case, the columns already exists because the neowise data is present
@@ -88,7 +88,7 @@ class TimewiseAlertSupplier(BaseAlertSupplier, AmpelABC):
         for i, row in table.iterrows():
             # convert table row to dict, convert data types from numpy to native python
             # Respect masked fields and convert to None
-            pp = {k: None if pd.isna(v) else v for k, v in row.to_dict().items()}
+            pp = {str(k): None if pd.isna(v) else v for k, v in row.to_dict().items()}
             pp_hash = blake2b(encode(pp), digest_size=7).digest()
             if self.counter:
                 pp["candid"] = self.counter
--- timewise-1.0.0a8/ampel/timewise/alert/load/TimewiseFileLoader.py
+++ timewise-1.0.0a10/ampel/timewise/alert/load/TimewiseFileLoader.py
@@ -32,6 +32,9 @@ class TimewiseFileLoader(AbsAlertLoader[Dict], AmpelABC):
 
     chunks: list[int] | None = None
 
+    # optionally skip files that are missing
+    skip_missing_files: bool = False
+
     def __init__(self, **kwargs) -> None:
         super().__init__(**kwargs)
 
@@ -81,7 +84,14 @@ class TimewiseFileLoader(AbsAlertLoader[Dict], AmpelABC):
         data = []
         for task in tasks:
             self.logger.debug(f"reading {task}")
-            idata = backend.load_data(task)
+            try:
+                idata = backend.load_data(task)
+            except FileNotFoundError as e:
+                if self.skip_missing_files:
+                    self.logger.warn(f"file for task {task} not found, skipping...")
+                    continue
+                else:
+                    raise e
 
             # add table name
             idata["table_name"] = (
--- timewise-1.0.0a8/ampel/timewise/ingest/TiMongoMuxer.py
+++ timewise-1.0.0a10/ampel/timewise/ingest/TiMongoMuxer.py
@@ -8,13 +8,20 @@
 
 from bisect import bisect_right
 from contextlib import suppress
-from typing import Any
-
+from typing import Any, Sequence
 
 from ampel.abstract.AbsT0Muxer import AbsT0Muxer
 from ampel.content.DataPoint import DataPoint
-from ampel.types import DataPointId, StockId
+from ampel.model.operator.AllOf import AllOf
+from ampel.model.operator.AnyOf import AnyOf
+from ampel.types import ChannelId, DataPointId, StockId
 from ampel.util.mappings import unflatten_dict
+from astropy.table import Table
+from pydantic import TypeAdapter
+from timewise.io.stable_tap import StableTAPService
+from timewise.query import QueryType
+from timewise.tables.allwise_p3as_mep import allwise_p3as_mep
+from timewise.types import TYPE_MAP
 
 
 class ConcurrentUpdateError(Exception):
@@ -51,8 +58,13 @@ class TiMongoMuxer(AbsT0Muxer):
         "body.dec": 1,
     }
 
+    channel: None | ChannelId | AnyOf[ChannelId] | AllOf[ChannelId] = None
+
     unique_key: list[str] = ["mjd", "ra", "dec"]
 
+    # URL of tap service for query of AllWISE Source Table
+    tap_service_url: str = "https://irsa.ipac.caltech.edu/TAP"
+
     def __init__(self, **kwargs) -> None:
         super().__init__(**kwargs)
 
@@ -60,6 +72,11 @@
         self._photo_col = self.context.db.get_collection("t0")
         self._projection_spec = unflatten_dict(self.projection)
 
+        self._tap_service = StableTAPService(self.tap_service_url)
+
+        self._allwise_source_cntr: list[str] = []
+        self._not_allwise_source_cntr: list[str] = []
+
     def process(
         self, dps: list[DataPoint], stock_id: None | StockId = None
    ) -> tuple[None | list[DataPoint], None | list[DataPoint]]:
@@ -81,7 +98,76 @@
 
     # NB: this 1-liner is a separate method to provide a patch point for race condition testing
     def _get_dps(self, stock_id: None | StockId) -> list[DataPoint]:
-        return list(self._photo_col.find({"stock": stock_id}, self.projection))
+        if self.channel is not None:
+            if isinstance(self.channel, ChannelId):
+                channel_query: (
+                    ChannelId | dict[str, Sequence[ChannelId | AllOf[ChannelId]]]
+                ) = self.channel
+            elif isinstance(self.channel, AnyOf):
+                channel_query = {"$in": self.channel.any_of}
+            elif isinstance(self.channel, AllOf):
+                channel_query = {"$all": self.channel.all_of}
+            else:
+                # should not happen
+                raise TypeError()
+            _channel = {"channel": channel_query}
+        else:
+            _channel = {}
+        query = {"stock": stock_id, **_channel}
+        return list(self._photo_col.find(query, self.projection))
+
+    def _check_cntrs(self, dps: Sequence[DataPoint]) -> None:
+        # assemble query
+        query_config = {
+            "type": "by_allwise_cntr_and_position",
+            "radius_arcsec": 10,
+            "columns": ["cntr"],
+            "constraints": [],
+            "table": {"name": "allwise_p3as_psd"},
+        }
+        query: QueryType = TypeAdapter(QueryType).validate_python(query_config)
+
+        # load datapoints into astropy table
+        upload = Table([dp["body"] for dp in dps])
+        upload["allwise_cntr"] = upload[allwise_p3as_mep.allwise_cntr_column]
+        upload[query.original_id_key] = [dp["id"] for dp in dps]
+        for key, dtype in query.input_columns.items():
+            upload[key] = upload[key].astype(TYPE_MAP[dtype])
+        for key in upload.colnames:
+            if key not in query.input_columns:
+                upload.remove_column(key)
+
+        # run query
+        self.logger.info("Querying AllWISE Source Table for MEP CNTRs ...")
+        res = self._tap_service.run_sync(
+            query.adql, uploads={query.upload_name: upload}
+        )
+
+        # update internal state
+        res_cntr = res.to_table()["cntr"].astype(str)
+        self._allwise_source_cntr.extend(list(res_cntr))
+        self._not_allwise_source_cntr.extend(
+            list(set(upload["allwise_cntr"].astype(str)) - set(res_cntr))
+        )
+
+    def _check_mep_allwise_sources(self, dps: Sequence[DataPoint]) -> list[DataPointId]:
+        dps_with_unchecked_cntr = [
+            dp
+            for dp in dps
+            if str(dp["body"][allwise_p3as_mep.allwise_cntr_column])
+            not in self._allwise_source_cntr + self._not_allwise_source_cntr
+        ]
+        if len(dps_with_unchecked_cntr) > 0:
+            self._check_cntrs(dps_with_unchecked_cntr)
+
+        # compile list of invalid datapoint ids
+        invalid_dp_ids = []
+        for dp in dps:
+            cntr = str(dp["body"][allwise_p3as_mep.allwise_cntr_column])
+            if cntr in self._not_allwise_source_cntr:
+                invalid_dp_ids.append(dp["id"])
+
+        return invalid_dp_ids
 
     def _process(
         self, dps: list[DataPoint], stock_id: None | StockId = None
@@ -128,7 +214,10 @@
             else:
                 unique_dps_ids[key] = [dp["id"]]
 
-        # make sure no duplicate datapoints exist
+        # Part 2: Check that there are no duplicates and handle redundant AllWISE MEP data
+        ##################################################################################
+
+        invalid_dp_ids = []
         for key, simultaneous_dps in unique_dps_ids.items():
             dps_db_wrong = [dp for dp in dps_db if dp["id"] in simultaneous_dps]
             dps_wrong = [dp for dp in dps if dp["id"] in simultaneous_dps]
@@ -136,20 +225,47 @@
                 f"stockID {str(stock_id)}: Duplicate photopoints at {key}!\nDPS from DB:"
                 f"\n{dps_db_wrong}\nNew DPS:\n{dps_wrong}"
             )
-            assert len(simultaneous_dps) == 1, msg
 
-        # Part 2: Update new data points that are already superseded
-        ############################################################
+            all_wrong_dps = dps_db_wrong + dps_wrong
+            if len(simultaneous_dps) > 1:
+                # if these datapoints come from the AllWISE MEP database, downloaded by timewise
+                # there can be duplicates. Only the AllWISE CNTR can tell us which datapoints
+                # should be used: the CNTR that appears in the AllWISE source catalog.
+                if all(
+                    [
+                        ("TIMEWISE" in dp["tag"]) and ("allwise_p3as_mep" in dp["tag"])
+                        for dp in all_wrong_dps
+                    ]
+                ):
+                    self.logger.info(
+                        f"{len(all_wrong_dps)} duplicate MEP datapoints found. Checking ..."
+                    )
+                    i_invalid_dp_ids = self._check_mep_allwise_sources(
+                        dps_db_wrong + dps_wrong
+                    )
+                    self.logger.info(
+                        f"Found {len(i_invalid_dp_ids)} invalid MEP datapoints."
+                    )
+                    invalid_dp_ids.extend(i_invalid_dp_ids)
+
+                else:
+                    raise RuntimeError(msg)
+
+        # Part 3: Compile final lists of datapoints to insert and combine
+        #################################################################
 
         # Difference between candids from the alert and candids present in DB
-        ids_dps_to_insert = ids_dps_alert - ids_dps_db
+        ids_dps_to_insert = ids_dps_alert - ids_dps_db - set(invalid_dp_ids)
         dps_to_insert = [dp for dp in dps if dp["id"] in ids_dps_to_insert]
         dps_to_combine = [
-            dp for dp in dps + dps_db if dp["id"] in ids_dps_alert | ids_dps_db
+            dp
+            for dp in dps + dps_db
+            if dp["id"] in ((ids_dps_alert | ids_dps_db) - set(invalid_dp_ids))
        ]
         self.logger.debug(
             f"Got {len(ids_dps_alert)} datapoints from alerts, "
             f"found {len(dps_db)} in DB, "
+            f"{len(invalid_dp_ids)} invalid datapoints, "
             f"inserting {len(dps_to_insert)} datapoints, "
             f"combining {len(dps_to_combine)} datapoints"
         )
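The new `channel` setting narrows `_get_dps` to datapoints ingested for specific channels by translating AMPEL's `AnyOf`/`AllOf` operators into MongoDB query operators. A standalone sketch of that mapping, duck-typed for brevity (names hypothetical):

```python
# Sketch of the channel-to-Mongo translation in TiMongoMuxer._get_dps.
def channel_clause(channel) -> dict:
    if channel is None:
        return {}                                    # no channel filter
    if isinstance(channel, (int, str)):              # a single ChannelId
        return {"channel": channel}
    if hasattr(channel, "any_of"):                   # AnyOf -> match any listed channel
        return {"channel": {"$in": channel.any_of}}
    if hasattr(channel, "all_of"):                   # AllOf -> require all listed channels
        return {"channel": {"$all": channel.all_of}}
    raise TypeError(f"unsupported channel spec: {channel!r}")

# the muxer then queries {"stock": stock_id, **channel_clause(channel)}
print(channel_clause("WISE_CHANNEL"))  # {'channel': 'WISE_CHANNEL'}
```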
--- timewise-1.0.0a8/ampel/timewise/t1/T1HDBSCAN.py
+++ timewise-1.0.0a10/ampel/timewise/t1/T1HDBSCAN.py
@@ -37,6 +37,7 @@ else:
 class T1HDBSCAN(AbsT1CombineUnit):
     input_mongo_db_name: str
     original_id_key: str
+    mongo: str = "mongodb://localhost:27017/"
     whitelist_region_arcsec: float = 1
     cluster_distance_arcsec: float = 0.5
 
@@ -57,7 +58,7 @@ class T1HDBSCAN(AbsT1CombineUnit):
 
     def __init__(self, **kwargs):
         super().__init__(**kwargs)
-        self._col = MongoClient()[self.input_mongo_db_name]["input"]
+        self._col = MongoClient(self.mongo)[self.input_mongo_db_name]["input"]
         self._plotter = AuxUnitRegister.new_unit(
             model=self.plotter, sub_type=AuxDiagnosticPlotter
         )
--- timewise-1.0.0a8/ampel/timewise/t1/TimewiseFilter.py
+++ timewise-1.0.0a10/ampel/timewise/t1/TimewiseFilter.py
@@ -17,7 +17,7 @@ from timewise.process import keys
 
 class TimewiseFilter(AbsAlertFilter):
     det_per_visit: int = 8
-    n_visits = 10
+    n_visits: int = 10
 
     def process(self, alert: AmpelAlertProtocol) -> None | bool | int:
         columns = [
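The `n_visits` fix matters because AMPEL units are validated from their type annotations, pydantic-style: a bare `n_visits = 10` carries no annotation, so it is not recognized as a configurable field. Plain pydantic v2 illustrates the distinction (and rejects the unannotated form outright):

```python
from pydantic import BaseModel

class Filter(BaseModel):
    det_per_visit: int = 8  # annotated: a real, overridable field
    n_visits: int = 10      # was `n_visits = 10`, which pydantic v2 would reject

print(Filter(n_visits=5).n_visits)  # 5
```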
--- timewise-1.0.0a8/pyproject.toml
+++ timewise-1.0.0a10/pyproject.toml
@@ -4,7 +4,7 @@ build-backend = "poetry.core.masonry.api"
 
 [project]
 name = "timewise"
-version = "1.0.0a8"
+version = "1.0.0a10"
 description = "Download WISE infrared data for many objects and process them with AMPEL"
 authors = [
     { name = "Jannis Necker", email = "jannis.necker@gmail.com" },
@@ -16,18 +16,18 @@ dependencies = [
     "tqdm>=4.64.0,<5.0.0",
     "requests>=2.28.1,<3.0.0",
     "pandas>=1.4.3,<3.0.0",
-    "numpy>=1.23.2,<2.0.0",
+    "numpy>=1.23.2,<3.0.0",
     "pyvo>=1.7.0,<2.0.0",
     "astropy>=5.1,<8.0.0",
     "matplotlib>=3.5.3,<4.0.0",
-    "scikit-image>=0.19.3,<0.22.0",
+    "scikit-image>=0.26.0,<0.27.0",
     "backoff>=2.1.2,<3.0.0",
     "virtualenv>=20.16.3,<21.0.0",
     "pydantic>=2.0.0,<3.0.0",
     "scikit-learn>=1.3.0,<2.0.0",
     "jupyterlab[jupyter]>=4.0.6,<5.0.0",
     "jupyter[jupyter]>=1.0.0,<2.0.0",
-    "typer (>=0.19.2,<0.20.0)",
+    "typer (>=0.19.2,<0.30.0)",
     "urllib3 (>=2.5.0,<3.0.0)",
 ]
 
@@ -46,7 +46,7 @@ Homepage = "https://github.com/JannisNe/timewise"
 
 [project.optional-dependencies]
 dev = [
-    "coveralls>=3.3.1,<4.0.0",
+    "coveralls>=4.0.0,<5.0.0",
     "pytest>=7.2.2,<8.0.0",
     "ruff>=0.13.0,<0.14.0",
     "mypy (>=1.18.2,<2.0.0)",
@@ -54,6 +54,7 @@ dev = [
     "scipy-stubs (>=1.16.2.0,<2.0.0.0)",
     "types-pyyaml (>=6.0.12.20250915,<7.0.0.0)",
     "types-requests (>=2.32.4.20250913,<3.0.0.0)",
+    "mongomock (>=4.3.0,<5.0.0)",
 ]
 docs = [
     "myst-parser>=1,<3",
@@ -63,8 +64,8 @@ docs = [
 ampel= [
     "ampel-photometry (==0.10.2a1)",
     "ampel-plot (>=0.9.1,<0.10.0)",
-    "ampel-core (==0.10.6a17)",
-    "ampel-alerts (==0.10.3a6)",
+    "ampel-core (==0.10.6a21)",
+    "ampel-alerts (==0.10.4a0)",
     "ampel-interface (==0.10.5a8)"
 ]
 
--- /dev/null
+++ timewise-1.0.0a10/timewise/__init__.py
@@ -0,0 +1 @@
+__version__ = "1.0.0a10"
--- timewise-1.0.0a8/timewise/backend/base.py
+++ timewise-1.0.0a10/timewise/backend/base.py
@@ -20,6 +20,8 @@ class Backend(abc.ABC, BaseModel):
     def save_meta(self, task: TaskID, meta: dict[str, Any]) -> None: ...
     @abc.abstractmethod
     def load_meta(self, task: TaskID) -> dict[str, Any] | None: ...
+    @abc.abstractmethod
+    def drop_meta(self, task: TaskID) -> None: ...
 
     # --- Markers ---
     @abc.abstractmethod
--- timewise-1.0.0a8/timewise/backend/filesystem.py
+++ timewise-1.0.0a10/timewise/backend/filesystem.py
@@ -52,6 +52,9 @@ class FileSystemBackend(Backend):
     def meta_exists(self, task: TaskID) -> bool:
         return self._meta_path(task).exists()
 
+    def drop_meta(self, task: TaskID) -> None:
+        self._meta_path(task).unlink()
+
     # ----------------------------
     # Markers
     # ----------------------------
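`drop_meta` extends the abstract `Backend` contract so the downloader can forget a failed job's metadata before resubmitting it (see `Downloader.resubmit` in the `timewise/io/download.py` diff below). A minimal in-memory sketch of the contract, as a hypothetical test double:

```python
from typing import Any

class InMemoryMetaStore:
    """Hypothetical stand-in for a Backend's meta methods."""

    def __init__(self) -> None:
        self._meta: dict[str, dict[str, Any]] = {}

    def save_meta(self, task: str, meta: dict[str, Any]) -> None:
        self._meta[task] = meta

    def load_meta(self, task: str) -> dict[str, Any] | None:
        return self._meta.get(task)

    def drop_meta(self, task: str) -> None:
        # FileSystemBackend unlinks the meta file; here we just delete the key
        del self._meta[task]
```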
--- timewise-1.0.0a8/timewise/chunking.py
+++ timewise-1.0.0a10/timewise/chunking.py
@@ -11,9 +11,7 @@ logger = logging.getLogger(__name__)
 
 
 class Chunk:
-    def __init__(
-        self, chunk_id: int, input_csv, row_indices: npt.NDArray[np.int_]
-    ):
+    def __init__(self, chunk_id: int, input_csv, row_indices: npt.NDArray[np.int_]):
         self.chunk_id = chunk_id
         self.row_numbers = row_indices
         self.input_csv = input_csv
@@ -71,4 +69,4 @@ class Chunker:
         start = chunk_id * self.chunk_size
         stop = min(start + self.chunk_size, self._n_rows)
         logger.debug(f"chunk {chunk_id}: from {start} to {stop}")
-        return Chunk(chunk_id, self.input_csv, np.arange(start=start, stop=stop))
+        return Chunk(chunk_id, self.input_csv, np.arange(start, stop))
--- timewise-1.0.0a8/timewise/cli.py
+++ timewise-1.0.0a10/timewise/cli.py
@@ -53,8 +53,16 @@ def main(
 @app.command(help="Download WISE photometry from IRSA")
 def download(
     config_path: config_path_type,
+    resubmit_failed: Annotated[
+        bool,
+        typer.Option(
+            help="Re-submit jobs when failed due to connection issues",
+        ),
+    ] = False,
 ):
-    TimewiseConfig.from_yaml(config_path).download.build_downloader().run()
+    TimewiseConfig.from_yaml(config_path).download.build_downloader(
+        resubmit_failed=resubmit_failed
+    ).run()
 
 
 # the following commands will only be added if ampel is installed
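The new `--resubmit-failed` flag is plumbed straight into `build_downloader`. Assuming the module-level Typer app is named `app` (as the `@app.command` decorator suggests) and a config file exists at the hypothetical path below, the flag can be exercised via Typer's test runner:

```python
# hypothetical smoke test of the new CLI flag
from typer.testing import CliRunner
from timewise.cli import app  # name assumed from the @app.command decorator

runner = CliRunner()
result = runner.invoke(app, ["download", "config.yml", "--resubmit-failed"])
print(result.exit_code)
```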
--- timewise-1.0.0a8/timewise/io/config.py
+++ timewise-1.0.0a10/timewise/io/config.py
@@ -17,6 +17,7 @@ class DownloadConfig(BaseModel):
     poll_interval: float = 10.0
     queries: List[QueryType] = Field(..., description="One or more queries per chunk")
     backend: BackendType = Field(..., discriminator="type")
+    resubmit_failed: bool = False
 
     service_url: str = "https://irsa.ipac.caltech.edu/TAP"
 
@@ -57,13 +58,16 @@
 
         return self
 
-    def build_downloader(self) -> Downloader:
-        return Downloader(
-            service_url=self.service_url,
-            input_csv=self.expanded_input_csv,
-            chunk_size=self.chunk_size,
-            backend=self.backend,
-            queries=self.queries,
-            max_concurrent_jobs=self.max_concurrent_jobs,
-            poll_interval=self.poll_interval,
-        )
+    def build_downloader(self, **overwrite) -> Downloader:
+        default = {
+            "service_url": self.service_url,
+            "input_csv": self.expanded_input_csv,
+            "chunk_size": self.chunk_size,
+            "backend": self.backend,
+            "queries": self.queries,
+            "max_concurrent_jobs": self.max_concurrent_jobs,
+            "poll_interval": self.poll_interval,
+            "resubmit_failed": self.resubmit_failed,
+        }
+        default.update(overwrite)
+        return Downloader(**default)  # type: ignore
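`build_downloader` now assembles its defaults into a dict and applies keyword overrides on top, so the override the CLI performs is equally available from Python. A sketch (config path and import path assumed):

```python
from timewise.config import TimewiseConfig  # import path assumed

cfg = TimewiseConfig.from_yaml("timewise.yml")  # hypothetical config file
downloader = cfg.download.build_downloader(resubmit_failed=True)  # overrides the YAML value
downloader.run()
```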
--- timewise-1.0.0a8/timewise/io/download.py
+++ timewise-1.0.0a10/timewise/io/download.py
@@ -1,25 +1,22 @@
-import time
-import threading
 import logging
-from queue import Empty
-from typing import Dict, Iterator, cast, Sequence
+import threading
+import time
+from datetime import datetime, timedelta
 from itertools import product
 from pathlib import Path
-from datetime import datetime, timedelta
+from queue import Empty
+from typing import Dict, Iterator
 
-import pandas as pd
 import numpy as np
 from astropy.table import Table
 from pyvo.utils.http import create_session
 
-from .stable_tap import StableTAPService
 from ..backend import BackendType
-from ..types import TAPJobMeta, TaskID, TYPE_MAP
+from ..chunking import Chunk, Chunker
 from ..query import QueryType
-from ..query.base import Query
+from ..types import TYPE_MAP, TAPJobMeta, TaskID
 from ..util.error_threading import ErrorQueue, ExceptionSafeThread
-from ..chunking import Chunker, Chunk
-
+from .stable_tap import StableTAPService
 
 logger = logging.getLogger(__name__)
 
@@ -34,6 +31,7 @@ class Downloader:
         queries: list[QueryType],
         max_concurrent_jobs: int,
         poll_interval: float,
+        resubmit_failed: bool,
     ):
         self.backend = backend
         self.queries = queries
@@ -67,6 +65,7 @@
         self.service: StableTAPService = StableTAPService(
             service_url, session=self.session
         )
+        self.resubmit_failed = resubmit_failed
 
         self.chunker = Chunker(input_csv=input_csv, chunk_size=chunk_size)
 
@@ -74,7 +73,7 @@
     # helpers
     # ----------------------------
     @staticmethod
-    def get_task_id(chunk: Chunk, query: Query) -> TaskID:
+    def get_task_id(chunk: Chunk, query: QueryType) -> TaskID:
         return TaskID(
             namespace="download", key=f"chunk{chunk.chunk_id:04d}_{query.hash}"
         )
@@ -107,7 +106,7 @@
     # TAP submission and download
     # ----------------------------
 
-    def submit_tap_job(self, query: Query, chunk: Chunk) -> TAPJobMeta:
+    def submit_tap_job(self, query: QueryType, chunk: Chunk) -> TAPJobMeta:
         adql = query.adql
         chunk_df = chunk.data
 
@@ -133,7 +132,6 @@
         logger.debug(f"uploading {len(upload)} objects.")
         job = self.service.submit_job(adql, uploads={query.upload_name: upload})
         job.run()
-        logger.debug(job.url)
 
         return TAPJobMeta(
             url=job.url,
@@ -163,7 +161,7 @@
     def _submission_worker(self):
         while not self.stop_event.is_set():
             try:
-                chunk, query = self.submit_queue.get(timeout=1.0)  # type: Chunk, Query
+                chunk, query = self.submit_queue.get(timeout=1.0)  # type: Chunk, QueryType
             except Empty:
                 if self.all_chunks_queued:
                     self.all_chunks_submitted = True
@@ -194,6 +192,26 @@
     # ----------------------------
     # Polling thread
     # ----------------------------
+
+    def resubmit(self, resubmit_task: TaskID):
+        logger.info(f"resubmitting {resubmit_task}")
+        submit = None
+        for chunk, q in product(self.chunker, self.queries):
+            task = self.get_task_id(chunk, q)
+            if task == resubmit_task:
+                submit = chunk, q
+                break
+        if submit is None:
+            raise RuntimeError(f"resubmit task {resubmit_task} not found!")
+
+        # remove current info, so the job won't be re-submitted over and over again
+        self.backend.drop_meta(resubmit_task)
+        with self.job_lock:
+            self.jobs.pop(resubmit_task)
+
+        # put task back in resubmit queue
+        self.submit_queue.put(submit)
+
     def _polling_worker(self):
         logger.debug("starting polling worker")
         backend = self.backend
@@ -225,6 +243,9 @@
                    f"No job found under {meta['url']} for {task}! "
                    f"Probably took too long before downloading results."
                )
+                if self.resubmit_failed:
+                    self.resubmit(task)
+                    continue
 
             meta["status"] = status
             with self.job_lock:
--- timewise-1.0.0a8/timewise/io/stable_tap.py
+++ timewise-1.0.0a10/timewise/io/stable_tap.py
@@ -5,9 +5,6 @@ from xml.etree import ElementTree
 
 import requests
 
-from timewise.util.backoff import backoff_hndlr
-
-
 logger = logging.getLogger(__name__)
 
 
@@ -26,7 +23,6 @@ class StableAsyncTAPJob(vo.dal.AsyncTAPJob):
         backoff.expo,
         requests.exceptions.HTTPError,
         max_tries=5,
-        on_backoff=backoff_hndlr,
     )
     def create(
         cls,
@@ -92,7 +88,6 @@
         backoff.expo,
         (vo.dal.DALServiceError, AttributeError),
         max_tries=50,
-        on_backoff=backoff_hndlr,
     )
     def phase(self):
         return super(StableAsyncTAPJob, self).phase
@@ -100,11 +95,32 @@
     @backoff.on_exception(
         backoff.expo,
         vo.dal.DALServiceError,
-        max_tries=50,
-        on_backoff=backoff_hndlr,
+        max_tries=5,
     )
-    def _update(self, *args, **kwargs):
-        return super(StableAsyncTAPJob, self)._update(*args, **kwargs)
+    def _update(self, wait_for_statechange=False, timeout=60.0):
+        n_tries = 0
+        max_tries = 10
+        while n_tries < max_tries:
+            try:
+                res = super(StableAsyncTAPJob, self)._update(
+                    wait_for_statechange=wait_for_statechange,
+                    timeout=timeout * (1 + n_tries),
+                )
+            except vo.dal.DALServiceError as e:
+                if "Read timed out" in str(e):
+                    logger.debug(
+                        f"{self.url} timed out after {timeout * (1 + n_tries):.0f}s"
+                    )
+                    n_tries += 1
+                    continue
+                else:
+                    raise e
+
+            return res
+
+        raise vo.dal.DALServiceError(
+            f"No success after {max_tries} tries for {self.url}!"
+        )
 
 
 class StableTAPService(vo.dal.TAPService):
@@ -116,7 +132,6 @@ class StableTAPService(vo.dal.TAPService):
         backoff.expo,
         (vo.dal.DALServiceError, AttributeError, AssertionError),
         max_tries=5,
-        on_backoff=backoff_hndlr,
     )
     def submit_job(
         self, query, *, language="ADQL", maxrec=None, uploads=None, **keywords
@@ -136,3 +151,15 @@
 
     def get_job_from_url(self, url):
         return StableAsyncTAPJob(url, session=self._session)
+
+    @backoff.on_exception(
+        backoff.expo,
+        (vo.dal.DALServiceError, vo.dal.DALFormatError),
+        max_tries=5,
+    )
+    def run_sync(
+        self, query, *, language="ADQL", maxrec=None, uploads=None, **keywords
+    ):
+        return super().run_sync(
+            query, language=language, maxrec=maxrec, uploads=uploads, **keywords
+        )
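The rewritten `_update` replaces a pure `backoff` decorator with an explicit loop that retries only on read timeouts and grows the timeout linearly with each attempt, so slow IRSA responses get progressively more time before the job is given up on. The shape of that pattern, as a standalone sketch (function names hypothetical):

```python
import logging

logger = logging.getLogger(__name__)

def fetch_with_growing_timeout(fetch, timeout: float = 60.0, max_tries: int = 10):
    """Retry `fetch(timeout=...)` on TimeoutError, growing the timeout linearly."""
    for n in range(max_tries):
        try:
            return fetch(timeout=timeout * (1 + n))
        except TimeoutError:
            # only timeouts are retried; other errors propagate immediately
            logger.debug("timed out after %.0fs, retrying", timeout * (1 + n))
    raise TimeoutError(f"no success after {max_tries} tries")
```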
--- timewise-1.0.0a8/timewise/plot/sdss.py
+++ timewise-1.0.0a10/timewise/plot/sdss.py
@@ -5,8 +5,6 @@ import logging
 import matplotlib.pyplot as plt
 import backoff
 
-from ..util.backoff import backoff_hndlr
-
 
 logger = logging.getLogger(__name__)
 
@@ -34,7 +32,7 @@ def login_to_sciserver():
 
 
 @backoff.on_exception(
-    backoff.expo, requests.RequestException, max_tries=50, on_backoff=backoff_hndlr
+    backoff.expo, requests.RequestException, max_tries=50
 )
 def get_cutout(*args, **kwargs):
     login_to_sciserver()
--- timewise-1.0.0a8/timewise/process/interface.py
+++ timewise-1.0.0a10/timewise/process/interface.py
@@ -10,6 +10,8 @@ from pymongo import MongoClient, ASCENDING
 from pymongo.collection import Collection
 from pymongo.database import Database
 
+from ..util.path import expand
+
 if find_spec("ampel.core"):
     AMPEL_EXISTS = True
     from ampel.cli.JobCommand import JobCommand
@@ -40,6 +42,10 @@ class AmpelInterface:
         self.template_path = Path(template_path)
         self.uri = uri
 
+    @property
+    def expanded_input_csv(self) -> Path:
+        return expand(self.input_csv)
+
     def import_input(self):
         # if collection already exists, assume import was already done
         if "input" in self.client[self.input_mongo_db_name].list_collection_names():
@@ -48,12 +54,12 @@
             )
             return
 
-        logger.debug(f"importing {self.input_csv} into {self.input_mongo_db_name}")
+        logger.debug(f"importing {self.expanded_input_csv} into {self.input_mongo_db_name}")
         col = self.client[self.input_mongo_db_name]["input"]
 
         # create an index from stock id
         col.create_index([(self.orig_id_key, ASCENDING)], unique=True)
-        col.insert_many(pd.read_csv(self.input_csv).to_dict(orient="records"))
+        col.insert_many(pd.read_csv(self.expanded_input_csv).to_dict(orient="records"))
 
     def make_ampel_job_file(self, cfg_path: Path) -> Path:
         logger.debug(f"loading ampel job template from {self.template_path}")
--- timewise-1.0.0a8/timewise/process/stacking.py
+++ timewise-1.0.0a10/timewise/process/stacking.py
@@ -146,7 +146,7 @@
     bias_correction_function = CORRECTION_FUNCTIONS[correction_name]
 
     one_points_mask = None
-    visits_at_least_two_point = []
+    visits_at_least_two_point: npt.NDArray[np.generic] = np.array([])
 
     while n_remaining_outlier > 0:
         # make a mask of values to use
--- /dev/null
+++ timewise-1.0.0a10/timewise/query/__init__.py
@@ -0,0 +1,11 @@
+from typing import Annotated, TypeAlias, Union
+
+from pydantic import Field
+
+from .by_allwise_cntr_and_position import AllWISECntrQuery
+from .positional import PositionalQuery
+
+# Discriminated union of all query types
+QueryType: TypeAlias = Annotated[
+    Union[PositionalQuery, AllWISECntrQuery], Field(discriminator="type")
+]
--- /dev/null
+++ timewise-1.0.0a10/timewise/query/by_allwise_cntr_and_position.py
@@ -0,0 +1,49 @@
+import logging
+from typing import Dict, Literal
+
+from .base import Query
+
+logger = logging.getLogger(__name__)
+
+
+class AllWISECntrQuery(Query):
+    type: Literal["by_allwise_cntr_and_position"] = "by_allwise_cntr_and_position"
+    radius_arcsec: float
+
+    @property
+    def input_columns(self) -> Dict[str, str]:
+        return {
+            "allwise_cntr": "int",
+            "ra": "float",
+            "dec": "float",
+            self.original_id_key: "int",
+        }
+
+    def build(self) -> str:
+        logger.debug(f"constructing query by AllWISE cntr for {self.table.name}")
+
+        q = "SELECT \n\t"
+        for k in self.columns:
+            q += f"{self.table.name}.{k}, "
+        q += f"\n\tmine.{self.original_id_key} \n"
+        q += f"FROM\n\tTAP_UPLOAD.{self.upload_name} AS mine \n"
+        q += f"RIGHT JOIN\n\t{self.table.name} \n"
+        q += "WHERE \n"
+        q += (
+            f"\tCONTAINS(POINT('J2000',{self.table.name}.{self.table.ra_column},{self.table.name}.{self.table.dec_column}),"
+            f"CIRCLE('J2000',mine.ra,mine.dec,{self.radius_arcsec / 3600:.18f}))=1 "
+        )
+
+        constraints = self.constraints + [
+            f"{self.table.allwise_cntr_column} = {self.upload_name}.allwise_cntr"
+        ]
+
+        if len(constraints) > 0:
+            q += " AND (\n"
+            for c in constraints:
+                q += f"\t{self.table.name}.{c} AND \n"
+            q = q.strip(" AND \n")
+            q += "\t)"
+
+        logger.debug(f"\n{q}")
+        return q
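The new query type slots into the discriminated `QueryType` union, so it can be built from plain config data, exactly as `TiMongoMuxer._check_cntrs` does. A minimal sketch, assuming the `Query` base defaults for `original_id_key` and `upload_name` suffice:

```python
from pydantic import TypeAdapter
from timewise.query import QueryType

query = TypeAdapter(QueryType).validate_python({
    "type": "by_allwise_cntr_and_position",  # discriminator selects AllWISECntrQuery
    "radius_arcsec": 10,
    "columns": ["cntr"],
    "constraints": [],
    "table": {"name": "allwise_p3as_psd"},   # selects the new source-table config
})
print(query.adql)  # the ADQL produced by AllWISECntrQuery.build()
```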
--- timewise-1.0.0a8/timewise/query/positional.py
+++ timewise-1.0.0a10/timewise/query/positional.py
@@ -36,5 +36,4 @@
             q = q.strip(" AND \n")
             q += "\t)"
 
-        logger.debug(f"\n{q}")
         return q
--- /dev/null
+++ timewise-1.0.0a10/timewise/tables/__init__.py
@@ -0,0 +1,11 @@
+from typing import Annotated, Union
+
+from pydantic import Field
+
+from .allwise_p3as_mep import allwise_p3as_mep
+from .allwise_p3as_psd import allwise_p3as_psd
+from .neowiser_p1bs_psd import neowiser_p1bs_psd
+
+TableType = Annotated[
+    Union[allwise_p3as_mep, neowiser_p1bs_psd, allwise_p3as_psd], Field(discriminator="name")
+]
--- timewise-1.0.0a8/timewise/tables/allwise_p3as_mep.py
+++ timewise-1.0.0a10/timewise/tables/allwise_p3as_mep.py
@@ -1,4 +1,5 @@
-from typing import Literal, ClassVar, Type, Dict
+from typing import ClassVar, Dict, Literal, Type
+
 
 from .base import TableConfig
 
@@ -20,3 +21,4 @@ class allwise_p3as_mep(TableConfig):
     }
     ra_column: ClassVar[str] = "ra"
     dec_column: ClassVar[str] = "dec"
+    allwise_cntr_column: ClassVar[str] = "cntr_mf"
--- /dev/null
+++ timewise-1.0.0a10/timewise/tables/allwise_p3as_psd.py
@@ -0,0 +1,24 @@
+from typing import ClassVar, Dict, Literal, Type
+
+from .base import TableConfig
+
+
+class allwise_p3as_psd(TableConfig):
+    name: Literal["allwise_p3as_psd"] = "allwise_p3as_psd"
+    columns_dtypes: ClassVar[Dict[str, Type]] = {
+        "ra": float,
+        "dec": float,
+        "mjd": float,
+        "cntr": str,
+        "w1mpro": float,
+        "w1sigmpro": float,
+        "w2mpro": float,
+        "w2sigmpro": float,
+        "w1flux": float,
+        "w1sigflux": float,
+        "w2flux": float,
+        "w2sigflux": float,
+    }
+    ra_column: ClassVar[str] = "ra"
+    dec_column: ClassVar[str] = "dec"
+    allwise_cntr_column: ClassVar[str] = "cntr"
--- timewise-1.0.0a8/timewise/tables/neowiser_p1bs_psd.py
+++ timewise-1.0.0a10/timewise/tables/neowiser_p1bs_psd.py
@@ -1,4 +1,5 @@
-from typing import Literal, ClassVar, Dict, Type
+from typing import ClassVar, Dict, Literal, Type
+
 
 from .base import TableConfig
 
@@ -20,3 +21,4 @@ class neowiser_p1bs_psd(TableConfig):
     }
     ra_column: ClassVar[str] = "ra"
     dec_column: ClassVar[str] = "dec"
+    allwise_cntr_column: ClassVar[str] = "allwise_cntr"
--- timewise-1.0.0a8/timewise/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-__version__ = "1.0.0a8"
--- timewise-1.0.0a8/timewise/query/__init__.py
+++ /dev/null
@@ -1,6 +0,0 @@
-from pydantic import Field
-from typing import Union, Annotated, TypeAlias
-from .positional import PositionalQuery
-
-# Discriminated union of all query types
-QueryType: TypeAlias = Annotated[Union[PositionalQuery], Field(discriminator="type")]
--- timewise-1.0.0a8/timewise/tables/__init__.py
+++ /dev/null
@@ -1,10 +0,0 @@
-from pydantic import Field
-from typing import Union, Annotated
-
-from .allwise_p3as_mep import allwise_p3as_mep
-from .neowiser_p1bs_psd import neowiser_p1bs_psd
-
-
-TableType = Annotated[
-    Union[allwise_p3as_mep, neowiser_p1bs_psd], Field(discriminator="name")
-]
--- timewise-1.0.0a8/timewise/util/backoff.py
+++ /dev/null
@@ -1,12 +0,0 @@
-import logging
-
-
-logger = logging.getLogger(__name__)
-
-
-def backoff_hndlr(details):
-    logger.info(
-        "Backing off {wait:0.1f} seconds after {tries} tries "
-        "calling function {target} with args {args} and kwargs "
-        "{kwargs}".format(**details)
-    )
The remaining files (items 31–55 in the list above) are carried over without content changes.