timewise 1.0.0a9.tar.gz → 1.0.0a10.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (52)
  1. {timewise-1.0.0a9 → timewise-1.0.0a10}/PKG-INFO +13 -11
  2. {timewise-1.0.0a9 → timewise-1.0.0a10}/README.md +6 -4
  3. {timewise-1.0.0a9 → timewise-1.0.0a10}/ampel/timewise/alert/TimewiseAlertSupplier.py +2 -2
  4. {timewise-1.0.0a9 → timewise-1.0.0a10}/ampel/timewise/t1/T1HDBSCAN.py +2 -1
  5. {timewise-1.0.0a9 → timewise-1.0.0a10}/ampel/timewise/t1/TimewiseFilter.py +1 -1
  6. {timewise-1.0.0a9 → timewise-1.0.0a10}/pyproject.toml +7 -7
  7. timewise-1.0.0a10/timewise/__init__.py +1 -0
  8. {timewise-1.0.0a9 → timewise-1.0.0a10}/timewise/chunking.py +2 -4
  9. {timewise-1.0.0a9 → timewise-1.0.0a10}/timewise/io/stable_tap.py +30 -6
  10. {timewise-1.0.0a9 → timewise-1.0.0a10}/timewise/process/interface.py +8 -2
  11. {timewise-1.0.0a9 → timewise-1.0.0a10}/timewise/process/stacking.py +1 -1
  12. timewise-1.0.0a9/timewise/__init__.py +0 -1
  13. {timewise-1.0.0a9 → timewise-1.0.0a10}/LICENSE +0 -0
  14. {timewise-1.0.0a9 → timewise-1.0.0a10}/ampel/timewise/alert/load/TimewiseFileLoader.py +0 -0
  15. {timewise-1.0.0a9 → timewise-1.0.0a10}/ampel/timewise/ingest/TiCompilerOptions.py +0 -0
  16. {timewise-1.0.0a9 → timewise-1.0.0a10}/ampel/timewise/ingest/TiDataPointShaper.py +0 -0
  17. {timewise-1.0.0a9 → timewise-1.0.0a10}/ampel/timewise/ingest/TiMongoMuxer.py +0 -0
  18. {timewise-1.0.0a9 → timewise-1.0.0a10}/ampel/timewise/ingest/tags.py +0 -0
  19. {timewise-1.0.0a9 → timewise-1.0.0a10}/ampel/timewise/t2/T2StackVisits.py +0 -0
  20. {timewise-1.0.0a9 → timewise-1.0.0a10}/ampel/timewise/util/AuxDiagnosticPlotter.py +0 -0
  21. {timewise-1.0.0a9 → timewise-1.0.0a10}/ampel/timewise/util/pdutil.py +0 -0
  22. {timewise-1.0.0a9 → timewise-1.0.0a10}/timewise/backend/__init__.py +0 -0
  23. {timewise-1.0.0a9 → timewise-1.0.0a10}/timewise/backend/base.py +0 -0
  24. {timewise-1.0.0a9 → timewise-1.0.0a10}/timewise/backend/filesystem.py +0 -0
  25. {timewise-1.0.0a9 → timewise-1.0.0a10}/timewise/cli.py +0 -0
  26. {timewise-1.0.0a9 → timewise-1.0.0a10}/timewise/config.py +0 -0
  27. {timewise-1.0.0a9 → timewise-1.0.0a10}/timewise/io/__init__.py +0 -0
  28. {timewise-1.0.0a9 → timewise-1.0.0a10}/timewise/io/config.py +0 -0
  29. {timewise-1.0.0a9 → timewise-1.0.0a10}/timewise/io/download.py +0 -0
  30. {timewise-1.0.0a9 → timewise-1.0.0a10}/timewise/plot/__init__.py +0 -0
  31. {timewise-1.0.0a9 → timewise-1.0.0a10}/timewise/plot/diagnostic.py +0 -0
  32. {timewise-1.0.0a9 → timewise-1.0.0a10}/timewise/plot/lightcurve.py +0 -0
  33. {timewise-1.0.0a9 → timewise-1.0.0a10}/timewise/plot/panstarrs.py +0 -0
  34. {timewise-1.0.0a9 → timewise-1.0.0a10}/timewise/plot/sdss.py +0 -0
  35. {timewise-1.0.0a9 → timewise-1.0.0a10}/timewise/process/__init__.py +0 -0
  36. {timewise-1.0.0a9 → timewise-1.0.0a10}/timewise/process/config.py +0 -0
  37. {timewise-1.0.0a9 → timewise-1.0.0a10}/timewise/process/keys.py +0 -0
  38. {timewise-1.0.0a9 → timewise-1.0.0a10}/timewise/process/template.yml +0 -0
  39. {timewise-1.0.0a9 → timewise-1.0.0a10}/timewise/query/__init__.py +0 -0
  40. {timewise-1.0.0a9 → timewise-1.0.0a10}/timewise/query/base.py +0 -0
  41. {timewise-1.0.0a9 → timewise-1.0.0a10}/timewise/query/by_allwise_cntr_and_position.py +0 -0
  42. {timewise-1.0.0a9 → timewise-1.0.0a10}/timewise/query/positional.py +0 -0
  43. {timewise-1.0.0a9 → timewise-1.0.0a10}/timewise/tables/__init__.py +0 -0
  44. {timewise-1.0.0a9 → timewise-1.0.0a10}/timewise/tables/allwise_p3as_mep.py +0 -0
  45. {timewise-1.0.0a9 → timewise-1.0.0a10}/timewise/tables/allwise_p3as_psd.py +0 -0
  46. {timewise-1.0.0a9 → timewise-1.0.0a10}/timewise/tables/base.py +0 -0
  47. {timewise-1.0.0a9 → timewise-1.0.0a10}/timewise/tables/neowiser_p1bs_psd.py +0 -0
  48. {timewise-1.0.0a9 → timewise-1.0.0a10}/timewise/types.py +0 -0
  49. {timewise-1.0.0a9 → timewise-1.0.0a10}/timewise/util/csv_utils.py +0 -0
  50. {timewise-1.0.0a9 → timewise-1.0.0a10}/timewise/util/error_threading.py +0 -0
  51. {timewise-1.0.0a9 → timewise-1.0.0a10}/timewise/util/path.py +0 -0
  52. {timewise-1.0.0a9 → timewise-1.0.0a10}/timewise/util/visits.py +0 -0
{timewise-1.0.0a9 → timewise-1.0.0a10}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: timewise
-Version: 1.0.0a9
+Version: 1.0.0a10
 Summary: Download WISE infrared data for many objects and process them with AMPEL
 License: MIT
 License-File: LICENSE
@@ -15,22 +15,22 @@ Classifier: Programming Language :: Python :: 3.13
 Provides-Extra: ampel
 Provides-Extra: dev
 Provides-Extra: docs
-Requires-Dist: ampel-alerts (==0.10.3a6) ; extra == "ampel"
-Requires-Dist: ampel-core (==0.10.6a17) ; extra == "ampel"
+Requires-Dist: ampel-alerts (==0.10.4a0) ; extra == "ampel"
+Requires-Dist: ampel-core (==0.10.6a21) ; extra == "ampel"
 Requires-Dist: ampel-interface (==0.10.5a8) ; extra == "ampel"
 Requires-Dist: ampel-photometry (==0.10.2a1) ; extra == "ampel"
 Requires-Dist: ampel-plot (>=0.9.1,<0.10.0) ; extra == "ampel"
 Requires-Dist: astropy (>=5.1,<8.0.0)
 Requires-Dist: autodoc_pydantic[erdantic] (>=2.2.0,<3.0.0) ; extra == "docs"
 Requires-Dist: backoff (>=2.1.2,<3.0.0)
-Requires-Dist: coveralls (>=3.3.1,<4.0.0) ; extra == "dev"
+Requires-Dist: coveralls (>=4.0.0,<5.0.0) ; extra == "dev"
 Requires-Dist: jupyter[jupyter] (>=1.0.0,<2.0.0)
 Requires-Dist: jupyterlab[jupyter] (>=4.0.6,<5.0.0)
 Requires-Dist: matplotlib (>=3.5.3,<4.0.0)
 Requires-Dist: mongomock (>=4.3.0,<5.0.0) ; extra == "dev"
 Requires-Dist: mypy (>=1.18.2,<2.0.0) ; extra == "dev"
 Requires-Dist: myst-parser (>=1,<3) ; extra == "docs"
-Requires-Dist: numpy (>=1.23.2,<2.0.0)
+Requires-Dist: numpy (>=1.23.2,<3.0.0)
 Requires-Dist: pandas (>=1.4.3,<3.0.0)
 Requires-Dist: pandas-stubs (>=2.3.2.250926,<3.0.0.0) ; extra == "dev"
 Requires-Dist: pydantic (>=2.0.0,<3.0.0)
@@ -38,12 +38,12 @@ Requires-Dist: pytest (>=7.2.2,<8.0.0) ; extra == "dev"
 Requires-Dist: pyvo (>=1.7.0,<2.0.0)
 Requires-Dist: requests (>=2.28.1,<3.0.0)
 Requires-Dist: ruff (>=0.13.0,<0.14.0) ; extra == "dev"
-Requires-Dist: scikit-image (>=0.19.3,<0.22.0)
+Requires-Dist: scikit-image (>=0.26.0,<0.27.0)
 Requires-Dist: scikit-learn (>=1.3.0,<2.0.0)
 Requires-Dist: scipy-stubs (>=1.16.2.0,<2.0.0.0) ; extra == "dev"
 Requires-Dist: sphinx-rtd-theme (>=1.3.0,<2.0.0) ; extra == "docs"
 Requires-Dist: tqdm (>=4.64.0,<5.0.0)
-Requires-Dist: typer (>=0.19.2,<0.20.0)
+Requires-Dist: typer (>=0.19.2,<0.30.0)
 Requires-Dist: types-pyyaml (>=6.0.12.20250915,<7.0.0.0) ; extra == "dev"
 Requires-Dist: types-requests (>=2.32.4.20250913,<3.0.0.0) ; extra == "dev"
 Requires-Dist: urllib3 (>=2.5.0,<3.0.0)
@@ -61,29 +61,31 @@ Description-Content-Type: text/markdown
 ![](timewise.png)
 # Infrared light curves from WISE data
 
-This package downloads WISE data for positions on the sky and stacks single-exposure photometry per visit
+This package downloads WISE data for positions on the sky and stacks single-exposure photometry per visit. It is designed to do so for efficiently for large samples of millions of objects.
 
 ## Prerequisites
 Python version 3.11, 3.12 or 3.13.
 
 If you want to not only download individual exposure photometry but also stack detections per visit (see below),
-you must have access to a running [MongoDB](https://www.mongodb.com/)*.
+you must have access to a running [MongoDB](https://www.mongodb.com/)* **.
 
 <sub>* On MacOS have alook at the custom `brew` tap
 [here](https://github.com/mongodb/homebrew-brew)
 to get the MongoDB community edition. </sub>
 
+<sub>** On some systems this is not straight forward to set up. `timewise` requires it nevertheless as an integral part of the AMPEL system which is used to efficiently schedule and store the stacking of lightcurves. If you do not foresee a big overhead in calculating lightcurves for a sample of O(1000) objects, a more lightweight package might be more applicable. </sub>
+
 ## Installation
 
 ### If you use timewise only for downloading
 The package can be installed via `pip` (but make sure to install the v1 pre-release):
 ```bash
-pip install --pre timewise==1.0.0a9
+pip install --pre timewise==1.0.0a10
 ```
 ### If you use timewise also for stacking individual exposures
 You must install with the `ampel` extra:
 ```bash
-pip install --pre 'timewise[ampel]==1.0.0a9'
+pip install --pre 'timewise[ampel]==1.0.0a10'
 ```
 To tell AMPEL which modules, aka units, to use, build the corresponding configuration file:
 ```bash
{timewise-1.0.0a9 → timewise-1.0.0a10}/README.md

@@ -7,29 +7,31 @@
 ![](timewise.png)
 # Infrared light curves from WISE data
 
-This package downloads WISE data for positions on the sky and stacks single-exposure photometry per visit
+This package downloads WISE data for positions on the sky and stacks single-exposure photometry per visit. It is designed to do so for efficiently for large samples of millions of objects.
 
 ## Prerequisites
 Python version 3.11, 3.12 or 3.13.
 
 If you want to not only download individual exposure photometry but also stack detections per visit (see below),
-you must have access to a running [MongoDB](https://www.mongodb.com/)*.
+you must have access to a running [MongoDB](https://www.mongodb.com/)* **.
 
 <sub>* On MacOS have alook at the custom `brew` tap
 [here](https://github.com/mongodb/homebrew-brew)
 to get the MongoDB community edition. </sub>
 
+<sub>** On some systems this is not straight forward to set up. `timewise` requires it nevertheless as an integral part of the AMPEL system which is used to efficiently schedule and store the stacking of lightcurves. If you do not foresee a big overhead in calculating lightcurves for a sample of O(1000) objects, a more lightweight package might be more applicable. </sub>
+
 ## Installation
 
 ### If you use timewise only for downloading
 The package can be installed via `pip` (but make sure to install the v1 pre-release):
 ```bash
-pip install --pre timewise==1.0.0a9
+pip install --pre timewise==1.0.0a10
 ```
 ### If you use timewise also for stacking individual exposures
 You must install with the `ampel` extra:
 ```bash
-pip install --pre 'timewise[ampel]==1.0.0a9'
+pip install --pre 'timewise[ampel]==1.0.0a10'
 ```
 To tell AMPEL which modules, aka units, to use, build the corresponding configuration file:
 ```bash
{timewise-1.0.0a9 → timewise-1.0.0a10}/ampel/timewise/alert/TimewiseAlertSupplier.py

@@ -9,7 +9,7 @@
 
 import sys
 from hashlib import blake2b
-from typing import Literal, List
+from typing import Literal, List, Dict, Any
 
 import pandas as pd
 
@@ -88,7 +88,7 @@ class TimewiseAlertSupplier(BaseAlertSupplier, AmpelABC):
         for i, row in table.iterrows():
             # convert table row to dict, convert data types from numpy to native python
             # Respect masked fields and convert to None
-            pp = {k: None if pd.isna(v) else v for k, v in row.to_dict().items()}
+            pp = {str(k): None if pd.isna(v) else v for k, v in row.to_dict().items()}
            pp_hash = blake2b(encode(pp), digest_size=7).digest()
            if self.counter:
                pp["candid"] = self.counter
{timewise-1.0.0a9 → timewise-1.0.0a10}/ampel/timewise/t1/T1HDBSCAN.py

@@ -37,6 +37,7 @@ else:
 class T1HDBSCAN(AbsT1CombineUnit):
     input_mongo_db_name: str
     original_id_key: str
+    mongo: str = "mongodb://localhost:27017/"
     whitelist_region_arcsec: float = 1
     cluster_distance_arcsec: float = 0.5
 
@@ -57,7 +58,7 @@
 
     def __init__(self, **kwargs):
         super().__init__(**kwargs)
-        self._col = MongoClient()[self.input_mongo_db_name]["input"]
+        self._col = MongoClient(self.mongo)[self.input_mongo_db_name]["input"]
         self._plotter = AuxUnitRegister.new_unit(
             model=self.plotter, sub_type=AuxDiagnosticPlotter
         )
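The new `mongo` parameter makes the connection string explicit instead of relying on pymongo's implicit localhost default, so the unit can point at a remote deployment. Roughly what the unit now does on init (the database name below is hypothetical):

```python
from pymongo import MongoClient

mongo = "mongodb://localhost:27017/"  # default of the new T1HDBSCAN.mongo field
input_mongo_db_name = "wise_input"    # hypothetical value for input_mongo_db_name
col = MongoClient(mongo)[input_mongo_db_name]["input"]
print(col.estimated_document_count())  # requires a reachable MongoDB instance
```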
{timewise-1.0.0a9 → timewise-1.0.0a10}/ampel/timewise/t1/TimewiseFilter.py

@@ -17,7 +17,7 @@ from timewise.process import keys
 
 class TimewiseFilter(AbsAlertFilter):
     det_per_visit: int = 8
-    n_visits = 10
+    n_visits: int = 10
 
     def process(self, alert: AmpelAlertProtocol) -> None | bool | int:
         columns = [
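The added annotation is not cosmetic: AMPEL units declare their configurable parameters as annotated class attributes, pydantic-style, so without it `n_visits` was not picked up as a proper, overridable field. For comparison, the behaviour in plain pydantic (standalone sketch, not the actual AMPEL base class):

```python
from pydantic import BaseModel

class Filter(BaseModel):
    det_per_visit: int = 8
    n_visits: int = 10  # annotated, so it is a validated, overridable field

print(Filter(n_visits=3).n_visits)    # 3
print(Filter(n_visits="7").n_visits)  # coerced to 7
```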
{timewise-1.0.0a9 → timewise-1.0.0a10}/pyproject.toml

@@ -4,7 +4,7 @@ build-backend = "poetry.core.masonry.api"
 
 [project]
 name = "timewise"
-version = "1.0.0a9"
+version = "1.0.0a10"
 description = "Download WISE infrared data for many objects and process them with AMPEL"
 authors = [
     { name = "Jannis Necker", email = "jannis.necker@gmail.com" },
@@ -16,18 +16,18 @@ dependencies = [
     "tqdm>=4.64.0,<5.0.0",
     "requests>=2.28.1,<3.0.0",
     "pandas>=1.4.3,<3.0.0",
-    "numpy>=1.23.2,<2.0.0",
+    "numpy>=1.23.2,<3.0.0",
     "pyvo>=1.7.0,<2.0.0",
     "astropy>=5.1,<8.0.0",
     "matplotlib>=3.5.3,<4.0.0",
-    "scikit-image>=0.19.3,<0.22.0",
+    "scikit-image>=0.26.0,<0.27.0",
     "backoff>=2.1.2,<3.0.0",
     "virtualenv>=20.16.3,<21.0.0",
     "pydantic>=2.0.0,<3.0.0",
     "scikit-learn>=1.3.0,<2.0.0",
     "jupyterlab[jupyter]>=4.0.6,<5.0.0",
     "jupyter[jupyter]>=1.0.0,<2.0.0",
-    "typer (>=0.19.2,<0.20.0)",
+    "typer (>=0.19.2,<0.30.0)",
     "urllib3 (>=2.5.0,<3.0.0)",
 ]
 
@@ -46,7 +46,7 @@ Homepage = "https://github.com/JannisNe/timewise"
 
 [project.optional-dependencies]
 dev = [
-    "coveralls>=3.3.1,<4.0.0",
+    "coveralls>=4.0.0,<5.0.0",
     "pytest>=7.2.2,<8.0.0",
     "ruff>=0.13.0,<0.14.0",
     "mypy (>=1.18.2,<2.0.0)",
@@ -64,8 +64,8 @@ docs = [
 ampel= [
     "ampel-photometry (==0.10.2a1)",
     "ampel-plot (>=0.9.1,<0.10.0)",
-    "ampel-core (==0.10.6a17)",
-    "ampel-alerts (==0.10.3a6)",
+    "ampel-core (==0.10.6a21)",
+    "ampel-alerts (==0.10.4a0)",
     "ampel-interface (==0.10.5a8)"
 ]
 
timewise-1.0.0a10/timewise/__init__.py (new file)

@@ -0,0 +1 @@
+__version__ = "1.0.0a10"
{timewise-1.0.0a9 → timewise-1.0.0a10}/timewise/chunking.py

@@ -11,9 +11,7 @@ logger = logging.getLogger(__name__)
 
 
 class Chunk:
-    def __init__(
-        self, chunk_id: int, input_csv, row_indices: npt.NDArray[np.int_]
-    ):
+    def __init__(self, chunk_id: int, input_csv, row_indices: npt.NDArray[np.int_]):
         self.chunk_id = chunk_id
         self.row_numbers = row_indices
         self.input_csv = input_csv
@@ -71,4 +69,4 @@ class Chunker:
         start = chunk_id * self.chunk_size
         stop = min(start + self.chunk_size, self._n_rows)
         logger.debug(f"chunk {chunk_id}: from {start} to {stop}")
-        return Chunk(chunk_id, self.input_csv, np.arange(start=start, stop=stop))
+        return Chunk(chunk_id, self.input_csv, np.arange(start, stop))
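Both hunks are formatting-only; `np.arange(start, stop)` is just the positional spelling of the previous keyword call, and the chunk boundaries are unchanged. For reference, the slicing arithmetic `Chunker` applies to the input CSV (sample and chunk sizes below are hypothetical):

```python
import numpy as np

chunk_size, n_rows = 1000, 2345  # hypothetical values
n_chunks = int(np.ceil(n_rows / chunk_size))
for chunk_id in range(n_chunks):
    start = chunk_id * chunk_size
    stop = min(start + chunk_size, n_rows)  # last chunk is truncated
    row_indices = np.arange(start, stop)    # rows handled by this chunk
    print(chunk_id, row_indices[0], row_indices[-1])
```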
{timewise-1.0.0a9 → timewise-1.0.0a10}/timewise/io/stable_tap.py

@@ -95,10 +95,32 @@ class StableAsyncTAPJob(vo.dal.AsyncTAPJob):
     @backoff.on_exception(
         backoff.expo,
         vo.dal.DALServiceError,
-        max_tries=50,
+        max_tries=5,
     )
-    def _update(self, *args, **kwargs):
-        return super(StableAsyncTAPJob, self)._update(*args, **kwargs)
+    def _update(self, wait_for_statechange=False, timeout=60.0):
+        n_tries = 0
+        max_tries = 10
+        while n_tries < max_tries:
+            try:
+                res = super(StableAsyncTAPJob, self)._update(
+                    wait_for_statechange=wait_for_statechange,
+                    timeout=timeout * (1 + n_tries),
+                )
+            except vo.dal.DALServiceError as e:
+                if "Read timed out" in str(e):
+                    logger.debug(
+                        f"{self.url} timed out after {timeout * (1 + n_tries):.0f}s"
+                    )
+                    n_tries += 1
+                    continue
+                else:
+                    raise e
+
+            return res
+
+        raise vo.dal.DALServiceError(
+            f"No success after {max_tries} tries for {self.url}!"
+        )
 
 
 class StableTAPService(vo.dal.TAPService):
@@ -136,6 +158,8 @@ class StableTAPService(vo.dal.TAPService):
         max_tries=5,
     )
     def run_sync(
-        self, query, *, language="ADQL", maxrec=None, uploads=None,
-        **keywords):
-        return super().run_sync(query, language=language, maxrec=maxrec, uploads=uploads, **keywords)
+        self, query, *, language="ADQL", maxrec=None, uploads=None, **keywords
+    ):
+        return super().run_sync(
+            query, language=language, maxrec=maxrec, uploads=uploads, **keywords
+        )
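The rewritten `_update` keeps the `backoff` decorator (now 5 tries instead of 50) but adds an inner loop that retries only read timeouts, stretching the polling timeout linearly with each attempt; every other `DALServiceError` still propagates to the decorator. The same pattern in a self-contained sketch, using `requests` instead of `pyvo` and a hypothetical `fetch_status` helper:

```python
import backoff
import requests

# Outer guard: exponential backoff on any remaining request error,
# analogous to the @backoff.on_exception wrapper kept on _update()
@backoff.on_exception(backoff.expo, requests.RequestException, max_tries=5)
def fetch_status(url: str, timeout: float = 60.0, max_tries: int = 10) -> str:
    for n in range(max_tries):
        try:
            # the timeout grows linearly per attempt: timeout * (1 + n)
            return requests.get(url, timeout=timeout * (1 + n)).text
        except requests.exceptions.ReadTimeout:
            continue  # only read timeouts are retried by the inner loop
    raise requests.RequestException(f"No success after {max_tries} tries for {url}!")
```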
{timewise-1.0.0a9 → timewise-1.0.0a10}/timewise/process/interface.py

@@ -10,6 +10,8 @@ from pymongo import MongoClient, ASCENDING
 from pymongo.collection import Collection
 from pymongo.database import Database
 
+from ..util.path import expand
+
 if find_spec("ampel.core"):
     AMPEL_EXISTS = True
     from ampel.cli.JobCommand import JobCommand
@@ -40,6 +42,10 @@ class AmpelInterface:
         self.template_path = Path(template_path)
         self.uri = uri
 
+    @property
+    def expanded_input_csv(self) -> Path:
+        return expand(self.input_csv)
+
     def import_input(self):
         # if collection already exists, assume import was already done
         if "input" in self.client[self.input_mongo_db_name].list_collection_names():
@@ -48,12 +54,12 @@
             )
             return
 
-        logger.debug(f"importing {self.input_csv} into {self.input_mongo_db_name}")
+        logger.debug(f"importing {self.expanded_input_csv} into {self.input_mongo_db_name}")
         col = self.client[self.input_mongo_db_name]["input"]
 
         # create an index from stock id
         col.create_index([(self.orig_id_key, ASCENDING)], unique=True)
-        col.insert_many(pd.read_csv(self.input_csv).to_dict(orient="records"))
+        col.insert_many(pd.read_csv(self.expanded_input_csv).to_dict(orient="records"))
 
     def make_ampel_job_file(self, cfg_path: Path) -> Path:
         logger.debug(f"loading ampel job template from {self.template_path}")
{timewise-1.0.0a9 → timewise-1.0.0a10}/timewise/process/stacking.py

@@ -146,7 +146,7 @@ def calculate_epochs(
 
     bias_correction_function = CORRECTION_FUNCTIONS[correction_name]
     one_points_mask = None
-    visits_at_least_two_point = []
+    visits_at_least_two_point: npt.NDArray[np.generic] = np.array([])
 
     while n_remaining_outlier > 0:
         # make a mask of values to use
timewise-1.0.0a9/timewise/__init__.py (removed)

@@ -1 +0,0 @@
-__version__ = "1.0.0a9"