timewise 0.5.3__py3-none-any.whl → 1.0.0a1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (52) hide show
  1. timewise/__init__.py +1 -5
  2. timewise/backend/__init__.py +6 -0
  3. timewise/backend/base.py +36 -0
  4. timewise/backend/filesystem.py +80 -0
  5. timewise/chunking.py +50 -0
  6. timewise/cli.py +117 -11
  7. timewise/config.py +34 -0
  8. timewise/io/__init__.py +1 -0
  9. timewise/io/config.py +64 -0
  10. timewise/io/download.py +302 -0
  11. timewise/io/stable_tap.py +121 -0
  12. timewise/plot/__init__.py +3 -0
  13. timewise/plot/diagnostic.py +242 -0
  14. timewise/plot/lightcurve.py +112 -0
  15. timewise/plot/panstarrs.py +260 -0
  16. timewise/plot/sdss.py +109 -0
  17. timewise/process/__init__.py +2 -0
  18. timewise/process/config.py +30 -0
  19. timewise/process/interface.py +143 -0
  20. timewise/process/keys.py +10 -0
  21. timewise/process/stacking.py +310 -0
  22. timewise/process/template.yml +49 -0
  23. timewise/query/__init__.py +6 -0
  24. timewise/query/base.py +45 -0
  25. timewise/query/positional.py +40 -0
  26. timewise/tables/__init__.py +10 -0
  27. timewise/tables/allwise_p3as_mep.py +22 -0
  28. timewise/tables/base.py +9 -0
  29. timewise/tables/neowiser_p1bs_psd.py +22 -0
  30. timewise/types.py +30 -0
  31. timewise/util/backoff.py +12 -0
  32. timewise/util/csv_utils.py +12 -0
  33. timewise/util/error_threading.py +70 -0
  34. timewise/util/visits.py +33 -0
  35. timewise-1.0.0a1.dist-info/METADATA +205 -0
  36. timewise-1.0.0a1.dist-info/RECORD +39 -0
  37. {timewise-0.5.3.dist-info → timewise-1.0.0a1.dist-info}/WHEEL +1 -1
  38. timewise-1.0.0a1.dist-info/entry_points.txt +3 -0
  39. timewise/big_parent_sample.py +0 -106
  40. timewise/config_loader.py +0 -157
  41. timewise/general.py +0 -52
  42. timewise/parent_sample_base.py +0 -89
  43. timewise/point_source_utils.py +0 -68
  44. timewise/utils.py +0 -558
  45. timewise/wise_bigdata_desy_cluster.py +0 -1407
  46. timewise/wise_data_base.py +0 -2027
  47. timewise/wise_data_by_visit.py +0 -672
  48. timewise/wise_flux_conversion_correction.dat +0 -19
  49. timewise-0.5.3.dist-info/METADATA +0 -55
  50. timewise-0.5.3.dist-info/RECORD +0 -17
  51. timewise-0.5.3.dist-info/entry_points.txt +0 -3
  52. {timewise-0.5.3.dist-info → timewise-1.0.0a1.dist-info/licenses}/LICENSE +0 -0
@@ -0,0 +1,112 @@
1
+ from typing import Dict
2
+ import warnings
3
+ import matplotlib.pyplot as plt
4
+ import pandas as pd
5
+ import numpy as np
6
+
7
+ from timewise.process import keys
8
+
9
+
10
# Default matplotlib color per WISE band: W1 red, W2 blue.
BAND_PLOT_COLORS = {"w1": "r", "w2": "b"}
11
+
12
+
13
def plot_lightcurve(
    lum_key: str,
    stacked_lightcurve: pd.DataFrame | None = None,
    raw_lightcurve: pd.DataFrame | None = None,
    ax: plt.Axes | None = None,
    colors: Dict[str, str] | None = None,
    add_to_label: str = "",
    **kwargs,
) -> tuple[plt.Figure, plt.Axes]:
    """Plot WISE W1/W2 lightcurves into a single axes.

    At least one of ``stacked_lightcurve`` / ``raw_lightcurve`` must be given.

    :param lum_key: brightness column suffix (e.g. ``keys.MAG_EXT``); column
        names are assembled as ``f"{band}..."`` combinations with this key
    :param stacked_lightcurve: per-visit stacked photometry; detections are
        drawn as square error bars, upper limits as down-pointing triangles
    :param raw_lightcurve: single-epoch photometry; detections are drawn as
        faint circles, rows without an error value as diamond upper limits
    :param ax: axes to draw into; when omitted a new figure is created and
        ``kwargs`` are forwarded to ``plt.subplots``
    :param colors: per-band plot colors, defaults to ``BAND_PLOT_COLORS``
    :param add_to_label: extra text appended to each legend label
    :return: the figure and axes that were drawn into
    :raises KeyError: if an expected brightness column is missing
    """
    assert (stacked_lightcurve is not None) or (raw_lightcurve is not None)

    if not colors:
        colors = BAND_PLOT_COLORS

    if not ax:
        fig, ax = plt.subplots(**kwargs)
    else:
        fig = plt.gcf()

    for b in ["w1", "w2"]:
        try:
            if isinstance(stacked_lightcurve, pd.DataFrame):
                # boolean mask selecting visits that are only upper limits
                ul_mask_stacked = np.array(
                    stacked_lightcurve[f"{b}{lum_key}{keys.UPPER_LIMIT}"]
                ).astype(bool)
                # detections: squares with black RMS error bars
                ax.errorbar(
                    stacked_lightcurve[keys.MEAN + "_mjd"][~ul_mask_stacked],
                    stacked_lightcurve[f"{b}{keys.MEAN}{lum_key}"][~ul_mask_stacked],
                    yerr=stacked_lightcurve[f"{b}{lum_key}{keys.RMS}"][
                        ~ul_mask_stacked
                    ],
                    label=f"{b}{add_to_label} stacked",
                    ls="",
                    marker="s",
                    c=colors[b],
                    markersize=4,
                    markeredgecolor="k",
                    ecolor="k",
                    capsize=2,
                )
                # upper limits: small downward triangles, no error bars
                ax.scatter(
                    stacked_lightcurve[keys.MEAN + "_mjd"][ul_mask_stacked],
                    stacked_lightcurve[f"{b}{keys.MEAN}{lum_key}"][ul_mask_stacked],
                    marker="v",
                    c=colors[b],
                    alpha=0.7,
                    s=2,
                )

            if isinstance(raw_lightcurve, pd.DataFrame):
                # rows with a brightness value at all ...
                m = ~raw_lightcurve[f"{b}{lum_key}"].isna()
                # ... of which those without an error value are upper limits
                ul_mask_raw = raw_lightcurve[f"{b}{keys.ERROR_EXT}{lum_key}"].isna()

                tot_m = m & ~ul_mask_raw
                if np.any(tot_m):
                    ax.errorbar(
                        raw_lightcurve.mjd[tot_m],
                        raw_lightcurve[f"{b}{lum_key}"][tot_m],
                        yerr=raw_lightcurve[f"{b}{keys.ERROR_EXT}{lum_key}"][tot_m],
                        label=f"{b}{add_to_label}",
                        ls="",
                        marker="o",
                        c=colors[b],
                        markersize=4,
                        alpha=0.3,
                    )

                single_ul_m = m & ul_mask_raw
                if np.any(single_ul_m):
                    # only label upper limits when no detections carry a label
                    label = (
                        f"{b}{add_to_label} upper limits" if not np.any(tot_m) else ""
                    )
                    ax.scatter(
                        raw_lightcurve.mjd[single_ul_m],
                        raw_lightcurve[f"{b}{lum_key}"][single_ul_m],
                        marker="d",
                        c=colors[b],
                        alpha=0.3,
                        s=1,
                        label=label,
                    )

        except KeyError as e:
            raise KeyError(f"Could not find brightness key {e}!")

    if lum_key == keys.MAG_EXT:
        # magnitudes: invert the y-axis so brighter is up
        ylim = ax.get_ylim()
        ax.set_ylim(max(ylim), min(ylim))

    ax.set_xlabel("MJD")
    ax.set_ylabel(lum_key)
    with warnings.catch_warnings():
        # legend() warns when nothing labelled was drawn; that is fine here
        warnings.filterwarnings(
            "ignore",
            message="No artists with labels found to put in legend",
            category=UserWarning,
        )
        ax.legend()

    return fig, ax
@@ -0,0 +1,260 @@
1
+ import logging
2
+ import hashlib
3
+ from pathlib import Path
4
+ from io import BytesIO
5
+ from PIL import Image
6
+
7
+ import requests
8
+ import matplotlib.pyplot as plt
9
+ import numpy
10
+ from astropy.table import Table
11
+ import astropy.config.paths
12
+
13
+
14
# Module-level logger, named after this module.
logger = logging.getLogger(__name__)
15
+
16
+
17
class PanSTARRSQueryError(Exception):
    """Raised when the PanSTARRS image services return no usable images."""

    pass
19
+
20
+
21
def load_cache_or_download(url):
    """Return the raw content of *url*, caching it in astropy's cache dir.

    The cache key is the MD5 of the URL; on a cache hit the bytes are
    returned without any network access.

    :param url: URL to fetch
    :return: response body as ``bytes``
    :raises requests.HTTPError: if the server answers with an error status
    """
    logger.debug(f"loading or downloading {url}")
    h = hashlib.md5(url.encode()).hexdigest()
    cache_dir = Path(astropy.config.paths.get_cache_dir())
    cache_file = cache_dir / (h + ".cache")
    logger.debug(f"cache file is {cache_file}")
    if not cache_file.is_file():
        logger.debug(f"downloading {url}")
        r = requests.get(url)
        # Fail loudly on HTTP errors BEFORE writing the cache file:
        # previously an error page was cached permanently and then served
        # as if it were the real payload on every later call.
        r.raise_for_status()
        with open(cache_file, "wb") as f:
            f.write(r.content)
        return r.content
    else:
        logger.debug(f"loading {cache_file}")
        with open(cache_file, "rb") as f:
            return f.read()
37
+
38
+
39
def annotate_not_available(ax):
    """Mark *ax* as lying outside the PanSTARRS footprint with a red note."""
    xlim, ylim = ax.get_xlim(), ax.get_ylim()
    # place the note at the centre of the current axes limits
    x = sum(xlim) / 2
    y = sum(ylim) / 2
    logger.debug(f"annotate_not_available at {x}, {y}")
    ax.annotate(
        "Outside\nPanSTARRS\nFootprint",
        (x, y),
        color="red",
        ha="center",
        va="center",
        fontsize=10,
    )
53
+
54
+
55
def getimages(ra, dec, filters="grizy"):
    """Query the ps1filenames.py service for available PS1 images.

    ra, dec = position in degrees
    filters = string with filters to include
    Returns an astropy Table with the service response
    """
    url = (
        "https://ps1images.stsci.edu/cgi-bin/ps1filenames.py"
        f"?ra={ra}&dec={dec}&filters={filters}"
    )
    raw = load_cache_or_download(url)
    return Table.read(raw.decode(), format="ascii")
69
+
70
+
71
def geturl(
    ra, dec, size=240, output_size=None, filters="grizy", format="jpg", color=False
):
    """Get URL for images in the table

    ra, dec = position in degrees
    size = extracted image size in pixels (0.25 arcsec/pixel)
    output_size = output (display) image size in pixels (default = size).
    output_size has no effect for fits format images.
    filters = string with filters to include
    format = data format (options are "jpg", "png" or "fits")
    color = if True, creates a color image (only for jpg or png format).
    Default is return a list of URLs for single-filter grayscale images.
    Returns a string with the URL if color=True, otherwise a list of URLs
    (one per available filter).
    """

    if color and format == "fits":
        raise ValueError("color images are available only for jpg or png formats")
    if format not in ("jpg", "png", "fits"):
        raise ValueError("format must be one of jpg, png, fits")
    table = getimages(ra, dec, filters=filters)
    if len(table) == 0:
        # no images at this position, i.e. outside the PS1 footprint
        raise PanSTARRSQueryError("No images available")
    url = (
        f"https://ps1images.stsci.edu/cgi-bin/fitscut.cgi?"
        f"ra={ra}&dec={dec}&size={size}&format={format}"
    )
    if output_size:
        url = url + "&output_size={}".format(output_size)
    # sort filters from red to blue
    flist = ["yzirg".find(x) for x in table["filter"]]
    table = table[numpy.argsort(flist)]
    if color:
        if len(table) > 3:
            # pick 3 filters
            table = table[[0, len(table) // 2, len(table) - 1]]
        for i, param in enumerate(["red", "green", "blue"]):
            url = url + "&{}={}".format(param, table["filename"][i])
    else:
        # one grayscale URL per filter, each using the "red" channel slot
        urlbase = url + "&red="
        url = []
        for filename in table["filename"]:
            url.append(urlbase + filename)
    return url
115
+
116
+
117
def getcolorim(ra, dec, size=240, output_size=None, filters="grizy", format="jpg"):
    """Get a color image at a sky position.

    ra, dec = position in degrees
    size = extracted image size in pixels (0.25 arcsec/pixel)
    output_size = output (display) image size in pixels (default = size);
        has no effect for fits format images.
    filters = string with filters to include
    format = data format ("jpg" or "png")
    Returns the image as a PIL Image
    """
    if format not in ("jpg", "png"):
        raise ValueError("format must be jpg or png")
    cutout_url = geturl(
        ra,
        dec,
        size=size,
        filters=filters,
        output_size=output_size,
        format=format,
        color=True,
    )
    return Image.open(BytesIO(load_cache_or_download(cutout_url)))
143
+
144
+
145
def getgrayim(ra, dec, size=240, output_size=None, filter="g", format="jpg"):
    """Get a grayscale image at a sky position.

    ra, dec = position in degrees
    size = extracted image size in pixels (0.25 arcsec/pixel)
    output_size = output (display) image size in pixels (default = size);
        has no effect for fits format images.
    filter = string with the single filter to extract (one of grizy)
    format = data format ("jpg" or "png")
    Returns the image as a PIL Image
    """
    if format not in ("jpg", "png"):
        raise ValueError("format must be jpg or png")
    if filter not in list("grizy"):
        raise ValueError("filter must be one of grizy")
    # geturl returns a list of per-filter URLs in grayscale mode;
    # a single filter was requested, so take the first entry
    urls = geturl(
        ra, dec, size=size, filters=filter, output_size=output_size, format=format
    )
    return Image.open(BytesIO(load_cache_or_download(urls[0])))
167
+
168
+
169
def plot_panstarrs_cutout(
    ra,
    dec,
    arcsec,
    interactive=False,
    fn=None,
    title=None,
    save=False,
    ax=False,
    plot_color_image=False,
    height=2.5,
):
    """Plot PanSTARRS cutouts centred on (ra, dec).

    Either one grayscale panel per grizy filter (default) or a single
    color image (``plot_color_image=True``).

    :param ra: right ascension in degrees
    :param dec: declination in degrees
    :param arcsec: edge length of the cutout in arcsec
    :param interactive: if True, return (fig, axes) instead of closing
    :param fn: filename used when ``save`` is True
    :param title: figure title; defaults to ``"<ra>_<dec>"``
    :param save: save the figure to ``fn``
    :param ax: axes (or axes array) to draw into; new figure when falsy
    :param plot_color_image: plot one color image instead of per-filter panels
    :param height: figure height in inches
    :return: (fig, axes) when ``interactive``, otherwise None
    """
    # PanSTARRS cutout services deliver 0.25 arcsec per pixel
    arcsec_per_px = 0.25
    ang_px = int(arcsec / arcsec_per_px)

    # extent puts (0, 0) at the requested position, RA offset increasing left
    imshow_kwargs = {
        "origin": "upper",
        "extent": ([arcsec / 2, -arcsec / 2, -arcsec / 2, arcsec / 2]),
    }
    # red cross marking the requested position
    scatter_args = [0, 0]
    scatter_kwargs = {"marker": "x", "color": "red"}

    if not plot_color_image:
        filters = "grizy"
        if not ax:
            # 2-row grid: thin top row is only a spacer, bottom row holds panels
            fig, axss = plt.subplots(
                2,
                len(filters),
                sharex="all",
                sharey="all",
                gridspec_kw={"wspace": 0, "hspace": 0, "height_ratios": [1, 8]},
                figsize=(height * 5, height),
            )
        else:
            fig = plt.gcf()
            axss = ax

        for j, fil in enumerate(list(filters)):
            axs = axss[1]
            try:
                im = getgrayim(ra, dec, size=ang_px, filter=fil)
                axs[j].imshow(im, cmap="gray", **imshow_kwargs)
            except PanSTARRSQueryError:
                # no coverage: keep the panel but annotate it
                axs[j].set_xlim(-arcsec / 2, arcsec / 2)
                axs[j].set_ylim(-arcsec / 2, arcsec / 2)
                annotate_not_available(axs[j])

            axs[j].scatter(*scatter_args, **scatter_kwargs)
            axs[j].set_title(fil)
            axss[0][j].axis("off")

    else:
        logger.debug("plotting color image")
        if not ax:
            fig, axss = plt.subplots(figsize=(height, height))
        else:
            fig = plt.gcf()
            axss = ax

        try:
            im = getcolorim(ra, dec, size=ang_px)
            axss.imshow(im, **imshow_kwargs)
        except PanSTARRSQueryError:
            axss.set_xlim(-arcsec / 2, arcsec / 2)
            axss.set_ylim(-arcsec / 2, arcsec / 2)
            annotate_not_available(axss)
        axss.scatter(*scatter_args, **scatter_kwargs)

    _this_title = title if title else f"{ra}_{dec}"
    # axis labels express offsets from the requested position
    # NOTE(review): axis values are arcsec offsets while ra/dec are degrees;
    # the labels mix the two units — confirm this is intended
    si = "-" if dec > 0 else "+"
    ylabel = f"Dec {si} {abs(dec):.2f} [arcsec]"
    xlabel = f"RA - {ra:.2f} [arcsec]"
    try:
        # single-axes case (color image drawn into one axes)
        axss.set_title(_this_title)
        axss.set_xlabel(xlabel)
        axss.set_ylabel(ylabel)
        axss.grid(ls=":", alpha=0.5)
    except AttributeError:  # in this case axss is an array
        fig.supylabel(ylabel)
        fig.supxlabel(xlabel)
        fig.suptitle(_this_title)
        for a in axss.flatten():
            a.grid(ls=":", alpha=0.5)

    if save:
        logger.info(f"saving under {fn}")
        fig.savefig(fn)

    if interactive:
        return fig, axss

    plt.close()
timewise/plot/sdss.py ADDED
@@ -0,0 +1,109 @@
1
+ import requests
2
+ import os
3
+ import getpass
4
+ import logging
5
+ import matplotlib.pyplot as plt
6
+ import backoff
7
+
8
+ from ..util.backoff import backoff_hndlr
9
+
10
+
11
# Module-level logger, named after this module.
logger = logging.getLogger(__name__)
12
+
13
+
14
def get_sdss_credentials():
    """Return the (user id, password) pair for SDSS SciServer access.

    Missing values are requested interactively exactly once and cached in
    the ``SDSS_USERID`` / ``SDSS_USERPW`` environment variables, so later
    calls in the same process do not prompt again.
    """
    prompts = {
        "SDSS_USERID": lambda: input("Enter SDSS user ID:"),
        "SDSS_USERPW": lambda: getpass.getpass("Enter SDSS password:"),
    }
    for var, ask in prompts.items():
        if not os.environ.get(var):
            os.environ[var] = ask()
    return os.environ["SDSS_USERID"], os.environ["SDSS_USERPW"]
20
+
21
+
22
def login_to_sciserver():
    """Authenticate against SciServer with the cached SDSS credentials.

    Raises ModuleNotFoundError with an installation hint when the optional
    SciServer dependency is not installed.
    """
    try:
        from SciServer import Authentication
    except ModuleNotFoundError:
        raise ModuleNotFoundError(
            "Please install SciServer (https://github.com/sciserver/SciScript-Python) "
            "if you want to see SDSS cutouts!"
        )

    user_id, password = get_sdss_credentials()
    logger.debug(f"logging in to SciServer with username {user_id}")
    Authentication.login(user_id, password)
34
+
35
+
36
@backoff.on_exception(
    backoff.expo, requests.RequestException, max_tries=50, on_backoff=backoff_hndlr
)
def get_cutout(*args, **kwargs):
    """Fetch a JPEG image cutout from SDSS SkyServer.

    All arguments are passed through to ``SkyServer.getJpegImgCutout``.
    Network failures (``requests.RequestException``) are retried with
    exponential backoff, up to 50 tries.
    """
    # authenticate first so the SkyServer request is made with a valid session
    login_to_sciserver()
    from SciServer import SkyServer

    return SkyServer.getJpegImgCutout(*args, **kwargs)
44
+
45
+
46
def plot_sdss_cutout(
    ra,
    dec,
    arcsec=20,
    arcsec_per_px=0.1,
    interactive=False,
    fn=None,
    title=None,
    save=False,
    ax=False,
    height=2.5,
):
    """Plot an SDSS JPEG cutout centred on (ra, dec).

    :param ra: right ascension in degrees
    :param dec: declination in degrees
    :param arcsec: edge length of the cutout in arcsec
    :param arcsec_per_px: pixel scale of the requested image
    :param interactive: if True, return (fig, ax) instead of closing
    :param fn: filename used when ``save`` is True
    :param title: optional axes title
    :param save: save the figure to ``fn``
    :param ax: axes to draw into; a new figure is created when falsy
    :param height: figure height (and width) in inches for a new figure
    :return: (fig, ax) when ``interactive``, otherwise None
    """
    ang_px = int(arcsec / arcsec_per_px)

    if not ax:
        fig, ax = plt.subplots(figsize=(height, height))
    else:
        fig = plt.gcf()

    try:
        im = get_cutout(ra, dec, scale=arcsec_per_px, height=ang_px, width=ang_px)
        # extent puts (0, 0) at the requested position
        ax.imshow(
            im,
            origin="upper",
            extent=(
                (
                    arcsec / 2,
                    -arcsec / 2,
                    -arcsec / 2,
                    arcsec / 2,
                )
            ),
            cmap="gray",
        )
        ax.scatter(0, 0, marker="x", color="red")

    except Exception as e:
        if "outside the SDSS footprint" in str(e):
            # no coverage here: annotate the empty axes instead of failing
            xlim = ax.get_xlim()
            ylim = ax.get_ylim()
            x = sum(xlim) / 2
            y = sum(ylim) / 2
            ax.annotate(
                "Outside SDSS Footprint",
                (x, y),
                color="red",
                ha="center",
                va="center",
                fontsize=20,
            )
        else:
            # re-raise unchanged: the previous `raise Exception(e)` discarded
            # the original exception type and traceback
            raise

    if title:
        ax.set_title(title)

    if save:
        logger.debug(f"saving under {fn}")
        fig.savefig(fn)

    if interactive:
        return fig, ax

    plt.close()
@@ -0,0 +1,2 @@
1
+ from .config import AmpelConfig
2
+ from .interface import AmpelInterface
@@ -0,0 +1,30 @@
1
+ from pathlib import Path
2
+ import logging
3
+
4
+ from pydantic import BaseModel
5
+
6
+ from .interface import AmpelInterface
7
+
8
+
9
logger = logging.getLogger(__name__)
# Ampel job template shipped with the package; used unless overridden in config.
DEFAULT_TEMPLATE_PATH = Path(__file__).parent / "template.yml"
11
+
12
+
13
class AmpelConfig(BaseModel):
    """Pydantic configuration for the AMPEL processing stage.

    ``mongo_db_name`` is the database AMPEL writes its results to,
    ``template_path`` points at the job template (placeholder tokens are
    substituted by :class:`AmpelInterface`), and ``uri`` is the MongoDB
    connection string.
    """

    mongo_db_name: str
    template_path: Path = DEFAULT_TEMPLATE_PATH
    uri: str = "localhost:27017"

    @property
    def input_mongo_db_name(self) -> str:
        """Name of the companion database that holds the imported input CSV."""
        return self.mongo_db_name + "_input"

    def build_interface(self, original_id_key: str, input_csv: Path) -> AmpelInterface:
        """Construct an :class:`AmpelInterface` wired up with this config.

        :param original_id_key: CSV column holding the original object id
        :param input_csv: path to the input catalogue CSV
        """
        return AmpelInterface(
            mongo_db_name=self.mongo_db_name,
            orig_id_key=original_id_key,
            input_csv=input_csv,
            input_mongo_db_name=self.input_mongo_db_name,
            template_path=self.template_path,
            uri=self.uri,
        )
@@ -0,0 +1,143 @@
1
+ from pathlib import Path
2
+ import logging
3
+ from typing import Iterable, List, cast
4
+
5
+ import numpy as np
6
+ from numpy import typing as npt
7
+ import pandas as pd
8
+ from pymongo import MongoClient, ASCENDING
9
+ from pymongo.collection import Collection
10
+ from pymongo.database import Database
11
+ from ampel.cli.JobCommand import JobCommand
12
+ from ampel.types import DataPointId, StockId
13
+
14
+
15
# Module-level logger, named after this module.
logger = logging.getLogger(__name__)
16
+
17
+
18
class AmpelInterface:
    """Wrapper around an AMPEL job: imports the input catalogue into MongoDB,
    renders the job file from a template, runs the job via the AMPEL CLI,
    and extracts/exports the resulting tier documents."""

    def __init__(
        self,
        mongo_db_name: str,
        orig_id_key: str,
        input_csv: Path,
        input_mongo_db_name: str,
        template_path: str | Path,
        uri: str,
    ):
        # database AMPEL writes its t0/t1/t2 tier collections to
        self.mongo_db_name = mongo_db_name
        # column in the input CSV holding the original object identifier
        self.orig_id_key = orig_id_key
        self.input_csv = input_csv
        # separate database holding the imported input catalogue
        self.input_mongo_db_name = input_mongo_db_name
        self.template_path = Path(template_path)
        # MongoDB connection URI
        self.uri = uri

    def import_input(self):
        """Import the input CSV into the 'input' collection (idempotent)."""
        # if collection already exists, assume import was already done
        if "input" in self.client[self.input_mongo_db_name].list_collection_names():
            logger.debug(
                f"'input' collection already exists in '{self.input_mongo_db_name}'."
            )
            return

        logger.debug(f"importing {self.input_csv} into {self.input_mongo_db_name}")
        col = self.client[self.input_mongo_db_name]["input"]

        # create an index from stock id
        col.create_index([(self.orig_id_key, ASCENDING)], unique=True)
        col.insert_many(pd.read_csv(self.input_csv).to_dict(orient="records"))

    def make_ampel_job_file(self, cfg_path: Path) -> Path:
        """Render the job template by substituting its placeholder tokens
        and write the result next to *cfg_path*.

        :param cfg_path: timewise config file whose path/stem are used
        :return: path of the written ampel job YAML
        """
        logger.debug(f"loading ampel job template from {self.template_path}")
        with self.template_path.open("r") as f:
            template = f.read()

        # plain text substitution of the template's placeholder tokens
        ampel_job = (
            template.replace("TIMEWISE_CONFIG_PATH", str(cfg_path))
            .replace("ORIGINAL_ID_KEY", self.orig_id_key)
            .replace("INPUT_MONGODB_NAME", self.input_mongo_db_name)
            .replace("MONGODB_NAME", self.mongo_db_name)
        )

        ampel_job_path = cfg_path.parent / f"{cfg_path.stem}_ampel_job.yml"
        logger.info(f"writing ampel job to {ampel_job_path}")
        with ampel_job_path.open("w") as f:
            f.write(ampel_job)

        return ampel_job_path

    def prepare(self, cfg_path: Path) -> Path:
        """Import the input data and return the rendered job file path."""
        self.import_input()
        return self.make_ampel_job_file(cfg_path)

    def run(self, timewise_cfg_path: Path, ampel_config_path: Path):
        """Prepare and execute the AMPEL job through its CLI entry point."""
        ampel_job_path = self.prepare(timewise_cfg_path)
        cmd = JobCommand()
        parser = cmd.get_parser()
        args = vars(
            parser.parse_args(
                ["--schema", str(ampel_job_path), "--config", str(ampel_config_path)]
            )
        )
        logger.debug(args)
        cmd.run(args, unknown_args=())

    @property
    def client(self) -> MongoClient:
        # NOTE(review): a new client is constructed on every access —
        # consider caching one if connection churn becomes an issue
        return MongoClient(self.uri)

    @property
    def db(self) -> Database:
        """The AMPEL results database."""
        return self.client[self.mongo_db_name]

    @property
    def t0(self) -> Collection:
        """Tier-0 collection (individual datapoints)."""
        return self.db["t0"]

    @property
    def t1(self) -> Collection:
        """Tier-1 collection (per-stock datapoint selections)."""
        return self.db["t1"]

    @property
    def t2(self) -> Collection:
        """Tier-2 collection (computed results, e.g. T2StackVisits)."""
        return self.db["t2"]

    def extract_stacked_lightcurve(self, stock_id: StockId) -> pd.DataFrame:
        """Return the T2StackVisits result for *stock_id* as a DataFrame.

        Returns an empty DataFrame when no result document exists; asserts
        that at most one document with exactly one body entry is present.
        """
        records = []
        for i, ic in enumerate(
            self.t2.find({"stock": stock_id, "unit": "T2StackVisits"})
        ):
            stock_id_str = str(stock_id)
            assert i == 0, f"More than one stacked lightcurve found for {stock_id_str}!"
            assert len(ic["body"]) == 1, (
                f"None or more than one stacking result found for {stock_id_str}!"
            )
            records = ic["body"][0]
        return pd.DataFrame(records)

    def extract_datapoints(self, stock_id: StockId) -> pd.DataFrame:
        """Return all tier-0 datapoints of *stock_id*, indexed by datapoint id."""
        records = []
        index = []
        for ic in self.t0.find({"stock": stock_id}):
            records.append(ic["body"])
            index.append(ic["id"])
        return pd.DataFrame(records, index=index)

    def extract_selected_datapoint_ids(self, stock_id: StockId) -> List[DataPointId]:
        """Return the datapoint ids selected at tier 1 (empty list if none)."""
        d = self.t1.find_one({"stock": stock_id})
        if d is None:
            return []
        return d["dps"]

    def export_stacked_lightcurve(self, stock_id: StockId, filename: Path):
        """Write the stacked lightcurve of *stock_id* to *filename* as CSV."""
        # NOTE(review): the log message says "(unknown)" rather than the
        # target filename — presumably {filename} was intended; confirm
        logger.debug(f"Exporting stacked lightcurve for {str(stock_id)} to (unknown)")
        self.extract_stacked_lightcurve(stock_id).to_csv(filename)

    def export_many(
        self, directory: Path, stock_ids: Iterable[StockId] | StockId | None = None
    ):
        """Export stacked lightcurves to ``directory`` as ``<stock_id>.csv``.

        :param directory: target directory, created if missing
        :param stock_ids: one id, an iterable of ids, or None to export
            every id listed in the input CSV
        """
        if stock_ids is None:
            stock_ids = pd.read_csv(self.input_csv)[self.orig_id_key]
        directory.mkdir(exist_ok=True, parents=True)
        for s in np.atleast_1d(cast(npt.ArrayLike, stock_ids)):
            # s is a numpy scalar here; .item() converts to a native type
            self.export_stacked_lightcurve(s.item(), directory / f"{s}.csv")
@@ -0,0 +1,10 @@
1
# Column-name fragments. Lightcurve DataFrame columns are assembled by
# concatenating a band prefix ("w1"/"w2") with these fragments, e.g.
# f"{band}{MEAN}{lum_key}" or f"{band}{lum_key}{RMS}" (see timewise.plot).
MEAN = "mean"  # stacked mean; also used as prefix of the "mean_mjd" column
MEDIAN = "median"
RMS = "rms"  # suffix of the scatter/uncertainty column of stacked values
UPPER_LIMIT = "ul"  # suffix of the boolean upper-limit flag column
NPOINTS = "npoints"
ZEROPOINT_EXT = "zeropoint"
FLUX_EXT = "flux"
FLUX_DENSITY_EXT = "fluxdensity"
MAG_EXT = "mpro"  # magnitude key; triggers y-axis inversion when plotted
ERROR_EXT = "sig"  # infix of the per-epoch error column, f"{band}{ERROR_EXT}{lum_key}"