astro-otter 0.0.1__py3-none-any.whl → 0.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of astro-otter might be problematic.

@@ -0,0 +1,744 @@
+ """
+ DataFinder object that stores information on a transient's host and provides
+ utility methods for pulling in data corresponding to that host.
+ """
+
+ from __future__ import annotations
+ import os
+ import csv
+ import io
+ import re
+ import time
+ import math
+ from urllib.request import urlopen
+
+ from astropy import units as u
+ from astropy.coordinates import SkyCoord
+ from astropy.time import Time
+ from astropy.table import Table
+ from astropy.io.votable import parse_single_table
+
+ import pandas as pd
+ import requests
+ import logging
+
+ from fundamentals.stats import rolling_window_sigma_clip
+ from operator import itemgetter
+
+ from ..util import VIZIER_LARGE_CATALOGS
+ from ..exceptions import MissingEnvVarError
+
+ logger = logging.getLogger(__name__)
+
+
+ class DataFinder(object):
+     def __init__(
+         self,
+         ra: str | float,
+         dec: str | float,
+         ra_units: str | u.Unit,
+         dec_units: str | u.Unit,
+         name: str | None = None,
+         redshift: float | None = None,
+         reference: list[str] | None = None,
+         **kwargs,
+     ) -> None:
+         """
+         Object to store DataFinder info to query public data sources of host galaxies.
+
+         Args:
+             ra (str|float) : The RA of the host, to be passed to an astropy SkyCoord.
+             dec (str|float) : The declination of the host, to be passed to an
+                               astropy SkyCoord.
+             ra_units (str|astropy.units.Unit) : Units of the RA, to be passed to
+                                                 the unit keyword of SkyCoord.
+             dec_units (str|astropy.units.Unit) : Units of the declination, to be
+                                                  passed to the unit keyword of
+                                                  SkyCoord.
+             name (str) : The name of the host galaxy.
+             redshift (float) : The redshift of the host galaxy.
+             reference (list[str]) : A list of bibcodes that found this to be the host.
+             kwargs : Accepted so that **Transient['host'] can be passed directly into
+                      this constructor; any extraneous properties are ignored.
+         """
+         self.coord = SkyCoord(ra, dec, unit=(ra_units, dec_units))
+         self.name = name
+         self.z = redshift
+         self.redshift = redshift  # alias, just here for ease of use
+         self.bibcodes = reference
+
+     def __repr__(self) -> str:
+         """
+         String representation of the DataFinder for printing.
+         """
+
+         if self.name is None:
+             print_name = "No Name DataFinder"
+         else:
+             print_name = self.name
+
+         return f"{print_name} @ (RA, Dec)=({self.coord.ra},{self.coord.dec})"
+
+     def __iter__(self):
+         """
+         Iterate over the properties of this DataFinder, yielding (key, value) pairs.
+         """
+         out = dict(
+             host_ra=self.coord.ra.value,
+             host_dec=self.coord.dec.value,
+             host_ra_units="deg",
+             host_dec_units="deg",
+         )
+
+         if self.name is not None:
+             out["host_name"] = self.name
+
+         if self.z is not None:
+             out["host_redshift"] = self.z
+
+         if self.bibcodes is not None:
+             out["reference"] = self.bibcodes
+
+         for k, v in out.items():
+             yield (k, v)
+
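Because `__iter__` yields `(key, value)` pairs, a `DataFinder` can be passed straight to `dict()`. A minimal usage sketch; the import path, coordinates, and name below are assumptions for illustration only:

```python
# Hypothetical usage; the exact import path is an assumption based on the
# relative imports above.
from otter import DataFinder  # assumed import path

host = DataFinder(
    ra=189.9977, dec=-11.6231,   # made-up coordinates
    ra_units="deg", dec_units="deg",
    name="ExampleHost",          # illustrative name
    redshift=0.05,
)

print(host)        # __repr__: "ExampleHost @ (RA, Dec)=(...)"
print(dict(host))  # __iter__: {'host_ra': 189.9977, 'host_dec': -11.6231, ...}
```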
+     ###################################################################################
+     ################### CONVENIENCE METHODS FOR QUERYING HOST METADATA ################
+     ###################################################################################
+
+     @staticmethod
+     def _wrap_astroquery(module, *args, **kwargs):
+         """
+         Private convenience method that standardizes how we call the query_region
+         method in astroquery.
+         """
+         return module.query_region(*args, **kwargs)
+
+     def query_simbad(self, radius="5 arcsec", **kwargs):
+         """
+         Query SIMBAD through astroquery to provide any other "meta" information on
+         this host that may not be stored in the OTTER.
+
+         Args:
+             radius (str|astropy.quantity.Quantity) : search radius for astroquery
+             **kwargs : any other arguments for astroquery.simbad.Simbad.query_region
+
+         Returns:
+             astropy Table of the SIMBAD results.
+         """
+         from astroquery.simbad import Simbad
+
+         return DataFinder._wrap_astroquery(Simbad, self.coord, radius=radius, **kwargs)
+
+     def query_vizier(self, radius="5 arcsec", **kwargs):
+         """
+         Query VizieR for TIME-AVERAGED data from its major/large catalogs.
+
+         VizieR Catalogs Queried:
+             - 2MASS-PSC
+             - 2MASX
+             - AC2000.2
+             - AKARI
+             - ALLWISE
+             - ASCC-2.5
+             - B/DENIS
+             - CMC14
+             - Gaia-DR1
+             - GALEX
+             - GLIMPSE
+             - GSC-ACT
+             - GSC1.2
+             - GSC2.2
+             - GSC2.3
+             - HIP
+             - HIP2
+             - IRAS
+             - NOMAD1
+             - NVSS
+             - PanSTARRS-DR1
+             - PGC
+             - Planck-DR1
+             - PPMX
+             - PPMXL
+             - SDSS-DR12
+             - SDSS-DR7
+             - SDSS-DR9
+             - Tycho-2
+             - UCAC2
+             - UCAC3
+             - UCAC4
+             - UKIDSS
+             - USNO-A2
+             - USNO-B1
+             - WISE
+
+         Args:
+             radius (str|astropy.quantity.Quantity) : search radius for astroquery
+             **kwargs : any other arguments for astroquery.vizier.Vizier.query_region
+
+         Returns:
+             astropy TableList of the time-averaged photometry associated with this
+             host.
+         """
+         from astroquery.vizier import Vizier
+
+         return DataFinder._wrap_astroquery(
+             Vizier, self.coord, radius=radius, catalog=VIZIER_LARGE_CATALOGS, **kwargs
+         )
+
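Both metadata helpers accept an astroquery-style radius, either a string like `"5 arcsec"` or an astropy `Quantity`. A short sketch, reusing the hypothetical `host` from above:

```python
from astropy import units as u

meta = host.query_simbad(radius=10 * u.arcsec)  # astropy Table (or None if no match)
phot = host.query_vizier(radius="5 arcsec")     # astropy TableList

for name in phot.keys():  # one table per matched catalog
    print(name, len(phot[name]))
```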
+     ###################################################################################
+     ######### CONVENIENCE METHODS FOR QUERYING HOST TIME SERIES PHOTOMETRY ###########
+     ###################################################################################
+
+     def query_atlas(
+         self,
+         days_ago: int = 365,
+         disc_date: float | None = None,
+         clip_sigma: float = 2.0,
+     ) -> pd.DataFrame:
+         """
+         Query the ATLAS forced photometry service for photometry of this host.
+
+         Args:
+             days_ago (int) : Number of days before the transient's discovery date
+                              (or today if no disc_date is given) to get ATLAS
+                              forced photometry for.
+             disc_date (float) : The discovery date of the transient in MJD.
+             clip_sigma (float) : Amount to sigma clip the ATLAS data by.
+
+         Returns:
+             pandas DataFrame of the ATLAS forced photometry for this host.
+         """
+         base_url = "https://fallingstar-data.com/forcedphot"
+
+         token = os.environ.get("ATLAS_API_TOKEN", None)
+         if token is None:
+             logger.warning(
+                 "Requesting your token from ATLAS. Please add ATLAS_API_TOKEN to "
+                 "your environment variables to avoid this!"
+             )
+
+             uname = os.environ.get("ATLAS_UNAME", default=None)
+             pword = os.environ.get("ATLAS_PWORD", default=None)
+
+             if uname is None and pword is None:
+                 raise MissingEnvVarError(["ATLAS_UNAME", "ATLAS_PWORD"], base_url)
+             elif uname is None and pword is not None:
+                 raise MissingEnvVarError(["ATLAS_UNAME"], base_url)
+             elif uname is not None and pword is None:
+                 raise MissingEnvVarError(["ATLAS_PWORD"], base_url)
+
+             resp = requests.post(
+                 url=f"{base_url}/api-token-auth/",
+                 data={"username": uname, "password": pword},
+             )
+
+             token = resp.json()["token"]
+
+         headers = {"Authorization": f"Token {token}", "Accept": "application/json"}
+
+         # compute the query start and end
+         if disc_date is None:
+             t_queryend = Time.now().mjd
+             logger.warning(
+                 "Since no discovery date was given we are using today as the "
+                 "query end!"
+             )
+         else:
+             t_queryend = Time(disc_date, format="mjd").mjd
+
+         t_querystart = t_queryend - days_ago
+
+         # submit the query to the ATLAS forced photometry server
+         task_url = None
+         while not task_url:
+             with requests.Session() as s:
+                 resp = s.post(
+                     f"{base_url}/queue/",
+                     headers=headers,
+                     data={
+                         "ra": self.coord.ra.value,
+                         "dec": self.coord.dec.value,
+                         "send_email": False,
+                         "mjd_min": t_querystart,
+                         "mjd_max": t_queryend,
+                         "use_reduced": False,
+                     },
+                 )
+                 if resp.status_code == 201:  # success
+                     task_url = resp.json()["url"]
+                     logger.info(f"The task URL is {task_url}")
+                 elif resp.status_code == 429:  # throttled
+                     message = resp.json()["detail"]
+                     logger.info(f"{resp.status_code} {message}")
+                     t_sec = re.findall(r"available in (\d+) seconds", message)
+                     t_min = re.findall(r"available in (\d+) minutes", message)
+                     if t_sec:
+                         waittime = int(t_sec[0])
+                     elif t_min:
+                         waittime = int(t_min[0]) * 60
+                     else:
+                         waittime = 10
+                     logger.info(f"Waiting {waittime} seconds")
+                     time.sleep(waittime)
+                 else:
+                     raise Exception(f"ERROR {resp.status_code}\n{resp.text}")
+
+         # now wait for the result
+         result_url = None
+         taskstarted_printed = False
+         while not result_url:
+             with requests.Session() as s:
+                 resp = s.get(task_url, headers=headers)
+
+                 if resp.status_code == 200:  # HTTP OK
+                     if resp.json()["finishtimestamp"]:
+                         result_url = resp.json()["result_url"]
+                         logger.info(
+                             f"Task is complete with results available at {result_url}"
+                         )
+                     elif resp.json()["starttimestamp"]:
+                         if not taskstarted_printed:
+                             logger.info(
+                                 "Task is running (started at "
+                                 f"{resp.json()['starttimestamp']})"
+                             )
+                             taskstarted_printed = True
+                         time.sleep(2)
+                     else:
+                         # still waiting for the job to start
+                         time.sleep(4)
+                 else:
+                     raise Exception(f"ERROR {resp.status_code}\n{resp.text}")
+
+         # get and clean up the result
+         with requests.Session() as s:
+             textdata = s.get(result_url, headers=headers).text
+
+         atlas_phot = DataFinder._atlas_stack(textdata, clipping_sigma=clip_sigma)
+
+         return pd.DataFrame(atlas_phot)
+
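`query_atlas` reads credentials from the environment: either `ATLAS_API_TOKEN` directly, or `ATLAS_UNAME` plus `ATLAS_PWORD` to request a token. A sketch of the setup (all values are placeholders):

```python
import os

# placeholders; set these to your real fallingstar-data.com credentials
os.environ["ATLAS_UNAME"] = "my-username"
os.environ["ATLAS_PWORD"] = "my-password"
# or, to skip the token request entirely:
# os.environ["ATLAS_API_TOKEN"] = "..."

df = host.query_atlas(days_ago=180, disc_date=59000.0, clip_sigma=2.0)
print(df[["mjd", "uJy", "duJy", "F"]].head())  # nightly-binned fluxes
```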
+     def query_ptf(self, radius: str | u.Quantity = "5 arcsec", **kwargs) -> Table:
+         """
+         Query the Palomar Transient Factory's light curve catalog for this host.
+
+         Args:
+             radius (str|astropy.quantity.Quantity) : search radius
+             **kwargs : other optional arguments for astroquery's query_region
+
+         Returns:
+             An astropy Table of the resulting light curve.
+         """
+         from astroquery.ipac.irsa import Irsa
+
+         ptf_lc_catalog = "ptf_lightcurves"
+         return DataFinder._wrap_astroquery(
+             Irsa, self.coord, radius=radius, catalog=ptf_lc_catalog, **kwargs
+         )
+
+     def query_ztf(self, radius: float = 5):
+         """
+         Query ZTF time-series photometry for this host.
+
+         Args:
+             radius (float) : The search radius in arcseconds.
+
+         Returns:
+             An astropy Table of the time series data from the cone search in ZTF.
+         """
+
+         base_url = "https://irsa.ipac.caltech.edu/cgi-bin/ZTF/nph_light_curves?"
+
+         ra, dec = self.coord.ra.value, self.coord.dec.value
+         search_radius_arcseconds = radius  # in arcseconds
+         search_radius_degree = search_radius_arcseconds / 3600
+
+         query_url = f"{base_url}POS=CIRCLE%20{ra}%20{dec}%20{search_radius_degree}"
+
+         resp = urlopen(query_url)
+
+         votab = parse_single_table(io.BytesIO(resp.read()))
+
+         return Table(votab.array)
+
+     def query_asassn(self, radius: float = 5.0, nthreads: int = 2) -> pd.DataFrame:
+         """
+         Query ASAS-SN Sky Patrol photometry for this host.
+
+         Args:
+             radius (float) : search radius in arcseconds
+             nthreads (int) : number of threads to use during download, default is 2
+
+         Returns:
+             A pandas DataFrame with the ASAS-SN light curve for this object.
+         """
+         from pyasassn.client import SkyPatrolClient
+
+         client = SkyPatrolClient()
+         light_curve = client.cone_search(
+             self.coord.ra.value,
+             self.coord.dec.value,
+             radius=radius,
+             units="arcsec",
+             download=True,
+             threads=nthreads,
+         )
+         return light_curve.data
+
+     def query_wise(self, radius: float = 5, **kwargs) -> Table:
+         """
+         Query NEOWISE for its multiepoch photometry.
+
+         Args:
+             radius (float) : The cone search radius in arcseconds.
+             **kwargs : Other optional arguments for the astroquery query_region.
+
+         Returns:
+             An astropy Table of the multiepoch WISE data for this host.
+         """
+         from astroquery.ipac.irsa import Irsa
+
+         wise_catalogs = "neowiser_p1bs_psd"
+         res = DataFinder._wrap_astroquery(
+             Irsa, self.coord, radius=radius * u.arcsec, catalog=wise_catalogs, **kwargs
+         )
+         return res
+
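All three of these cone searches take the radius in arcseconds; a usage sketch (again with the hypothetical `host`):

```python
ztf_lc = host.query_ztf(radius=3)          # astropy Table via the IRSA API
asassn_lc = host.query_asassn(radius=5.0)  # pandas DataFrame (requires pyasassn)
wise_lc = host.query_wise(radius=5)        # astropy Table of NEOWISE epochs

print(len(ztf_lc), len(asassn_lc), len(wise_lc))
```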
+     def query_alma(self, radius: float = 5, **kwargs) -> Table:
+         """
+         Query ALMA to see if there are observations of this host.
+
+         NOTE: Since this is radio/mm data, it is unlikely that the output table will
+         simply have fluxes in it. Instead, you will need to use the access_url column
+         to download and reduce this data.
+
+         Args:
+             radius (float) : The cone search radius in arcseconds.
+             **kwargs : Other optional arguments for the astroquery query_region.
+
+         Returns:
+             An astropy Table of the ALMA observations of this host.
+         """
+
+         from astroquery.alma import Alma
+
+         res = DataFinder._wrap_astroquery(
+             Alma, self.coord, radius=radius * u.arcsec, **kwargs
+         )
+         return res
+
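Since the ALMA results are observation records rather than fluxes, a typical next step is to pull the access URLs out of the returned table, e.g.:

```python
obs = host.query_alma(radius=10)
if len(obs) > 0:
    print(obs["access_url"][0])  # follow this URL to download the raw data
```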
+     def query_first(
+         self, radius: u.Quantity = 5 * u.arcmin, get_image: bool = False, **kwargs
+     ) -> list:
+         """
+         Query the FIRST radio survey and return an astropy table of the flux density.
+
+         This queries Table 2 from Ofek & Frail (2011); 2011ApJ...737...45O.
+
+         Args:
+             radius (u.Quantity) : An astropy Quantity with the image height/width.
+             get_image (bool) : If True, download and return a list of the associated
+                                images too.
+             **kwargs : any other arguments to pass to the astroquery.image_cutouts
+                        get_images method
+
+         Returns:
+             Astropy table of the flux densities. If get_image is True, it also
+             returns a list of FIRST radio survey images.
+         """
+         from astroquery.vizier import Vizier
+
+         res = DataFinder._wrap_astroquery(
+             Vizier, self.coord, radius=radius, catalog="J/ApJ/737/45/table2"
+         )
+
+         if get_image:
+             from astroquery.image_cutouts.first import First
+
+             res_img = First.get_images(self.coord, image_size=radius, **kwargs)
+             return res, res_img
+
+         return res
+
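When `get_image=True` the method returns a `(table, images)` tuple instead of just the table, so unpack accordingly:

```python
from astropy import units as u

fluxes = host.query_first(radius=2 * u.arcmin)  # flux density table only
fluxes, images = host.query_first(radius=2 * u.arcmin, get_image=True)
print(len(images))  # list of FIRST cutout images
```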
+     def query_nvss(self, radius: u.Quantity = 5 * u.arcsec, **kwargs) -> Table:
+         """
+         Query the NRAO VLA Sky Survey (NVSS) and return a TableList of the result.
+
+         This queries Table 1 from Ofek & Frail (2011); 2011ApJ...737...45O.
+
+         Args:
+             radius (u.Quantity) : An astropy Quantity with the radius.
+             **kwargs : Any other arguments to pass to query_region.
+         """
+         from astroquery.vizier import Vizier
+
+         res = DataFinder._wrap_astroquery(
+             Vizier, self.coord, radius=radius, catalog="J/ApJ/737/45/table1", **kwargs
+         )
+         return res
+
+     def query_heasarc(
+         self, radius: u.Quantity = 5 * u.arcsec, heasarc_table: str = "xray", **kwargs
+     ) -> Table:
+         """
+         Query HEASARC by the argument `heasarc_table` for the ra/dec associated with
+         this DataFinder object.
+
+         Args:
+             radius (u.Quantity) : An astropy Quantity with the radius.
+             heasarc_table (str) : Name of the HEASARC table to query. The default,
+                                   'xray', queries the HEASARC master X-ray catalog;
+                                   'radio' queries the HEASARC master radio catalog.
+                                   See
+                                   https://heasarc.gsfc.nasa.gov/cgi-bin/W3Browse/w3catindex.pl
+                                   for a complete list.
+             **kwargs : Any other arguments to pass to query_region.
+
+         Returns:
+             Astropy table of the rows in `heasarc_table` that match self.coord.
+         """
+         from astroquery.heasarc import Heasarc
+
+         res = DataFinder._wrap_astroquery(
+             Heasarc, self.coord, mission=heasarc_table, radius=radius, **kwargs
+         )
+
+         return res
+
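Switching `heasarc_table` changes which master catalog is searched:

```python
from astropy import units as u

xray = host.query_heasarc(radius=10 * u.arcsec)  # master X-ray catalog (default)
radio = host.query_heasarc(radius=10 * u.arcsec, heasarc_table="radio")  # master radio
```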
+     ###################################################################################
+     ######### CONVENIENCE METHODS FOR QUERYING HOST SPECTRA ##########################
+     ###################################################################################
+
+     def query_sparcl(
+         self, radius: u.Quantity = 5 * u.arcsec, include: str | list = "DEFAULT"
+     ) -> Table:
+         """
+         Query the NOIRLab Astro Data Lab SPARCL database for spectra of this host.
+
+         Args:
+             radius (Quantity) : search radius as an astropy.units.Quantity
+             include (list|str) : list or string of columns to include in the result.
+                                  See the sparcl client documentation for more info.
+                                  The default returns specid, ra, dec, sparcl_id,
+                                  flux, wavelength, and the spectroscopic survey (_dr).
+
+         Returns:
+             astropy Table of the results, one row per spectrum.
+         """
+
+         from sparcl.client import SparclClient
+         from dl import queryClient as qc  # noqa: N813
+
+         client = SparclClient()
+
+         # first do a cone search on sparcl.main
+         ra, dec = self.coord.ra.value, self.coord.dec.value
+         radius_deg = radius.to(u.deg).value
+
+         adql = f"""
+         SELECT *
+         FROM sparcl.main
+         WHERE 't'=Q3C_RADIAL_QUERY(ra,dec,{ra},{dec},{radius_deg})
+         """
+         cone_search_res = qc.query(adql=adql, fmt="pandas")
+
+         # then retrieve all of the spectra corresponding to those sparcl_ids
+         sparcl_ids = cone_search_res.sparcl_id.tolist()
+         res = client.retrieve(uuid_list=sparcl_ids, include=include)
+         all_spec = pd.concat([pd.DataFrame([record]) for record in res.records])
+         return Table.from_pandas(all_spec)
+
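A usage sketch; the radius must be an astropy `Quantity` here because it is converted to degrees for the ADQL cone search, and both the `sparclclient` and `astro-datalab` packages must be installed:

```python
from astropy import units as u

spectra = host.query_sparcl(radius=3 * u.arcsec)  # one row per spectrum
if len(spectra) > 0:
    row = spectra[0]
    # with the default `include`, each row carries flux and wavelength arrays
    print(row["sparcl_id"], len(row["flux"]), len(row["wavelength"]))
```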
+     ###################################################################################
+     ######### PRIVATE HELPER METHODS FOR THE QUERYING #################################
+     ###################################################################################
+     @staticmethod
+     def _atlas_stack(filecontent, clipping_sigma, log=logger):
+         """
+         Function adapted from David Young's :func:`plotter.plot_single_result`
+         https://github.com/thespacedoctor/plot-results-from-atlas-force-photometry-service/blob/main/plot_atlas_fp.py
+
+         And again adapted from
+         https://github.com/SAGUARO-MMA/kne-cand-vetting/blob/master/kne_cand_vetting/survey_phot.py
+         """
+         epochs = DataFinder._atlas_read_and_sigma_clip_data(
+             filecontent, log=log, clipping_sigma=clipping_sigma
+         )
+
+         # c = cyan, o = orange
+         magnitudes = {
+             "c": {"mjds": [], "mags": [], "magErrs": [], "lim5sig": []},
+             "o": {"mjds": [], "mags": [], "magErrs": [], "lim5sig": []},
+             "I": {"mjds": [], "mags": [], "magErrs": [], "lim5sig": []},
+         }
+
+         # SPLIT BY FILTER
+         for epoch in epochs:
+             if epoch["F"] in ["c", "o", "I"]:
+                 magnitudes[epoch["F"]]["mjds"].append(epoch["MJD"])
+                 magnitudes[epoch["F"]]["mags"].append(epoch["uJy"])
+                 magnitudes[epoch["F"]]["magErrs"].append(epoch["duJy"])
+                 magnitudes[epoch["F"]]["lim5sig"].append(epoch["mag5sig"])
+
+         # STACK PHOTOMETRY IF REQUIRED
+         stacked_magnitudes = DataFinder._stack_photometry(magnitudes, binningdays=1)
+
+         return stacked_magnitudes
+
+     @staticmethod
+     def _atlas_read_and_sigma_clip_data(filecontent, log, clipping_sigma=2.2):
+         """
+         Function adapted from David Young's :func:`plotter.read_and_sigma_clip_data`
+         https://github.com/thespacedoctor/plot-results-from-atlas-force-photometry-service/blob/main/plot_atlas_fp.py
+
+         And again adapted from
+         https://github.com/SAGUARO-MMA/kne-cand-vetting/blob/master/kne_cand_vetting/survey_phot.py
+
+         *clean up rogue data from the files by performing some basic clipping*
+         **Key Arguments:**
+             - `filecontent` -- the text content of a single forced photometry file
+             - `clipping_sigma` -- the level at which to clip flux data
+         **Return:**
+             - `epochs` -- sigma clipped and cleaned epoch data
+         """
+
+         # CLEAN UP FILE FOR EASIER READING
+         fpdata = (
+             filecontent.replace("###", "")
+             .replace(" ", ",")
+             .replace(",,", ",")
+             .replace(",,", ",")
+             .replace(",,", ",")
+             .replace(",,", ",")
+             .splitlines()
+         )
+
+         # PARSE DATA WITH SOME FIXED CLIPPING
+         oepochs = []
+         cepochs = []
+         csvreader = csv.DictReader(
+             fpdata, dialect="excel", delimiter=",", quotechar='"'
+         )
+
+         for row in csvreader:
+             for k, v in row.items():
+                 try:
+                     row[k] = float(v)
+                 except Exception:
+                     pass
+             # REMOVE VERY HIGH ERROR DATA POINTS, POOR CHI SQUARED, OR POOR EPOCHS
+             if row["duJy"] > 4000 or row["chi/N"] > 100 or row["mag5sig"] < 17.0:
+                 continue
+             if row["F"] == "c":
+                 cepochs.append(row)
+             if row["F"] == "o":
+                 oepochs.append(row)
+
+         # SORT BY MJD
+         cepochs = sorted(cepochs, key=itemgetter("MJD"), reverse=False)
+         oepochs = sorted(oepochs, key=itemgetter("MJD"), reverse=False)
+
+         # SIGMA-CLIP THE DATA WITH A ROLLING WINDOW
+         cdataflux = [row["uJy"] for row in cepochs]
+         odataflux = [row["uJy"] for row in oepochs]
+
+         masklist = []
+         for flux in [cdataflux, odataflux]:
+             fullmask = rolling_window_sigma_clip(
+                 log=log, array=flux, clippingSigma=clipping_sigma, windowSize=11
+             )
+             masklist.append(fullmask)
+
+         # KEEP ONLY THE UNMASKED (UNCLIPPED) EPOCHS
+         try:
+             cepochs = [e for e, m in zip(cepochs, masklist[0]) if not m]
+         except Exception:
+             cepochs = []
+
+         try:
+             oepochs = [e for e, m in zip(oepochs, masklist[1]) if not m]
+         except Exception:
+             oepochs = []
+
+         logger.info("Completed the ``read_and_sigma_clip_data`` function")
+         # returns ordered dictionaries of all parameters
+         return cepochs + oepochs
+
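For intuition, here is a minimal, self-contained approximation of what the rolling-window sigma clip does; this is an illustrative stand-in, not the `fundamentals` implementation:

```python
import numpy as np

def rolling_sigma_clip_sketch(flux, clipping_sigma=2.2, window_size=11):
    """Boolean mask: True where a point deviates from its local window
    median by more than clipping_sigma times the local std."""
    flux = np.asarray(flux, dtype=float)
    half = window_size // 2
    mask = np.zeros(len(flux), dtype=bool)
    for i in range(len(flux)):
        window = flux[max(0, i - half): i + half + 1]
        center, spread = np.median(window), np.std(window)
        if spread > 0 and abs(flux[i] - center) > clipping_sigma * spread:
            mask[i] = True
    return mask

# masked (True) points are the ones dropped by the `if not m` filters above
print(rolling_sigma_clip_sketch([1.0, 1.1, 0.9, 50.0, 1.0, 1.2, 0.8]))
```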
+     @staticmethod
+     def _stack_photometry(magnitudes, binningdays=1.0):
+         """
+         Function adapted from David Young's :func:`plotter.stack_photometry`
+         https://github.com/thespacedoctor/plot-results-from-atlas-force-photometry-service/blob/main/plot_atlas_fp.py
+
+         And again adapted from
+         https://github.com/SAGUARO-MMA/kne-cand-vetting/blob/master/kne_cand_vetting/survey_phot.py
+
+         *stack the photometry for the given temporal range*
+         **Key Arguments:**
+             - `magnitudes` -- dictionary of photometry divided into filter sets
+             - `binningdays` -- the binning to use (in days)
+         **Return:**
+             - `alldata` -- the stacked photometry, one dict per bin and filter
+         """
+
+         # IF WE WANT TO 'STACK' THE PHOTOMETRY
+         summed_magnitudes = {
+             "c": {"mjds": [], "mags": [], "magErrs": [], "n": [], "lim5sig": []},
+             "o": {"mjds": [], "mags": [], "magErrs": [], "n": [], "lim5sig": []},
+             "I": {"mjds": [], "mags": [], "magErrs": [], "n": [], "lim5sig": []},
+         }
+
+         # MAGNITUDES/FLUXES ARE DIVIDED INTO UNIQUE FILTER SETS - SO ITERATE OVER
+         # FILTERS
+         alldata = []
+         for fil, data in list(magnitudes.items()):
+             # WE'RE GOING TO CREATE FURTHER SUBSETS FOR EACH UNIQUE MJD
+             # (FLOORED TO AN INTEGER)
+             # MAG VARIABLE == FLUX (JUST TO CONFUSE YOU)
+             distinctmjds = {}
+             for mjd, flx, err, lim in zip(
+                 data["mjds"], data["mags"], data["magErrs"], data["lim5sig"]
+             ):
+                 # DICT KEY IS THE UNIQUE INTEGER MJD
+                 key = str(int(math.floor(mjd / float(binningdays))))
+                 # FIRST DATA POINT OF THE NIGHT? CREATE A NEW DATA SET
+                 if key not in distinctmjds:
+                     distinctmjds[key] = {
+                         "mjds": [mjd],
+                         "mags": [flx],
+                         "magErrs": [err],
+                         "lim5sig": [lim],
+                     }
+                 # OR NOT THE FIRST? APPEND TO THE ALREADY CREATED LIST
+                 else:
+                     distinctmjds[key]["mjds"].append(mjd)
+                     distinctmjds[key]["mags"].append(flx)
+                     distinctmjds[key]["magErrs"].append(err)
+                     distinctmjds[key]["lim5sig"].append(lim)
+
+             # ALL DATA NOW IN MJD SUBSETS. SO FOR EACH SUBSET (I.E. INDIVIDUAL
+             # NIGHTS) ...
+             for k, v in list(distinctmjds.items()):
+                 # GIVE ME THE MEAN MJD
+                 meanmjd = sum(v["mjds"]) / len(v["mjds"])
+                 summed_magnitudes[fil]["mjds"].append(meanmjd)
+                 # GIVE ME THE MEAN FLUX
+                 meanflux = sum(v["mags"]) / len(v["mags"])
+                 summed_magnitudes[fil]["mags"].append(meanflux)
+                 # GIVE ME THE COMBINED ERROR
+                 sum_of_squares = sum(x**2 for x in v["magErrs"])
+                 comberror = math.sqrt(sum_of_squares) / len(v["magErrs"])
+                 summed_magnitudes[fil]["magErrs"].append(comberror)
+                 # 5-SIGMA LIMITS
+                 comb5siglimit = 23.9 - 2.5 * math.log10(5.0 * comberror)
+                 summed_magnitudes[fil]["lim5sig"].append(comb5siglimit)
+                 # GIVE ME THE NUMBER OF DATA POINTS COMBINED
+                 n = len(v["mjds"])
+                 summed_magnitudes[fil]["n"].append(n)
+                 alldata.append(
+                     {
+                         "mjd": meanmjd,
+                         "uJy": meanflux,
+                         "duJy": comberror,
+                         "F": fil,
+                         "n": n,
+                         "mag5sig": comb5siglimit,
+                     }
+                 )
+         logger.info("Completed the ``stack_photometry`` method")
+
+         return alldata
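As a sanity check on the binning arithmetic above: the combined error of N points is the quadrature sum of the individual errors divided by N, and the 5-sigma AB limit follows from the microJansky zero point of 23.9:

```python
import math

errs = [30.0, 25.0, 40.0]  # hypothetical same-night flux errors in uJy

comberror = math.sqrt(sum(e**2 for e in errs)) / len(errs)  # ~18.6 uJy
lim5sig = 23.9 - 2.5 * math.log10(5.0 * comberror)          # ~18.98 AB mag

print(f"{comberror:.1f} uJy -> 5-sigma limit {lim5sig:.2f} mag")
```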