timewise 0.5.3__py3-none-any.whl → 1.0.0a1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- timewise/__init__.py +1 -5
- timewise/backend/__init__.py +6 -0
- timewise/backend/base.py +36 -0
- timewise/backend/filesystem.py +80 -0
- timewise/chunking.py +50 -0
- timewise/cli.py +117 -11
- timewise/config.py +34 -0
- timewise/io/__init__.py +1 -0
- timewise/io/config.py +64 -0
- timewise/io/download.py +302 -0
- timewise/io/stable_tap.py +121 -0
- timewise/plot/__init__.py +3 -0
- timewise/plot/diagnostic.py +242 -0
- timewise/plot/lightcurve.py +112 -0
- timewise/plot/panstarrs.py +260 -0
- timewise/plot/sdss.py +109 -0
- timewise/process/__init__.py +2 -0
- timewise/process/config.py +30 -0
- timewise/process/interface.py +143 -0
- timewise/process/keys.py +10 -0
- timewise/process/stacking.py +310 -0
- timewise/process/template.yml +49 -0
- timewise/query/__init__.py +6 -0
- timewise/query/base.py +45 -0
- timewise/query/positional.py +40 -0
- timewise/tables/__init__.py +10 -0
- timewise/tables/allwise_p3as_mep.py +22 -0
- timewise/tables/base.py +9 -0
- timewise/tables/neowiser_p1bs_psd.py +22 -0
- timewise/types.py +30 -0
- timewise/util/backoff.py +12 -0
- timewise/util/csv_utils.py +12 -0
- timewise/util/error_threading.py +70 -0
- timewise/util/visits.py +33 -0
- timewise-1.0.0a1.dist-info/METADATA +205 -0
- timewise-1.0.0a1.dist-info/RECORD +39 -0
- {timewise-0.5.3.dist-info → timewise-1.0.0a1.dist-info}/WHEEL +1 -1
- timewise-1.0.0a1.dist-info/entry_points.txt +3 -0
- timewise/big_parent_sample.py +0 -106
- timewise/config_loader.py +0 -157
- timewise/general.py +0 -52
- timewise/parent_sample_base.py +0 -89
- timewise/point_source_utils.py +0 -68
- timewise/utils.py +0 -558
- timewise/wise_bigdata_desy_cluster.py +0 -1407
- timewise/wise_data_base.py +0 -2027
- timewise/wise_data_by_visit.py +0 -672
- timewise/wise_flux_conversion_correction.dat +0 -19
- timewise-0.5.3.dist-info/METADATA +0 -55
- timewise-0.5.3.dist-info/RECORD +0 -17
- timewise-0.5.3.dist-info/entry_points.txt +0 -3
- {timewise-0.5.3.dist-info → timewise-1.0.0a1.dist-info/licenses}/LICENSE +0 -0
timewise/point_source_utils.py
DELETED
|
@@ -1,68 +0,0 @@
|
|
|
1
|
-
import logging
|
|
2
|
-
import pandas as pd
|
|
3
|
-
|
|
4
|
-
from timewise.parent_sample_base import ParentSampleBase
|
|
5
|
-
from timewise.wise_data_by_visit import WiseDataByVisit
|
|
6
|
-
|
|
7
|
-
|
|
8
|
-
logger = logging.getLogger(__name__)
|
|
9
|
-
|
|
10
|
-
|
|
11
|
-
###########################################################################################################
|
|
12
|
-
# START POINT SOURCE UTILS #
|
|
13
|
-
#####################################################
|
|
14
|
-
|
|
15
|
-
|
|
16
|
-
def get_point_source_parent_sample(base_name, ra, dec):
    """
    Build a ``ParentSampleBase`` subclass that holds exactly one point source.

    :param base_name: base name for storage in the data directory
    :type base_name: str
    :param ra: right ascension in degrees
    :type ra: float
    :param dec: declination in degrees
    :type dec: float
    :return: the dynamically created ``PointSourceParentSample`` class
    """

    class PointSourceParentSample(ParentSampleBase):
        # The single-source table already uses the canonical column names.
        default_keymap = {
            'ra': 'ra',
            'dec': 'dec',
            'id': 'id'
        }

        def __init__(self):
            super().__init__(base_name=base_name)

            self.base_name = base_name
            # One-row sample: the point source itself, identified by base_name.
            self.df = pd.DataFrame({'ra': [ra], 'dec': [dec], 'id': [base_name]})

        def save_local(self):
            # A single in-memory source does not need to be persisted.
            # (fixed: was an f-string without any placeholders)
            logger.debug("not saving")

    return PointSourceParentSample
|
|
36
|
-
|
|
37
|
-
|
|
38
|
-
def get_point_source_wise_data(base_name, ra, dec, min_sep_arcsec=10, match=False, **kwargs):
    """
    Get a WISEData instance for a point source

    :param base_name: base name for storage in the data directory
    :type base_name: str
    :param ra: right ascension
    :type ra: float
    :param dec: declination
    :type dec: float
    :param min_sep_arcsec: search radius in arcsec
    :type min_sep_arcsec: float
    :param match: match to AllWISE Source Catalogue
    :type match: bool
    :param kwargs: keyword arguments passed to WISEData.get_photometric_data()
    :type kwargs: dict
    :return: WISEData
    """
    ps = get_point_source_parent_sample(base_name, ra, dec)
    # single source -> a single chunk is sufficient
    wd = WiseDataByVisit(n_chunks=1, base_name=base_name, parent_sample_class=ps, min_sep_arcsec=min_sep_arcsec)
    if match:
        wd.match_all_chunks()
    # pop 'service' so it is not passed twice via **kwargs below
    service = kwargs.pop('service', 'gator')
    wd.get_photometric_data(service=service, **kwargs)
    wd.plot_lc(parent_sample_idx=0, service=service)
    return wd
|
|
64
|
-
|
|
65
|
-
|
|
66
|
-
#####################################################
|
|
67
|
-
# END POINT SOURCE UTILS #
|
|
68
|
-
###########################################################################################################
|
timewise/utils.py
DELETED
|
@@ -1,558 +0,0 @@
|
|
|
1
|
-
import requests
|
|
2
|
-
import os
|
|
3
|
-
import getpass
|
|
4
|
-
import logging
|
|
5
|
-
import pandas as pd
|
|
6
|
-
import matplotlib.pyplot as plt
|
|
7
|
-
import pyvo as vo
|
|
8
|
-
import backoff
|
|
9
|
-
import numpy
|
|
10
|
-
from scipy.stats import chi2
|
|
11
|
-
from functools import cache
|
|
12
|
-
from astropy.table import Table
|
|
13
|
-
from PIL import Image
|
|
14
|
-
from io import BytesIO
|
|
15
|
-
import hashlib
|
|
16
|
-
from threading import Thread
|
|
17
|
-
from queue import Queue
|
|
18
|
-
import sys
|
|
19
|
-
|
|
20
|
-
from timewise.general import backoff_hndlr, get_directories
|
|
21
|
-
|
|
22
|
-
|
|
23
|
-
logger = logging.getLogger(__name__)
|
|
24
|
-
mirong_url = 'http://staff.ustc.edu.cn/~jnac/data_public/wisevar.txt'
|
|
25
|
-
|
|
26
|
-
|
|
27
|
-
def get_mirong_path():
    # Location of the cached MIRONG sample CSV inside the timewise cache dir.
    return get_directories()['cache_dir'] / 'mirong_sample.csv'
|
|
29
|
-
|
|
30
|
-
|
|
31
|
-
@cache
def get_2d_gaussian_correction(cl):
    """
    Translate a 2D-Gaussian confidence level into the corresponding radius in
    units of sigma. Memoised because ``chi2.ppf`` is comparatively expensive.
    """
    quantile = chi2.ppf(cl, 2)
    return numpy.sqrt(quantile)
|
|
34
|
-
|
|
35
|
-
|
|
36
|
-
def get_mirong_sample():
    """
    Load the MIRONG sample, downloading and caching it as CSV on first use.

    :return: the MIRONG sample table
    :rtype: pandas.DataFrame
    """
    mirong_path = get_mirong_path()

    if mirong_path.is_file():
        logger.debug(f'loading {mirong_path}')
        return pd.read_csv(mirong_path)

    logger.info(f'getting MIRONG sample from {mirong_url}')
    response = requests.get(mirong_url)

    # Tokenise the whitespace-separated table, dropping comment tokens.
    rows = list()
    for line in response.text.split('\n')[1:]:
        tokens = [tok for tok in line.split(' ') if tok and '#' not in tok]
        rows.append(tokens)

    # First row holds the column names; the last row is an empty trailer.
    mirong_sample = pd.DataFrame(rows[1:-1], columns=rows[0])
    mirong_sample['ra'] = mirong_sample['RA']
    mirong_sample['dec'] = mirong_sample['DEC']
    logger.info(f'found {len(mirong_sample)} objects in MIRONG Sample')

    # Only the original upper-case columns are kept in the cached file.
    mirong_sample.drop(columns=['ra', 'dec'], inplace=True)
    logger.debug(f'saving to {mirong_path}')
    mirong_path.parent.mkdir(parents=True, exist_ok=True)
    mirong_sample.to_csv(mirong_path, index=False)

    return mirong_sample
|
|
66
|
-
|
|
67
|
-
|
|
68
|
-
###########################################################################################################
|
|
69
|
-
# START SDSS UTILS #
|
|
70
|
-
#####################################################
|
|
71
|
-
|
|
72
|
-
|
|
73
|
-
def get_sdss_credentials():
    """
    Return the SDSS user ID and password, prompting at most once and caching
    the answers in the ``SDSS_USERID`` / ``SDSS_USERPW`` environment variables.
    """
    prompts = {
        'SDSS_USERID': lambda: input('Enter SDSS user ID:'),
        'SDSS_USERPW': lambda: getpass.getpass('Enter SDSS password:'),
    }
    for env_var, ask in prompts.items():
        if not os.environ.get(env_var):
            os.environ[env_var] = ask()
    return os.environ['SDSS_USERID'], os.environ['SDSS_USERPW']
|
|
79
|
-
|
|
80
|
-
|
|
81
|
-
def login_to_sciserver():
    """
    Authenticate against SciServer with the cached SDSS credentials.

    :raises ModuleNotFoundError: if the optional SciServer package is missing
    """
    try:
        from SciServer import Authentication
    except ModuleNotFoundError:
        raise ModuleNotFoundError("Please install SciServer (https://github.com/sciserver/SciScript-Python) "
                                  "if you want to see SDSS cutouts!")

    user_id, password = get_sdss_credentials()
    logger.debug(f"logging in to SciServer with username {user_id}")
    Authentication.login(user_id, password)
|
|
91
|
-
|
|
92
|
-
|
|
93
|
-
@backoff.on_exception(
    backoff.expo,
    requests.RequestException,
    max_tries=50,
    on_backoff=backoff_hndlr
)
def get_cutout(*args, **kwargs):
    """Fetch an SDSS JPEG cutout via SciServer's SkyServer.

    Retries with exponential backoff (up to 50 tries) on
    requests.RequestException. All arguments are passed straight through to
    SkyServer.getJpegImgCutout.
    """
    login_to_sciserver()
    from SciServer import SkyServer
    return SkyServer.getJpegImgCutout(*args, **kwargs)
|
|
103
|
-
|
|
104
|
-
|
|
105
|
-
def plot_sdss_cutout(ra, dec, arcsec=20, arcsec_per_px=0.1, interactive=False, fn=None, title=None, save=False, ax=False,
                     height=2.5):
    """
    Plot an SDSS image cutout centred on (ra, dec).

    :param ra: right ascension in degrees
    :param dec: declination in degrees
    :param arcsec: edge length of the cutout in arcsec
    :param arcsec_per_px: image scale in arcsec per pixel
    :param interactive: if True, return (fig, ax) instead of closing the figure
    :param fn: filename used when ``save`` is True
    :param title: optional axis title
    :param save: if True, save the figure under ``fn``
    :param ax: existing matplotlib axis to draw into; a new figure is created if falsy
    :param height: figure height in inches when a new figure is created
    :return: (fig, ax) if ``interactive``, otherwise None
    """
    ang_px = int(arcsec / arcsec_per_px)
    ang_deg = arcsec / 3600

    if not ax:
        fig, ax = plt.subplots(figsize=(height, height))
    else:
        fig = plt.gcf()

    try:
        im = get_cutout(ra, dec, scale=arcsec_per_px, height=ang_px, width=ang_px)
        ax.imshow(im, origin='upper',
                  extent=([ra + ang_deg / 2, ra - ang_deg / 2,
                           dec - ang_deg / 2, dec + ang_deg / 2]),
                  cmap='gray')
        ax.scatter(ra, dec, marker='x', color='red')

    except Exception as e:
        if "outside the SDSS footprint" in str(e):
            # Annotate the empty axis instead of failing for positions SDSS
            # never observed.
            xlim = ax.get_xlim()
            ylim = ax.get_ylim()
            x = sum(xlim) / 2
            y = sum(ylim) / 2
            ax.annotate("Outside SDSS Footprint", (x, y), color='red', ha='center', va='center', fontsize=20)
        else:
            # Re-raise unchanged: wrapping in a new Exception (as before) would
            # lose the original type and traceback.
            raise

    if title:
        ax.set_title(title)

    if save:
        logger.debug(f"saving under {fn}")
        fig.savefig(fn)

    if interactive:
        return fig, ax

    plt.close()
|
|
145
|
-
|
|
146
|
-
|
|
147
|
-
#####################################################
|
|
148
|
-
# END SDSS UTILS #
|
|
149
|
-
###########################################################################################################
|
|
150
|
-
|
|
151
|
-
###########################################################################################################
|
|
152
|
-
# START PANSTARRS UTILS #
|
|
153
|
-
#####################################################
|
|
154
|
-
|
|
155
|
-
|
|
156
|
-
class PanSTARRSQueryError(Exception):
    """Raised when the PanSTARRS image services return no usable result."""
    pass
|
|
158
|
-
|
|
159
|
-
|
|
160
|
-
def load_cache_or_download(url):
    """
    Return the content behind ``url``, caching it on disk keyed by the URL's MD5.

    :param url: the URL to fetch
    :return: the raw response content as bytes
    """
    logger.debug(f"loading or downloading {url}")
    url_hash = hashlib.md5(url.encode()).hexdigest()
    cache_dir = get_directories()['cache_dir']
    cache_file = cache_dir / (url_hash + ".cache")
    logger.debug(f"cache file is {cache_file}")

    if cache_file.is_file():
        # Cache hit: serve the bytes from disk.
        logger.debug(f"loading {cache_file}")
        with open(cache_file, 'rb') as f:
            return f.read()

    # Cache miss: download and remember the payload for next time.
    logger.debug(f"downloading {url}")
    response = requests.get(url)
    with open(cache_file, 'wb') as f:
        f.write(response.content)
    return response.content
|
|
176
|
-
|
|
177
|
-
|
|
178
|
-
def annotate_not_available(ax):
    """Write a red 'outside footprint' notice into the centre of ``ax``."""
    x_center = sum(ax.get_xlim()) / 2
    y_center = sum(ax.get_ylim()) / 2
    logger.debug(f"annotate_not_available at {x_center}, {y_center}")
    ax.annotate("Outside\nPanSTARRS\nFootprint", (x_center, y_center), color='red', ha='center', va='center', fontsize=10)
|
|
185
|
-
|
|
186
|
-
|
|
187
|
-
def getimages(ra, dec, filters="grizy"):
    """Query ps1filenames.py service to get a list of images

    ra, dec = position in degrees
    filters = string with filters to include
    Returns a table with the results
    """
    service = "https://ps1images.stsci.edu/cgi-bin/ps1filenames.py"
    query_url = f"{service}?ra={ra}&dec={dec}&filters={filters}"
    # Responses are cached on disk, so repeated queries are free.
    raw = load_cache_or_download(query_url)
    return Table.read(raw.decode(), format='ascii')
|
|
201
|
-
|
|
202
|
-
|
|
203
|
-
def geturl(ra, dec, size=240, output_size=None, filters="grizy", format="jpg", color=False):
    """Get URL for images in the table

    ra, dec = position in degrees
    size = extracted image size in pixels (0.25 arcsec/pixel)
    output_size = output (display) image size in pixels (default = size).
                  output_size has no effect for fits format images.
    filters = string with filters to include
    format = data format (options are "jpg", "png" or "fits")
    color = if True, creates a color image (only for jpg or png format).
            Default is return a list of URLs for single-filter grayscale images.
    Returns a string with the URL
    """
    if color and format == "fits":
        raise ValueError("color images are available only for jpg or png formats")
    if format not in ("jpg", "png", "fits"):
        raise ValueError("format must be one of jpg, png, fits")

    table = getimages(ra, dec, filters=filters)
    if len(table) == 0:
        raise PanSTARRSQueryError("No images available")

    url = (f"https://ps1images.stsci.edu/cgi-bin/fitscut.cgi?"
           f"ra={ra}&dec={dec}&size={size}&format={format}")
    if output_size:
        url = url + "&output_size={}".format(output_size)

    # sort filters from red to blue
    order = numpy.argsort(["yzirg".find(x) for x in table['filter']])
    table = table[order]

    if color:
        if len(table) > 3:
            # pick 3 filters: the reddest, middle and bluest available
            table = table[[0, len(table) // 2, len(table) - 1]]
        for i, param in enumerate(["red", "green", "blue"]):
            url = url + "&{}={}".format(param, table['filename'][i])
        return url

    # grayscale: one URL per available filter
    urlbase = url + "&red="
    return [urlbase + filename for filename in table['filename']]
|
|
243
|
-
|
|
244
|
-
|
|
245
|
-
def getcolorim(ra, dec, size=240, output_size=None, filters="grizy", format="jpg"):
    """Get color image at a sky position

    ra, dec = position in degrees
    size = extracted image size in pixels (0.25 arcsec/pixel)
    output_size = output (display) image size in pixels (default = size).
                  output_size has no effect for fits format images.
    filters = string with filters to include
    format = data format (options are "jpg", "png")
    Returns the image
    """
    if format not in ("jpg", "png"):
        raise ValueError("format must be jpg or png")
    cutout_url = geturl(ra, dec, size=size, filters=filters, output_size=output_size, format=format, color=True)
    raw = load_cache_or_download(cutout_url)
    return Image.open(BytesIO(raw))
|
|
263
|
-
|
|
264
|
-
|
|
265
|
-
def getgrayim(ra, dec, size=240, output_size=None, filter="g", format="jpg"):
    """Get grayscale image at a sky position

    ra, dec = position in degrees
    size = extracted image size in pixels (0.25 arcsec/pixel)
    output_size = output (display) image size in pixels (default = size).
                  output_size has no effect for fits format images.
    filter = string with filter to extract (one of grizy)
    format = data format (options are "jpg", "png")
    Returns the image
    """
    if format not in ("jpg", "png"):
        raise ValueError("format must be jpg or png")
    if filter not in list("grizy"):
        raise ValueError("filter must be one of grizy")
    # A single filter yields a list with exactly one URL.
    urls = geturl(ra, dec, size=size, filters=filter, output_size=output_size, format=format)
    raw = load_cache_or_download(urls[0])
    return Image.open(BytesIO(raw))
|
|
285
|
-
|
|
286
|
-
|
|
287
|
-
def plot_panstarrs_cutout(
        ra,
        dec,
        arcsec,
        interactive=False,
        fn=None,
        title=None,
        save=False,
        ax=False,
        plot_color_image=False,
        height=2.5
):
    """
    Plot PanSTARRS cutouts around (ra, dec): one grayscale panel per grizy
    band, or a single colour image when ``plot_color_image`` is set.

    Returns (fig, axes) when ``interactive`` is True; otherwise the figure is
    closed after optionally saving it under ``fn``.
    """
    arcsec_per_px = 0.25
    ang_px = int(arcsec / arcsec_per_px)

    # Axes show offsets from the target position in arcsec, so the source
    # sits at (0, 0) and is marked with a red cross.
    imshow_kwargs = {
        'origin': 'upper',
        "extent": ([arcsec / 2, -arcsec / 2, -arcsec / 2, arcsec / 2])
    }
    scatter_args = [0, 0]
    scatter_kwargs = {'marker': 'x', 'color': 'red'}

    if not plot_color_image:
        bands = 'grizy'
        if not ax:
            fig, axes = plt.subplots(2, len(bands), sharex='all', sharey='all',
                                     gridspec_kw={'wspace': 0, 'hspace': 0, 'height_ratios': [1, 8]},
                                     figsize=(height * 5, height))
        else:
            fig = plt.gcf()
            axes = ax

        for col, band in enumerate(list(bands)):
            panel_row = axes[1]
            try:
                img = getgrayim(ra, dec, size=ang_px, filter=band)
                panel_row[col].imshow(img, cmap='gray', **imshow_kwargs)
            except PanSTARRSQueryError:
                # No coverage: set the limits by hand and annotate the panel.
                panel_row[col].set_xlim(-arcsec / 2, arcsec / 2)
                panel_row[col].set_ylim(-arcsec / 2, arcsec / 2)
                annotate_not_available(panel_row[col])

            panel_row[col].scatter(*scatter_args, **scatter_kwargs)
            panel_row[col].set_title(band)
            # The narrow top row only provides spacing for the band labels.
            axes[0][col].axis('off')

    else:
        logger.debug('plotting color image')
        if not ax:
            fig, axes = plt.subplots(figsize=(height, height))
        else:
            fig = plt.gcf()
            axes = ax

        try:
            img = getcolorim(ra, dec, size=ang_px)
            axes.imshow(img, **imshow_kwargs)
        except PanSTARRSQueryError:
            axes.set_xlim(-arcsec / 2, arcsec / 2)
            axes.set_ylim(-arcsec / 2, arcsec / 2)
            annotate_not_available(axes)
        axes.scatter(*scatter_args, **scatter_kwargs)

    _this_title = title if title else f"{ra}_{dec}"
    # Axis labels express the offset relative to the target coordinates.
    si = "-" if dec > 0 else "+"
    ylabel = f"Dec {si} {abs(dec):.2f} [arcsec]"
    xlabel = f"RA - {ra:.2f} [arcsec]"
    try:
        axes.set_title(_this_title)
        axes.set_xlabel(xlabel)
        axes.set_ylabel(ylabel)
        axes.grid(ls=":", alpha=0.5)
    except AttributeError:  # in this case axes is an array
        fig.supylabel(ylabel)
        fig.supxlabel(xlabel)
        fig.suptitle(_this_title)
        for panel in axes.flatten():
            panel.grid(ls=":", alpha=0.5)

    if save:
        logger.info(f'saving under {fn}')
        fig.savefig(fn)

    if interactive:
        return fig, axes

    plt.close()
|
|
374
|
-
|
|
375
|
-
|
|
376
|
-
#####################################################
|
|
377
|
-
# END PANSTARRS UTILS #
|
|
378
|
-
###########################################################################################################
|
|
379
|
-
|
|
380
|
-
###########################################################################################################
|
|
381
|
-
# START EXCESS VARIANCE UTILS #
|
|
382
|
-
#####################################################
|
|
383
|
-
|
|
384
|
-
# calculate excess variance as done in section 7.3.4 of (Boller et al)
|
|
385
|
-
# https://www.aanda.org/articles/aa/full_html/2016/04/aa25648-15/aa25648-15.html#S26
|
|
386
|
-
|
|
387
|
-
|
|
388
|
-
def calc_Expectation(a):
    """
    Return the expectation value of ``a`` assuming a uniform probability
    (1/n) for every element, i.e. the arithmetic mean.

    :param a: sequence of numeric values
    :return: the expectation value as a float
    :raises ZeroDivisionError: if ``a`` is empty
    """
    # Each element carries the same probability 1/n, so the expectation
    # reduces to the arithmetic mean. (Fixed: the previous manual loop
    # shadowed the builtin ``sum``.)
    n = len(a)
    return float(sum(a) / n)
|
|
401
|
-
|
|
402
|
-
|
|
403
|
-
def get_excess_variance(y, y_err, mu):
    """
    Compute the normalized excess variance and its uncertainty following
    section 7.3.4 of Boller et al. (2016), A&A 588, A103.

    :param y: measured fluxes (array-like)
    :param y_err: measurement uncertainties (array-like, same length as ``y``)
    :param mu: mean flux
    :return: tuple (excess_variance, uncertainty)
    """
    import numpy as np
    # Accept plain sequences as well as arrays (backward compatible).
    y = np.asarray(y, dtype=float)
    y_err = np.asarray(y_err, dtype=float)
    N = len(y)

    # sum over [(X_i - mu)^2 - sigma_i^2], vectorised instead of a Python loop
    sum_variance = np.sum(np.power(y - mu, 2) - np.power(y_err, 2))
    excess_variance = sum_variance / (N * mu ** 2)

    # uncertainty estimate: combine the two error terms in quadrature
    F_var = np.sqrt(np.abs(excess_variance)) / mu
    # mean squared measurement error (was delegated to calc_Expectation)
    std_exp = float(np.mean(y_err ** 2))
    term1 = np.sqrt(2 / N) * std_exp / (np.power(mu, 2))
    term2 = np.sqrt((std_exp * 2 * F_var) / (N * mu))

    uncertainty = np.sqrt(term1 ** 2 + term2 ** 2)
    return excess_variance, uncertainty
|
|
420
|
-
|
|
421
|
-
|
|
422
|
-
#####################################################
|
|
423
|
-
# END EXCESS VARIANCE UTILS #
|
|
424
|
-
###########################################################################################################
|
|
425
|
-
|
|
426
|
-
|
|
427
|
-
###########################################################################################################
|
|
428
|
-
# START CUSTOM TAP Service #
|
|
429
|
-
#######################################################
|
|
430
|
-
|
|
431
|
-
|
|
432
|
-
class StableAsyncTAPJob(vo.dal.AsyncTAPJob):
    """
    Implements backoff for call of phase which otherwise breaks the code if there are connection issues.
    Also stores the response of TapQuery.submit() under self.submit_response for debugging
    """

    def __init__(self, url, *, session=None, delete=True):
        super().__init__(url, session=session, delete=delete)
        # Filled in by create(); kept around for post-mortem debugging.
        self.submit_response = None

    @classmethod
    def create(
            cls, baseurl, query, *, language="ADQL", maxrec=None, uploads=None,
            session=None, **keywords):
        """
        creates an async tap job on the server under ``baseurl``
        Raises requests.HTTPError if TAPQuery.submit() fails.

        Parameters
        ----------
        baseurl : str
            the TAP baseurl
        query : str
            the query string
        language : str
            specifies the query language, default ADQL.
            useful for services which allow to use the backend query language.
        maxrec : int
            the maximum records to return. defaults to the service default
        uploads : dict
            a mapping from table names to objects containing a votable
        session : object
            optional session to use for network requests
        """
        tap_query = vo.dal.TAPQuery(
            baseurl, query, mode="async", language=language, maxrec=maxrec,
            uploads=uploads, session=session, **keywords)
        submit_response = tap_query.submit()
        # Surface HTTP-level submission failures immediately.
        submit_response.raise_for_status()
        job = cls(submit_response.url, session=session)
        job._client_set_maxrec = maxrec
        job.submit_response = submit_response
        return job

    @property
    @backoff.on_exception(
        backoff.expo,
        (vo.dal.DALServiceError, AttributeError),
        max_tries=50,
        on_backoff=backoff_hndlr
    )
    def phase(self):
        """Job phase, retried with exponential backoff on transient failures."""
        return super().phase
|
|
485
|
-
|
|
486
|
-
|
|
487
|
-
class StableTAPService(vo.dal.TAPService):
    """
    Implements the StableAsyncTAPJob for job submission
    """

    def submit_job(self, query, *, language="ADQL", maxrec=None, uploads=None, **keywords):
        # Delegate to the backoff-enabled job class instead of the default one.
        return StableAsyncTAPJob.create(
            self.baseurl,
            query,
            language=language,
            maxrec=maxrec,
            uploads=uploads,
            session=self._session,
            **keywords
        )
|
|
510
|
-
|
|
511
|
-
|
|
512
|
-
#######################################################
|
|
513
|
-
# END CUSTOM TAP Service #
|
|
514
|
-
###########################################################################################################
|
|
515
|
-
|
|
516
|
-
|
|
517
|
-
###########################################################################################################
|
|
518
|
-
# START CUSTOM TAP Service #
|
|
519
|
-
#######################################################
|
|
520
|
-
|
|
521
|
-
|
|
522
|
-
class ErrorQueue(Queue):
    """Queue subclass whose join() re-raises exceptions from worker threads."""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Side channel collecting sys.exc_info() tuples from failed workers.
        self.error_queue = Queue()

    def report_error(self, exc_info):
        """Called by workers to push an exception into the error queue."""
        self.error_queue.put(exc_info)
        # Also decrement unfinished_tasks, so join() won't block forever.
        # NOTE(review): if this was the last unfinished task, join() returns
        # without raising — the error only surfaces while tasks remain pending.
        with self.all_tasks_done:
            self.unfinished_tasks = max(0, self.unfinished_tasks - 1)
            self.all_tasks_done.notify_all()

    def join(self):
        """Wait until all tasks are done, or raise if a worker failed."""
        with self.all_tasks_done:
            while self.unfinished_tasks:
                if not self.error_queue.empty():
                    exc_type, exc_value, exc_tb = self.error_queue.get()
                    raise exc_value.with_traceback(exc_tb)
                self.all_tasks_done.wait()
|
|
545
|
-
|
|
546
|
-
|
|
547
|
-
class ExceptionSafeThread(Thread):
    """Thread subclass that reports uncaught exceptions to the ErrorQueue."""

    def __init__(self, error_queue: ErrorQueue, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Destination for exc_info tuples when run() fails.
        self.error_queue = error_queue

    def run(self):
        # Delegate to Thread.run and forward any uncaught exception instead
        # of letting it die silently with the thread.
        try:
            super().run()
        except Exception:
            self.error_queue.report_error(sys.exc_info())
|