sparclclient 1.2.1__py2.py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- sparcl/Results.py +234 -0
- sparcl/__init__.py +33 -0
- sparcl/benchmarks/__init__.py +0 -0
- sparcl/benchmarks/benchmarks.py +337 -0
- sparcl/client.py +869 -0
- sparcl/conf.py +34 -0
- sparcl/exceptions.py +141 -0
- sparcl/fields.py +160 -0
- sparcl/gather_2d.py +233 -0
- sparcl/notebooks/sparcl-examples.ipynb +1550 -0
- sparcl/resample_spectra.py +41 -0
- sparcl/sparc.ini +11 -0
- sparcl/type_conversion.py +418 -0
- sparcl/unsupported.py +65 -0
- sparcl/utils.py +209 -0
- sparclclient-1.2.1.dist-info/LICENSE +31 -0
- sparclclient-1.2.1.dist-info/METADATA +14 -0
- sparclclient-1.2.1.dist-info/RECORD +19 -0
- sparclclient-1.2.1.dist-info/WHEEL +5 -0
|
@@ -0,0 +1,41 @@
|
|
|
1
|
+
# NOT INTENDED FOR PUBLIC USE!
|
|
2
|
+
#
|
|
3
|
+
# See:
|
|
4
|
+
# https://spectres.readthedocs.io/en/latest/
|
|
5
|
+
import math
|
|
6
|
+
import spectres
|
|
7
|
+
import numpy as np
|
|
8
|
+
|
|
9
|
+
# Local
|
|
10
|
+
import sparcl.client
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
# Per paper, should be able to pass all flux in one call to spectres
|
|
14
|
+
# https://arxiv.org/pdf/1705.05165.pdf
|
|
15
|
+
# Perhaps users would rather the bins uniform (1,5,20 Angstroms?)
|
|
16
|
+
def _resample_flux(records, wavstep=1):
    """Resample every record's flux onto one shared integer wavelength grid.

    The grid spans the floor of the smallest to the ceiling of the largest
    wavelength found across all records, stepped by ``wavstep`` (Angstroms,
    presumably -- units depend on the records; confirm with the caller).

    Parameters
    ----------
    records : list
        Records exposing ``.wavelength`` and ``.flux`` sequences.
    wavstep : int
        Grid spacing in the same units as ``wavelength``.

    Returns
    -------
    tuple
        ``(flux_2d, new_wavs)`` -- a ``(len(records), len(grid))`` float
        array of resampled flux and the 1-D grid itself.
    """
    lo = math.floor(min(min(r.wavelength) for r in records))
    hi = math.ceil(max(max(r.wavelength) for r in records))

    grid = np.array(range(lo, hi + 1, wavstep))
    # Pre-fill with NaN (None coerced under dtype=float) so rows not fully
    # covered by spectres output are visibly missing rather than zero.
    resampled = np.full([len(records), len(grid)], None, dtype=float)

    for row, rec in enumerate(records):
        resampled[row] = spectres.spectres(
            grid, rec.wavelength, rec.flux, verbose=False
        )
    return resampled, grid
|
|
32
|
+
|
|
33
|
+
|
|
34
|
+
def _tt0(numrecs=20):
    """Smoke test: fetch ``numrecs`` BOSS-DR16 records and resample them.

    Performs network I/O against the SPARCL service. Returns the
    ``(flux_2d, new_wavs)`` pair produced by ``_resample_flux``.
    """
    client = sparcl.client.SparclClient()
    constraints = dict(data_release=["BOSS-DR16"])
    found = client.find(constraints=constraints, limit=numrecs)
    got = client.retrieve(found.ids)
    return _resample_flux(got.records)
|
sparcl/sparc.ini
ADDED
|
@@ -0,0 +1,11 @@
|
|
|
1
|
+
# This config file is for the NOIRLab Astro Data Archive client.
|
|
2
|
+
# It gets data from a server such as: https://specserver.noirlab.edu/
|
|
3
|
+
|
|
4
|
+
[DEFAULT]
|
|
5
|
+
ServerBaseUrl = https://specserver.noirlab.edu
|
|
6
|
+
ServerAliveInterval = 45
|
|
7
|
+
|
|
8
|
+
|
|
9
|
+
[sparc.server]
|
|
10
|
+
# Time limit (seconds) for connecting to NOIRLab Astro Data Archive server
|
|
11
|
+
ServerTimeout = 30
|
|
@@ -0,0 +1,418 @@
|
|
|
1
|
+
# Python Standard Library
|
|
2
|
+
from abc import ABC, abstractmethod
|
|
3
|
+
import copy
|
|
4
|
+
|
|
5
|
+
#!from pprint import pformat
|
|
6
|
+
from enum import Enum, auto
|
|
7
|
+
|
|
8
|
+
# External Packages
|
|
9
|
+
import numpy as np
|
|
10
|
+
|
|
11
|
+
#!import pandas as pd
|
|
12
|
+
from specutils import Spectrum1D
|
|
13
|
+
import astropy.units as u
|
|
14
|
+
from astropy.nddata import InverseVariance
|
|
15
|
+
|
|
16
|
+
# Local Packages
|
|
17
|
+
import sparcl.exceptions as ex
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
"""It would be much better if this were abstracted and easier to
|
|
21
|
+
update with new Data Types and new DataReleases. Perhaps use
|
|
22
|
+
something like the Server "Personalities". I've rejected abstracting
|
|
23
|
+
for now because I think we need operational experience with the DataType
|
|
24
|
+
feature and how it interacts with other features (especially global
|
|
25
|
+
Rename and retrieve(INCLUDE)).
|
|
26
|
+
|
|
27
|
+
DataType conversion should be done completely within the Client, not
|
|
28
|
+
on the Server. The obvious reason is Clients are language
|
|
29
|
+
dependent, the Server API is not. But for Client to be able to know
|
|
30
|
+
all about fields names (mapping from original to new names, which ones
|
|
31
|
+
are required) it needs info from the Server. The Client gets such
|
|
32
|
+
tables on instance instantiation through one web-service call that
|
|
33
|
+
grabs everything, then pulls it apart into multiple DataField-related
|
|
34
|
+
LUTs (LookUpTables, aka dictionaries).
|
|
35
|
+
|
|
36
|
+
Questions abound for use-cases.
|
|
37
|
+
|
|
38
|
+
1. Is it very important to be able to convert a record LIST to a
|
|
39
|
+
single data structure? Example: for Pandas DataFrame we combine all
|
|
40
|
+
vectors in a record into a 2D DataFrame. What about those across
|
|
41
|
+
records into a 3D DataFrame?
|
|
42
|
+
|
|
43
|
+
2. Should vectors and scalars be fundamentally separated? (see #1) If
|
|
44
|
+
so, how do we avoid hard coding the distinction for every
|
|
45
|
+
DataRelease?
|
|
46
|
+
|
|
47
|
+
"""
|
|
48
|
+
|
|
49
|
+
|
|
50
|
+
# Replace all uses of string rtype with enum @@@
|
|
51
|
+
class Rtype(Enum):
    """Record types a retrieved record can be converted to.

    Intended to replace the string ``rtype`` values ("json", "numpy",
    "pandas", "spectrum1d") currently accepted by ``convert()``;
    the migration is not complete (see the comment above this class).
    """

    JSON = auto()
    NUMPY = auto()
    PANDAS = auto()
    SPECTRUM1D = auto()
|
|
56
|
+
|
|
57
|
+
|
|
58
|
+
class Convert(ABC):
    """Convert JSON record to mix of plain python
    and selected data record type.

    Subclasses implement one converter per Data Release. Each method
    takes a record (dict keyed by NEW field names) and ``o2nLUT``, a
    dict mapping original field names to new field names, and returns
    a new dict.
    """

    @abstractmethod
    def to_numpy(self, record, o2nLUT):
        """Return a record whose vector fields use Numpy structures."""
        newrec = copy.deepcopy(record)
        return newrec

    @abstractmethod
    def to_spectrum1d(self, record, o2nLUT):
        """Return a record whose spectra use specutils Spectrum1D."""
        newrec = copy.deepcopy(record)
        return newrec

    def to_pandas(self, record, o2nLUT):
        """Return a record whose vector fields use a Pandas DataFrame.

        Not abstract: most converters do not implement a Pandas form yet
        (their implementations are commented out). ``convert()`` still
        dispatches ``rtype == "pandas"`` here, so this default raises
        NotImplementedError -- a clearer failure than the AttributeError
        callers previously got.
        """
        raise NotImplementedError(
            f"{type(self).__name__} does not support conversion to Pandas"
        )
|
|
78
|
+
|
|
79
|
+
|
|
80
|
+
class NoopConvert(Convert):
    """Pass-through converter: every target type returns the record as-is."""

    def to_numpy(self, record, o2nLUT):
        # Identity -- no conversion applied.
        return record

    def to_pandas(self, record, o2nLUT):
        # Identity -- no conversion applied.
        return record

    def to_spectrum1d(self, record, o2nLUT):
        # Identity -- no conversion applied.
        return record
|
|
89
|
+
|
|
90
|
+
|
|
91
|
+
class SdssDr16(Convert):
    """Type conversions for SDSS-DR16 records.

    SDSS uses lower-case coadd field names (e.g. "spectra.coadd.flux");
    BossDr16 is structurally identical but uses upper-case names.
    """

    def to_numpy(self, record, o2nLUT):
        """Convert FitsFile record to a structure that uses Numpy"""
        # Vector fields (ORIGINAL names) folded into one 2D array below.
        arflds = [
            "spectra.coadd.and_mask",
            "spectra.coadd.flux",
            "spectra.coadd.ivar",
            "spectra.coadd.loglam",
            "spectra.coadd.model",
            "spectra.coadd.or_mask",
            "spectra.coadd.sky",
            "spectra.coadd.wdisp",
        ]
        # o2nLUT maps original field name -> new field name; the record
        # itself is keyed by the NEW names.
        lofl = [record[o2nLUT[f]] for f in arflds if f in o2nLUT]
        newrec = dict(nparr=np.array(lofl))
        # Carry over remaining fields unchanged; arflds content already
        # lives inside newrec["nparr"], so skip those to avoid duplication.
        for orig, new in o2nLUT.items():
            if orig in arflds:
                continue
            if new in record:
                newrec[new] = record[new]
        return newrec

    # Sdss
    def to_spectrum1d(self, record, o2nLUT):
        """Convert a record into a dict holding a specutils Spectrum1D
        (key "spec1d") plus all fields not consumed by the conversion."""
        # Fields consumed by the Spectrum1D; excluded from carry-over below.
        arflds = [
            "red_shift",
            "spectra.coadd.flux",
            "spectra.coadd.ivar",
            "spectra.coadd.loglam",
            "spectra.coadd.and_mask",
        ]

        loglam = record[o2nLUT["spectra.coadd.loglam"]]
        flux = record[o2nLUT["spectra.coadd.flux"]]
        ivar = record[o2nLUT["spectra.coadd.ivar"]]
        and_mask = record[o2nLUT["spectra.coadd.and_mask"]]

        # loglam holds log10(wavelength); undo the log, attach Angstroms.
        wavelength = (10 ** np.array(loglam)) * u.AA
        flux = np.array(flux) * 10**-17 * u.Unit("erg cm-2 s-1 AA-1")
        ivar = InverseVariance(np.array(ivar))
        # NOTE(review): "red_shift" is looked up directly, not through
        # o2nLUT -- assumes that field keeps the same name; confirm.
        z = record.get("red_shift")

        newrec = dict(
            # flux, uncertainty, wavelength, mask(and), redshift
            spec1d=Spectrum1D(
                spectral_axis=wavelength,
                flux=flux,
                uncertainty=ivar,
                redshift=z,
                mask=and_mask,
            ),
        )
        for orig, new in o2nLUT.items():
            if orig in arflds:
                continue
            if new in record:
                newrec[new] = record[new]

        return newrec

    # Pandas conversion is intentionally disabled; see Convert.to_pandas.
    #! def to_pandas(self, record, o2nLUT):
    #!     arflds = [
    #!         'spectra.coadd.and_mask',
    #!         'spectra.coadd.flux',
    #!         'spectra.coadd.ivar',
    #!         'spectra.coadd.loglam',
    #!         'spectra.coadd.model',
    #!         'spectra.coadd.or_mask',
    #!         'spectra.coadd.sky',
    #!         'spectra.coadd.wdisp',
    #!     ]
    #!     dfdict = dict((o2nLUT[f], record[o2nLUT[f]])
    #!                   for f in arflds if f in o2nLUT)
    #!     newrec = dict(df = pd.DataFrame(dfdict))
    #!     for orig,new in o2nLUT.items():
    #!         if orig in arflds:
    #!             continue
    #!         if new in record:
    #!             newrec[new] = record[new]
    #!     return(newrec)
|
|
172
|
+
|
|
173
|
+
|
|
174
|
+
class BossDr16(Convert):
    """Type conversions for BOSS-DR16 records.

    Structurally identical to SdssDr16 except BOSS uses upper-case
    coadd field names (e.g. "spectra.coadd.FLUX").
    """

    def to_numpy(self, record, o2nLUT):
        """Convert a record to a dict whose vector fields are stacked
        into one 2D Numpy array under key "nparr"."""
        # Vector fields (ORIGINAL names) folded into the 2D array below.
        arflds = [
            "spectra.coadd.AND_MASK",
            "spectra.coadd.FLUX",
            "spectra.coadd.IVAR",
            "spectra.coadd.LOGLAM",
            "spectra.coadd.MODEL",
            "spectra.coadd.OR_MASK",
            "spectra.coadd.SKY",
            "spectra.coadd.WDISP",
        ]
        # o2nLUT maps original field name -> new field name; the record
        # itself is keyed by the NEW names.
        lofl = [record[o2nLUT[f]] for f in arflds if f in o2nLUT]
        newrec = dict(nparr=np.array(lofl))
        for orig, new in o2nLUT.items():
            # Don't carry over the fields used to build the new datatype.
            # This would be duplication since their content is already
            # in the new datatype.
            if orig in arflds:
                continue
            if new in record:
                newrec[new] = record[new]
        return newrec

    # BOSS
    def to_spectrum1d(self, record, o2nLUT):
        """Convert a record into a dict holding a specutils Spectrum1D
        (key "spec1d") plus all fields not consumed by the conversion."""
        # Fields consumed by the Spectrum1D; excluded from carry-over below.
        arflds = [
            "red_shift",
            "spectra.coadd.FLUX",
            "spectra.coadd.IVAR",
            "spectra.coadd.LOGLAM",
            "spectra.coadd.AND_MASK",
        ]
        loglam = record[o2nLUT["spectra.coadd.LOGLAM"]]
        flux = record[o2nLUT["spectra.coadd.FLUX"]]
        ivar = record[o2nLUT["spectra.coadd.IVAR"]]
        and_mask = record[o2nLUT["spectra.coadd.AND_MASK"]]

        # LOGLAM holds log10(wavelength); undo the log, attach Angstroms.
        wavelength = (10 ** np.array(loglam)) * u.AA
        flux = np.array(flux) * 10**-17 * u.Unit("erg cm-2 s-1 AA-1")
        ivar = InverseVariance(np.array(ivar))
        # NOTE(review): "red_shift" is looked up directly, not through
        # o2nLUT -- assumes that field keeps the same name; confirm.
        z = record.get("red_shift")

        newrec = dict(
            # flux, uncertainty, wavelength, mask(and), redshift
            spec1d=Spectrum1D(
                spectral_axis=wavelength,
                flux=flux,
                uncertainty=ivar,
                redshift=z,
                mask=and_mask,
            ),
        )
        for orig, new in o2nLUT.items():
            if orig in arflds:
                continue
            if new in record:
                newrec[new] = record[new]
        return newrec

    # Pandas conversion is intentionally disabled; see Convert.to_pandas.
    #! def to_pandas(self, record, o2nLUT):  # BOSS
    #!     arflds = [
    #!         'spectra.coadd.AND_MASK',
    #!         'spectra.coadd.FLUX',
    #!         'spectra.coadd.IVAR',
    #!         'spectra.coadd.LOGLAM',
    #!         'spectra.coadd.MODEL',
    #!         'spectra.coadd.OR_MASK',
    #!         'spectra.coadd.SKY',
    #!         'spectra.coadd.WDISP',
    #!     ]
    #!     dfdict = dict((o2nLUT[f], record[o2nLUT[f]])
    #!                   for f in arflds if f in o2nLUT)
    #!     newrec = dict(df = pd.DataFrame(dfdict))
    #!     for orig,new in o2nLUT.items():
    #!         if orig in arflds:
    #!             continue
    #!         if new in record:
    #!             newrec[new] = record[new]
    #!     return(newrec)
|
|
255
|
+
|
|
256
|
+
|
|
257
|
+
class Desi(Convert):
    """Type conversions for DESI records.

    DESI records carry three arms (b, r, z), each with its own
    wavelength/flux/ivar/mask vectors, so to_spectrum1d produces three
    Spectrum1D objects instead of one.
    """

    def to_numpy(self, record, o2nLUT):
        """Convert a record to a dict whose vector fields are stacked
        into one 2D Numpy array under key "nparr"."""
        # Vector fields (ORIGINAL names) folded into the 2D array below.
        arflds = [
            "spectra.b_flux",
            "spectra.b_ivar",
            "spectra.b_mask",
            "spectra.b_wavelength",
            "spectra.r_flux",
            "spectra.r_ivar",
            "spectra.r_mask",
            "spectra.r_wavelength",
            "spectra.z_flux",
            "spectra.z_ivar",
            "spectra.z_mask",
            "spectra.z_wavelength",
        ]
        # o2nLUT maps original field name -> new field name; the record
        # itself is keyed by the NEW names.
        lofl = [record[o2nLUT[f]] for f in arflds if f in o2nLUT]
        newrec = dict(nparr=np.array(lofl))
        # Carry over remaining fields; skip arflds to avoid duplication.
        for orig, new in o2nLUT.items():
            if orig in arflds:
                continue
            if new in record:
                newrec[new] = record[new]
        return newrec

    def to_spectrum1d(self, record, o2nLUT):  # Desi
        """Convert a record into a dict with three Spectrum1D objects
        ("b_spec1d", "r_spec1d", "z_spec1d") plus all fields not consumed
        by the conversion."""
        # Fields consumed by the Spectrum1D trio; excluded from carry-over.
        arflds = [
            "red_shift",
            "spectra.b_flux",
            "spectra.b_ivar",
            "spectra.b_mask",
            "spectra.b_wavelength",
            "spectra.r_flux",
            "spectra.r_ivar",
            "spectra.r_mask",
            "spectra.r_wavelength",
            "spectra.z_flux",
            "spectra.z_ivar",
            "spectra.z_mask",
            "spectra.z_wavelength",
        ]

        # NOTE(review): "red_shift" is looked up directly, not through
        # o2nLUT -- assumes that field keeps the same name; confirm.
        z = record.get("red_shift")

        # _b
        wavelength_b = record[o2nLUT["spectra.b_wavelength"]]
        flux_b = record[o2nLUT["spectra.b_flux"]]
        ivar_b = record[o2nLUT["spectra.b_ivar"]]
        mask_b = record[o2nLUT["spectra.b_mask"]]
        # Define units
        wavelength_b = np.array(wavelength_b) * u.AA
        flux_b = np.array(flux_b) * 10**-17 * u.Unit("erg cm-2 s-1 AA-1")
        ivar_b = InverseVariance(np.array(ivar_b))

        # _r
        wavelength_r = record[o2nLUT["spectra.r_wavelength"]]
        flux_r = record[o2nLUT["spectra.r_flux"]]
        ivar_r = record[o2nLUT["spectra.r_ivar"]]
        mask_r = record[o2nLUT["spectra.r_mask"]]
        # Define units
        wavelength_r = np.array(wavelength_r) * u.AA
        flux_r = np.array(flux_r) * 10**-17 * u.Unit("erg cm-2 s-1 AA-1")
        ivar_r = InverseVariance(np.array(ivar_r))

        # _z
        wavelength_z = record[o2nLUT["spectra.z_wavelength"]]
        flux_z = record[o2nLUT["spectra.z_flux"]]
        ivar_z = record[o2nLUT["spectra.z_ivar"]]
        mask_z = record[o2nLUT["spectra.z_mask"]]
        # Define units
        wavelength_z = np.array(wavelength_z) * u.AA
        flux_z = np.array(flux_z) * 10**-17 * u.Unit("erg cm-2 s-1 AA-1")
        ivar_z = InverseVariance(np.array(ivar_z))

        # NOTE(review): masks are passed through raw (not wrapped in
        # np.array) unlike flux/ivar -- verify Spectrum1D accepts that.
        newrec = dict(
            # flux, uncertainty, wavelength, mask, redshift
            b_spec1d=Spectrum1D(
                spectral_axis=wavelength_b,
                flux=flux_b,
                uncertainty=ivar_b,
                redshift=z,
                mask=mask_b,
            ),
            r_spec1d=Spectrum1D(
                spectral_axis=wavelength_r,
                flux=flux_r,
                uncertainty=ivar_r,
                redshift=z,
                mask=mask_r,
            ),
            z_spec1d=Spectrum1D(
                spectral_axis=wavelength_z,
                flux=flux_z,
                uncertainty=ivar_z,
                redshift=z,
                mask=mask_z,
            ),
        )
        for orig, new in o2nLUT.items():
            if orig in arflds:
                continue
            if new in record:
                newrec[new] = record[new]
        return newrec
|
|
361
|
+
|
|
362
|
+
|
|
363
|
+
class DesiDenali(Desi):
    # The Denali data release uses the generic DESI conversions unchanged.
    pass
|
|
365
|
+
|
|
366
|
+
|
|
367
|
+
class DesiEverest(Desi):
    # The Everest data release uses the generic DESI conversions unchanged.
    pass
|
|
369
|
+
|
|
370
|
+
|
|
371
|
+
# DR Instance LookUp Table
# Maps a Data Release name (as found in record["_dr"]) to the Convert
# instance that handles it; convert() rejects any _dr not listed here.
diLUT = {
    "SDSS-DR16": SdssDr16(),
    "BOSS-DR16": BossDr16(),
    "DESI-denali": DesiDenali(),
    "DESI-everest": DesiEverest(),
    #'Unknown': NoopConvert(),
}
|
|
379
|
+
|
|
380
|
+
|
|
381
|
+
def convert(record, rtype, client, include, verbose=False):
    """Convert a single retrieved record to the requested record type.

    Parameters
    ----------
    record : dict
        A record keyed by NEW (renamed) field names; must contain "_dr"
        naming its Data Release (unless rtype is None).
    rtype : str or None
        One of "json", "numpy", "pandas", "spectrum1d". None means
        no conversion: the record is returned unchanged.
    client : SparclClient
        Supplies orig2newLUT, the per-DR original->new field-name map.
    include : sequence or None
        Currently UNUSED -- field pruning by include list is disabled
        (the implementation was commented out pending design).
    verbose : bool
        Currently unused here; kept for interface compatibility.

    Returns
    -------
    dict
        The converted record.

    Raises
    ------
    ex.UnkDr
        If record["_dr"] has no registered converter in diLUT.
    Exception
        If rtype is not one of the recognized type names.
    """
    if rtype is None:
        return record

    dr = record["_dr"]

    # Validate: only Data Releases with a registered converter are allowed.
    if dr not in diLUT:
        allowed = ", ".join(diLUT)
        msg = (
            f'The Data Set associated with a record, "{dr}",'
            f" is not supported for Type Conversion."
            f" Available Data Sets are: {allowed}."
        )
        raise ex.UnkDr(msg)

    # Membership was verified above, so a plain lookup is safe
    # (the old NoopConvert() fallback was unreachable).
    drin = diLUT[dr]

    # Copy so the "_dr" passthrough entry does not pollute the client's LUT.
    o2nLUT = copy.copy(client.orig2newLUT[dr])  # orig2newLUT[dr][orig] = new
    o2nLUT["_dr"] = "_dr"

    if rtype == "json":
        return record
    elif rtype == "numpy":
        return drin.to_numpy(record, o2nLUT)
    elif rtype == "pandas":
        return drin.to_pandas(record, o2nLUT)
    elif rtype == "spectrum1d":
        return drin.to_spectrum1d(record, o2nLUT)
    else:
        raise Exception(f"Unknown record type ({rtype})")
|
sparcl/unsupported.py
ADDED
|
@@ -0,0 +1,65 @@
|
|
|
1
|
+
# End Users should not use anything from this file. All of it is
|
|
2
|
+
# considered experimental and may be broken or change without notice.
|
|
3
|
+
############################################
|
|
4
|
+
# Python Standard Library
|
|
5
|
+
from urllib.parse import urlencode
|
|
6
|
+
|
|
7
|
+
#!from urllib.parse import urlparse
|
|
8
|
+
#!from warnings import warn
|
|
9
|
+
import pickle
|
|
10
|
+
import tempfile
|
|
11
|
+
import json
|
|
12
|
+
|
|
13
|
+
############################################
|
|
14
|
+
# External Packages
|
|
15
|
+
import requests
|
|
16
|
+
|
|
17
|
+
############################################
|
|
18
|
+
# Local Packages
|
|
19
|
+
#!from sparcl.fields import Fields
|
|
20
|
+
import sparcl.exceptions as ex
|
|
21
|
+
|
|
22
|
+
_STAGE = "https://sparclstage.datalab.noirlab.edu" # noqa: E221
|
|
23
|
+
_PAT = "https://sparc1.datalab.noirlab.edu" # noqa: E221
|
|
24
|
+
|
|
25
|
+
drs = ["SDSS-DR16", "BOSS-DR16", "DESI-EDR"]
|
|
26
|
+
|
|
27
|
+
|
|
28
|
+
def retrieve(
    ids,
    include=("id",),
    dataset_list=("BOSS-DR16",),
    server=_PAT,
    svc="spectras",  # or 'retrieve'
    limit=100,
    verbose=True,
):
    """EXPERIMENTAL: POST ids to the server and unpickle the response.

    Not for end users (see module header). Performs network I/O.

    Parameters
    ----------
    ids : list
        Record identifiers to retrieve.
    include : sequence of str
        Field names to include in each returned record.
    dataset_list : sequence of str
        Data Release names to search.
    server : str
        Base URL of the SPARC server.
    svc : str
        Service endpoint name ("spectras" or "retrieve").
    limit : int
        Maximum number of records.
    verbose : bool
        Print the URL and an equivalent curl command.

    Returns
    -------
    The unpickled server response.

    Raises
    ------
    A SparclException subclass (via ex.genSparclException) on any
    non-200 response.
    """
    # Defaults are tuples, not lists, to avoid the shared-mutable-default
    # pitfall; ",".join accepts either.
    uparams = dict(
        include=",".join(include),
        limit=limit,
        dataset_list=",".join(dataset_list),
    )
    qstr = urlencode(uparams)

    url = f"{server}/sparc/{svc}/?{qstr}"
    if verbose:
        print(f"Using ids={ids[:2]}")
        print(f'Using url="{url}"')
        print(f"curl -X POST \"{url}\" -d '{json.dumps(ids)}' > retrieve.pkl")

    res = requests.post(url, json=ids)

    if res.status_code != 200:
        raise ex.genSparclException(res, verbose=verbose)

    # SECURITY: pickle.load on bytes fetched over the network executes
    # arbitrary code if the server (or the connection) is compromised.
    # Acceptable only because this module is explicitly unsupported and
    # talks to our own servers; do not reuse this pattern elsewhere.
    with tempfile.TemporaryFile(mode="w+b") as fp:
        for chunk in res.iter_content(chunk_size=None):
            fp.write(chunk)
        # Position to start of file for pickle reading (load)
        fp.seek(0)
        results = pickle.load(fp)

    return results
|