mwdust 1.6 (wheel: cp312-cp312-macosx_10_13_x86_64.whl)

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of mwdust might be problematic. Click here for more details.

Binary file
mwdust/Combined15.py ADDED
@@ -0,0 +1,61 @@
1
+ ###############################################################################
2
+ #
3
+ # Combined15: extinction model obtained from a combination of Marshall et al.
4
+ # (2006), Green et al. (2015), and Drimmel et al. (2003)
5
+ #
6
+ ###############################################################################
7
+ import os, os.path
8
+ import numpy
9
+ import h5py
10
+ from mwdust.util.download import downloader, dust_dir
11
+ from mwdust.HierarchicalHealpixMap import HierarchicalHealpixMap
12
+ _DEGTORAD= numpy.pi/180.
13
+ _combineddir= os.path.join(dust_dir, 'combined15')
14
class Combined15(HierarchicalHealpixMap):
    """extinction model obtained from a combination of Marshall et al.
    (2006), Green et al. (2015), and Drimmel et al. (2003)"""
    def __init__(self,filter=None,sf10=True,load_samples=False,
                 interpk=1):
        """
        NAME:
           __init__
        PURPOSE:
           Initialize the combined dust map
        INPUT:
           filter= filter to return the extinction in
           sf10= (True) if True, use the Schlafly & Finkbeiner calibrations
           load_samples= (False) accepted for interface compatibility with
                         the Green et al. maps; this combined map ships no
                         samples, so the keyword is ignored
           interpk= (1) interpolation order
        OUTPUT:
           object
        HISTORY:
           2015-07-28 - Started - Bovy (UofT)
        """
        HierarchicalHealpixMap.__init__(self,filter=filter,sf10=sf10)
        # Read the map: pixel bookkeeping and best-fit E(B-V) profiles
        with h5py.File(os.path.join(_combineddir,'dust-map-3d.h5'),'r') \
                as combineddata:
            self._pix_info= combineddata['/pixel_info'][:]
            self._best_fit= combineddata['/best_fit'][:]
        # Utilities: distance-modulus grid of the map and the HEALPix
        # resolutions present in the multi-resolution pixelization
        self._distmods= numpy.linspace(4.,19.,31)
        self._minnside= numpy.amin(self._pix_info['nside'])
        self._maxnside= numpy.amax(self._pix_info['nside'])
        nlevels= int(numpy.log2(self._maxnside//self._minnside))+1
        self._nsides= [self._maxnside//2**ii for ii in range(nlevels)]
        self._indexArray= numpy.arange(len(self._pix_info['healpix_index']))
        # For the interpolation
        self._intps= numpy.zeros(len(self._pix_info['healpix_index']),
                                 dtype='object') # array to cache interpolated extinctions
        self._interpk= interpk
        return None

    @classmethod
    def download(cls, test=False):
        """Download the combined map of Bovy et al. (2015):
        Marshall+Green+Drimmel for full sky coverage; no-op if the file
        is already present."""
        combined15_path = os.path.join(dust_dir, "combined15", "dust-map-3d.h5")
        if not os.path.exists(combined15_path):
            # BUG FIX: os.mkdir failed when dust_dir itself was missing and
            # raced with concurrent downloads; makedirs(exist_ok=True) handles both
            os.makedirs(os.path.join(dust_dir, "combined15"), exist_ok=True)
            _COMBINED15_URL = "https://zenodo.org/record/31262/files/dust-map-3d.h5"
            downloader(_COMBINED15_URL, combined15_path, cls.__name__, test=test)
        return None
mwdust/Combined19.py ADDED
@@ -0,0 +1,62 @@
1
+ ###############################################################################
2
+ #
3
+ # Combined19: extinction model obtained from a combination of Marshall et al.
4
+ # (2006), Green et al. (2019), and Drimmel et al. (2003)
5
+ #
6
+ ###############################################################################
7
+ import os, os.path
8
+ import numpy
9
+ import h5py
10
+ from mwdust.util.download import dust_dir, downloader
11
+ from mwdust.HierarchicalHealpixMap import HierarchicalHealpixMap
12
+ _DEGTORAD= numpy.pi/180.
13
+ _combineddir= os.path.join(dust_dir,'combined19')
14
class Combined19(HierarchicalHealpixMap):
    """extinction model obtained from a combination of Marshall et al.
    (2006), Green et al. (2019), and Drimmel et al. (2003)"""
    def __init__(self,filter=None,sf10=True,load_samples=False,
                 interpk=1):
        """
        NAME:
           __init__
        PURPOSE:
           Initialize the combined dust map
        INPUT:
           filter= filter to return the extinction in
           sf10= (True) if True, use the Schlafly & Finkbeiner calibrations
           load_samples= (False) accepted for interface compatibility with
                         the Green et al. maps; this combined map ships no
                         samples, so the keyword is ignored
           interpk= (1) interpolation order
        OUTPUT:
           object
        HISTORY:
           2015-07-28 - Started - Bovy (UofT)
           2019-09-12 - Updated - Rybizki (MPIA)
        """
        HierarchicalHealpixMap.__init__(self,filter=filter,sf10=sf10)
        # Read the map: pixel bookkeeping and best-fit E(B-V) profiles
        with h5py.File(os.path.join(_combineddir,'combine19.h5'),'r') \
                as combineddata:
            self._pix_info= combineddata['/pixel_info'][:]
            self._best_fit= combineddata['/best_fit'][:]
        # Utilities: this map has a finer distance-modulus grid (120 bins)
        # than the 2015 combined map
        self._distmods= numpy.linspace(4,18.875,120)
        self._minnside= numpy.amin(self._pix_info['nside'])
        self._maxnside= numpy.amax(self._pix_info['nside'])
        nlevels= int(numpy.log2(self._maxnside//self._minnside))+1
        self._nsides= [self._maxnside//2**ii for ii in range(nlevels)]
        self._indexArray= numpy.arange(len(self._pix_info['healpix_index']))
        # For the interpolation
        self._intps= numpy.zeros(len(self._pix_info['healpix_index']),
                                 dtype='object') # array to cache interpolated extinctions
        self._interpk= interpk
        return None

    @classmethod
    def download(cls, test=False):
        """Download the combined map: Marshall+Green19+Drimmel for full sky
        coverage; no-op if the file is already present."""
        combined19_path = os.path.join(dust_dir, "combined19", "combine19.h5")
        if not os.path.exists(combined19_path):
            # BUG FIX: os.mkdir failed when dust_dir itself was missing and
            # raced with concurrent downloads; makedirs(exist_ok=True) handles both
            os.makedirs(os.path.join(dust_dir, "combined19"), exist_ok=True)
            _COMBINED19_URL = "https://zenodo.org/record/3566060/files/combine19.h5"
            downloader(_COMBINED19_URL, combined19_path, cls.__name__, test=test)
        return None
mwdust/Drimmel03.py ADDED
@@ -0,0 +1,293 @@
1
+ ###############################################################################
2
+ #
3
+ # Drimmel03: extinction model from Drimmel et al. 2003 2003A&A...409..205D
4
+ #
5
+ ###############################################################################
6
+ import os
7
+ import copy
8
+ import numpy
9
+ import tarfile
10
+ import inspect
11
+ from scipy.ndimage import map_coordinates
12
+ from scipy import optimize
13
+ try:
14
+ import healpy
15
+ except ImportError: pass
16
+ from mwdust.util.extCurves import aebv
17
+ from mwdust.util import read_Drimmel
18
+ from mwdust.util.tools import cos_sphere_dist
19
+ from mwdust.util.download import dust_dir, downloader
20
+ from mwdust.DustMap3D import DustMap3D
21
+
22
+ _DEGTORAD= numpy.pi/180.
23
class Drimmel03(DustMap3D):
    """extinction model from Drimmel et al. 2003 2003A&A...409..205D"""
    def __init__(self,filter=None,sf10=True):
        """
        NAME:
           __init__
        PURPOSE:
           Initialize the Drimmel03 dust map
        INPUT:
           filter= filter to return the extinction in
           sf10= (True) if True, use the Schlafly & Finkbeiner calibrations
        OUTPUT:
           object
        HISTORY:
           2013-12-10 - Started - Bovy (IAS)
        """
        DustMap3D.__init__(self,filter=filter)
        self._sf10= sf10
        # Read the maps (disk, spiral, Orion components on several grids)
        drimmelMaps= read_Drimmel.readDrimmelAll()
        self._drimmelMaps= drimmelMaps
        # Sines and cosines of sky positions of COBE pixels, used below to
        # find the nearest COBE pixel for the rescaling factors
        self._rf_sintheta= numpy.sin(numpy.pi/2.-self._drimmelMaps['rf_glat']*_DEGTORAD)
        self._rf_costheta= numpy.cos(numpy.pi/2.-self._drimmelMaps['rf_glat']*_DEGTORAD)
        self._rf_sinphi= numpy.sin(self._drimmelMaps['rf_glon']*_DEGTORAD)
        self._rf_cosphi= numpy.cos(self._drimmelMaps['rf_glon']*_DEGTORAD)
        # Various setups: Sun's Galactocentric position (kpc)
        self._xsun= -8.
        self._zsun= 0.015
        # Global grids: cell counts and cell sizes (kpc)
        self._nx_disk, self._ny_disk, self._nz_disk= 151, 151, 51
        self._dx_disk, self._dy_disk, self._dz_disk= 0.2, 0.2, 0.02
        self._nx_ori, self._ny_ori, self._nz_ori= 76, 151, 51
        self._dx_ori, self._dy_ori, self._dz_ori= 0.05, 0.05, 0.02
        # Local (solar-neighborhood) grids
        self._nx_diskloc, self._ny_diskloc, self._nz_diskloc= 31, 31, 51
        self._dx_diskloc, self._dy_diskloc, self._dz_diskloc= 0.05, 0.05, 0.02
        self._nx_ori2, self._ny_ori2, self._nz_ori2= 101, 201, 51
        self._dx_ori2, self._dy_ori2, self._dz_ori2= 0.02, 0.02, 0.02
        return None

    def _evaluate(self,l,b,d,norescale=False,
                  _fd=1.,_fs=1.,_fo=1.):
        """
        NAME:
           _evaluate
        PURPOSE:
           evaluate the dust-map
        INPUT:
           l- Galactic longitude (deg)
           b- Galactic latitude (deg)
           d- distance (kpc) can be array
           norescale= (False) if True, don't apply re-scalings
           _fd, _fs, _fo= (1.) amplitudes of the different components
        OUTPUT:
           extinction
        HISTORY:
           2013-12-10 - Started - Bovy (IAS)
        """
        if isinstance(l,numpy.ndarray) or isinstance(b,numpy.ndarray):
            raise NotImplementedError("array input for l and b for Drimmel dust map not implemented")
        cl= numpy.cos(l*_DEGTORAD)
        sl= numpy.sin(l*_DEGTORAD)
        cb= numpy.cos(b*_DEGTORAD)
        sb= numpy.sin(b*_DEGTORAD)
        # Output arrays for the three components (same shape as d)
        avori= numpy.zeros_like(d)
        avspir= numpy.zeros_like(d)
        avdisk= numpy.zeros_like(d)
        # Find nearest pixel in COBE map for the re-scaling
        rfIndx= numpy.argmax(cos_sphere_dist(self._rf_sintheta,
                                             self._rf_costheta,
                                             self._rf_sinphi,
                                             self._rf_cosphi,
                                             numpy.sin(numpy.pi/2.-b*_DEGTORAD),
                                             numpy.cos(numpy.pi/2.-b*_DEGTORAD),
                                             sl,cl))
        # The rescaling only applies to the component this COBE pixel was
        # assigned to (rf_comp: 1=disk, 2=spiral, 3=Orion)
        # BUG FIX: original had a stray trailing comma ('= 1., 1., 1,') that
        # made rfori the integer 1 rather than the float 1.
        rfdisk, rfspir, rfori= 1., 1., 1.
        if self._drimmelMaps['rf_comp'][rfIndx] == 1 and not norescale:
            rfdisk= self._drimmelMaps['rf'][rfIndx]
        elif self._drimmelMaps['rf_comp'][rfIndx] == 2 and not norescale:
            rfspir= self._drimmelMaps['rf'][rfIndx]
        elif self._drimmelMaps['rf_comp'][rfIndx] == 3 and not norescale:
            rfori= self._drimmelMaps['rf'][rfIndx]
        # Find maximum distance still covered by the global grids
        dmax= 100.
        if b != 0.: dmax= .49999/numpy.fabs(sb) - self._zsun/sb
        if cl != 0.:
            tdmax= (14.9999/numpy.fabs(cl)-self._xsun/cl)
            if tdmax < dmax: dmax= tdmax
        if sl != 0.:
            tdmax = 14.9999/numpy.fabs(sl)
            if tdmax < dmax: dmax= tdmax
        d= copy.copy(d) # don't clamp the caller's array in place
        d[d > dmax]= dmax
        # Heliocentric rectangular coordinates
        X= d*cb*cl
        Y= d*cb*sl
        Z= d*sb+self._zsun
        # Local grid
        # Orion
        locIndx= (numpy.fabs(X) < 1.)*(numpy.fabs(Y) < 2.)
        if numpy.sum(locIndx) > 0:
            xi = X[locIndx]/self._dx_ori2+float(self._nx_ori2-1)/2.
            yj = Y[locIndx]/self._dy_ori2+float(self._ny_ori2-1)/2.
            zk = Z[locIndx]/self._dz_ori2+float(self._nz_ori2-1)/2.
            avori[locIndx]= map_coordinates(self._drimmelMaps['avori2'],
                                            [xi,yj,zk],
                                            mode='constant',cval=0.)
        # local disk
        locIndx= (numpy.fabs(X) < 0.75)*(numpy.fabs(Y) < 0.75)
        if numpy.sum(locIndx) > 0:
            xi = X[locIndx]/self._dx_diskloc+float(self._nx_diskloc-1)/2.
            yj = Y[locIndx]/self._dy_diskloc+float(self._ny_diskloc-1)/2.
            zk = Z[locIndx]/self._dz_diskloc+float(self._nz_diskloc-1)/2.
            avdisk[locIndx]= map_coordinates(self._drimmelMaps['avdloc'],
                                             [xi,yj,zk],
                                             mode='constant',cval=0.)
        # Go to Galactocentric coordinates
        X= X+self._xsun
        # Stars beyond the local grid
        # Orion; 'True^(A)*(B)' is elementwise NOT(A AND B) ('^' binds looser
        # than '*'), i.e. points outside the local Orion box
        globIndx= True^(numpy.fabs(X-self._xsun) < 1.)*(numpy.fabs(Y) < 2.)
        if numpy.sum(globIndx) > 0:
            # Orion grid is different from other global grids, so has its own dmax
            dmax= 100.
            if b != 0.: dmax= .49999/numpy.fabs(sb) - self._zsun/sb
            if cl > 0.:
                tdmax = (2.374999/numpy.fabs(cl))
                if tdmax < dmax: dmax= tdmax
            if cl < 0.:
                tdmax = (1.374999/numpy.fabs(cl))
                if tdmax < dmax: dmax= tdmax
            if sl != 0.:
                tdmax = (3.749999/numpy.fabs(sl))
                if tdmax < dmax: dmax= tdmax
            dori= copy.copy(d)
            dori[dori > dmax]= dmax
            Xori= dori*cb*cl+self._xsun
            Yori= dori*cb*sl
            Zori= dori*sb+self._zsun
            xi = Xori[globIndx]/self._dx_ori + 2.5*float(self._nx_ori-1)
            yj = Yori[globIndx]/self._dy_ori + float(self._ny_ori-1)/2.
            zk = Zori[globIndx]/self._dz_ori + float(self._nz_ori-1)/2.
            avori[globIndx]= map_coordinates(self._drimmelMaps['avori'],
                                             [xi,yj,zk],
                                             mode='constant',cval=0.)
        # disk & spir (evaluated on the full global grid)
        xi = X/self._dx_disk+float(self._nx_disk-1)/2.
        yj = Y/self._dy_disk+float(self._ny_disk-1)/2.
        zk = Z/self._dz_disk+float(self._nz_disk-1)/2.
        avspir= map_coordinates(self._drimmelMaps['avspir'],
                                [xi,yj,zk],
                                mode='constant',cval=0.)
        globIndx= True^(numpy.fabs(X-self._xsun) < 0.75)*(numpy.fabs(Y) < 0.75)
        if numpy.sum(globIndx) > 0:
            avdisk[globIndx]= map_coordinates(self._drimmelMaps['avdisk'],
                                              [xi,yj,zk],
                                              mode='constant',
                                              cval=0.)[globIndx]
        # Combine the rescaled components and convert A_V -> E(B-V)
        out=_fd*rfdisk*avdisk+_fs*rfspir*avspir+_fo*rfori*avori
        if self._filter is None: # From Rieke & Lebofsky (1985); if sf10, first put ebv on SFD scale
            return out/3.09/((1-self._sf10)+self._sf10*0.86)
        else:
            return out/3.09/((1-self._sf10)+self._sf10*0.86)\
                *aebv(self._filter,sf10=self._sf10)

    def dust_vals_disk(self,lcen,bcen,dist,radius):
        """
        NAME:
           dust_vals_disk
        PURPOSE:
           return the distribution of extinction within a small disk as samples
        INPUT:
           lcen, bcen - Galactic longitude and latitude of the center of the disk (deg)
           dist - distance in kpc
           radius - radius of the disk (deg)
        OUTPUT:
           (pixarea,extinction) - arrays of pixel-area in sq rad and extinction value
        HISTORY:
           2015-03-07 - Written - Bovy (IAS)
        """
        # Convert the disk center to a HEALPIX vector
        vec= healpy.pixelfunc.ang2vec((90.-bcen)*_DEGTORAD,lcen*_DEGTORAD)
        # We pixelize the map with a HEALPIX grid with nside=256, to somewhat
        # oversample the Drimmel resolution
        nside= 256
        # Find the pixels at this resolution that fall within the disk
        ipixs= healpy.query_disc(nside,vec,radius*_DEGTORAD,
                                 inclusive=False,nest=False)
        # All pixels at a given nside have the same area
        pixarea= healpy.pixelfunc.nside2pixarea(nside)+numpy.zeros(len(ipixs))
        extinction= []
        for ipix in ipixs:
            # Get glon and glat (pix2ang returns colatitude in rad)
            b9, l= healpy.pixelfunc.pix2ang(nside,ipix,nest=False)
            b= 90.-b9/_DEGTORAD
            l/= _DEGTORAD
            # Now evaluate
            extinction.append(self._evaluate(l,b,dist))
        extinction= numpy.array(extinction)
        return (pixarea,extinction)

    def fit(self,l,b,dist,ext,e_ext):
        """
        NAME:
           fit
        PURPOSE:
           fit the amplitudes of the disk, spiral, and Orion parts of the
           Drimmel map to other data
        INPUT:
           l,b- Galactic longitude and latitude in degree
           dist - distance in kpc
           ext - extinction at dist
           e_ext - error in extinction
        OUTPUT:
           (fd,fs,fo,dist_stretch) amplitudes of disk, spiral, and Orion parts
           and a 'distance stretch' applied to the model
           (applied as self(l,b,dist*dist_stretch))
        HISTORY:
           2013-12-16 - Written - Bovy (IAS)
        """
        # Fit consists of
        # a) overall amplitude A
        # b) relative amplitude fd/A, fs/A
        # c) distance stretch
        pars= numpy.array([0.,numpy.log(1./3.),numpy.log(1./3.),0.])
        pars=\
            optimize.fmin_powell(_fitFunc,pars,args=(self,l,b,dist,ext,e_ext))
        amp= numpy.exp(pars[0])
        fd= amp*numpy.exp(pars[1])
        fs= amp*numpy.exp(pars[2])
        fo= amp*(1.-fd-fs)
        return (fd,fs,fo,numpy.exp(pars[3]))

    @classmethod
    def download(cls, test=False):
        """Download and unpack the Drimmel grid data next to the package's
        util/drimmeldata directory; no-op if already present."""
        drimmel_folder_path = os.path.abspath(os.path.join(inspect.getfile(cls), "..", "util", "drimmeldata"))
        drimmel_path = os.path.join(drimmel_folder_path, "data-for.tar.gz")
        if not os.path.exists(drimmel_path):
            # BUG FIX: os.mkdir raced with concurrent downloads and failed on
            # missing parents; makedirs(exist_ok=True) handles both
            os.makedirs(drimmel_folder_path, exist_ok=True)
            _DRIMMEL_URL= "https://zenodo.org/record/7340108/files/data-for.tar.gz"
            downloader(_DRIMMEL_URL, drimmel_path, cls.__name__, test=test)
            if not test:
                # 'with' ensures the archive is closed even if extraction fails
                # NOTE(review): trusted archive; consider extractall(..., filter="data")
                # once Python >= 3.12 is the minimum supported version
                with tarfile.open(drimmel_path) as drim_file:
                    drim_file.extractall(drimmel_folder_path)
        return None
285
+
286
+ def _fitFunc(pars,drim,l,b,dist,ext,e_ext):
287
+ amp= numpy.exp(pars[0])
288
+ fd= amp*numpy.exp(pars[1])
289
+ fs= amp*numpy.exp(pars[2])
290
+ fo= amp*(1.-fd-fs)
291
+ dist_stretch= numpy.exp(pars[3])
292
+ model_ext= drim(l,b,dist*dist_stretch,_fd=fd,_fs=fs,_fo=fo)
293
+ return 0.5*numpy.sum((model_ext-ext)**2./e_ext**2.)
mwdust/DustMap3D.py ADDED
@@ -0,0 +1,99 @@
1
+ ###############################################################################
2
+ #
3
+ # DustMap3D: top-level class for a 3D dust map; all other dust maps inherit
4
+ # from this
5
+ #
6
+ ###############################################################################
7
+ import numpy
8
+ try:
9
+ from galpy.util import plot as bovy_plot
10
+ _BOVY_PLOT_LOADED= True
11
+ except ImportError:
12
+ _BOVY_PLOT_LOADED= False
13
+
14
class DustMap3D(object):
    """top-level class for a 3D dust map; all other dust maps inherit from this"""
    def __init__(self, filter=None, **download_kwargs):
        """
        NAME:
           __init__
        PURPOSE:
           Initialize the dust map
        INPUT:
           filter= filter to return the extinction in when called
           download_kwargs= passed through to the subclass' download classmethod
        OUTPUT:
        HISTORY:
           2013-11-24 - Started - Bovy (IAS)
        """
        self._filter= filter
        if hasattr(self, "download"):
            self.download(**download_kwargs) # fetch the map data if not present

    def __call__(self,*args,**kwargs):
        """
        NAME:
           __call__
        PURPOSE:
           evaluate the dust map
        INPUT:
           Either:
              (l,b,d) - Galactic longitude, latitude (deg), and distance (kpc)
        OUTPUT:
        HISTORY:
           2013-11-24 - Started - Bovy (IAS)
        """
        l,b,d= args
        # Promote a scalar distance to a one-element array for _evaluate
        if isinstance(d,(int,float,numpy.float32,numpy.float64)):
            d= numpy.array([d])
        # BUG FIX: previously _evaluate was called inside try/except
        # AttributeError, which silently converted AttributeErrors raised
        # *inside* a subclass' _evaluate into NotImplementedError
        if not hasattr(self,'_evaluate'):
            raise NotImplementedError("'_evaluate' for this DustMap3D not implemented yet")
        return self._evaluate(l,b,d,**kwargs)

    def plot(self,l,b,*args,**kwargs):
        """
        NAME:
           plot
        PURPOSE:
           plot the extinction along a given line of sight as a function of
           distance
        INPUT:
           l,b - Galactic longitude and latitude (degree)
           range= distance range in kpc
           distmod= (False) if True, plot as a function of distance modulus (range is distmod range)
           nds= (101) number of distance points to evaluate
           bovy_plot.plot args and kwargs
        OUTPUT:
           plot to output device
        HISTORY:
           2013-12-11 - Written - Bovy (IAS)
        """
        if not _BOVY_PLOT_LOADED:
            raise NotImplementedError("galpy.util.bovy_plot could not be loaded, so there is no plotting; might have to install galpy (http://github.com/jobovy/galpy) for plotting")
        distmod= kwargs.pop('distmod',False)
        range= kwargs.pop('range',None)
        # BUG FIX: an explicitly supplied range was previously clobbered by
        # [0.,12.] whenever distmod was False; only default when none given
        if range is None:
            range= [4.,19.] if distmod else [0.,12.]
        # BUG FIX: pop (not get) so 'nds' is not forwarded to bovy_plot.plot
        nds= kwargs.pop('nds',101)
        #First evaluate the dust map
        ds= numpy.linspace(range[0],range[1],nds)
        if distmod:
            # convert distance modulus to kpc
            adust= self(l,b,10.**(ds/5.-2.))
        else:
            adust= self(l,b,ds)
        #Add labels
        if distmod:
            kwargs['xlabel']= r'$\mathrm{Distance\ modulus}$'
        else:
            kwargs['xlabel']= r'$D\,(\mathrm{kpc})$'
        if not self._filter is None:
            kwargs['ylabel']= r'$A_{%s}\,(\mathrm{mag})$' % (self._filter.split(' ')[-1])
        else:
            kwargs['ylabel']= r'$E(B-V)\,(\mathrm{mag})$'
        return bovy_plot.plot(ds,adust,*args,**kwargs)

    @classmethod
    def download(cls, test=False):
        # Subclasses override this to fetch their data files
        pass
mwdust/Green15.py ADDED
@@ -0,0 +1,83 @@
1
+ ###############################################################################
2
+ #
3
+ # Green15: extinction model from Green et al. (2015)
4
+ #
5
+ ###############################################################################
6
+ import os, os.path
7
+ import numpy
8
+ import h5py
9
+ from mwdust.util.download import dust_dir, downloader
10
+ from mwdust.HierarchicalHealpixMap import HierarchicalHealpixMap
11
+ _DEGTORAD= numpy.pi/180.
12
+ _greendir= os.path.join(dust_dir, 'green15')
13
class Green15(HierarchicalHealpixMap):
    """extinction model from Green et al. (2015)"""
    def __init__(self,filter=None,sf10=True,load_samples=False,
                 interpk=1):
        """
        NAME:
           __init__
        PURPOSE:
           Initialize the Green et al. (2015) dust map
        INPUT:
           filter= filter to return the extinction in
           sf10= (True) if True, use the Schlafly & Finkbeiner calibrations
           load_samples= (False) if True, also load the samples
           interpk= (1) interpolation order
        OUTPUT:
           object
        HISTORY:
           2015-03-02 - Started - Bovy (IAS)
        """
        HierarchicalHealpixMap.__init__(self,filter=filter,sf10=sf10)
        # Read the map: pixel bookkeeping, best fit, and convergence diagnostic
        with h5py.File(os.path.join(_greendir,'dust-map-3d.h5'),'r') \
                as greendata:
            self._pix_info= greendata['/pixel_info'][:]
            if load_samples:
                self._samples= greendata['/samples'][:]
            self._best_fit= greendata['/best_fit'][:]
            self._GR= greendata['/GRDiagnostic'][:]
        # Utilities: distance-modulus grid and HEALPix resolutions present
        self._distmods= numpy.linspace(4.,19.,31)
        self._minnside= numpy.amin(self._pix_info['nside'])
        self._maxnside= numpy.amax(self._pix_info['nside'])
        nlevels= int(numpy.log2(self._maxnside//self._minnside))+1
        self._nsides= [self._maxnside//2**ii for ii in range(nlevels)]
        self._indexArray= numpy.arange(len(self._pix_info['healpix_index']))
        # For the interpolation
        self._intps= numpy.zeros(len(self._pix_info['healpix_index']),
                                 dtype='object') # array to cache interpolated extinctions
        self._interpk= interpk
        return None

    def substitute_sample(self,samplenum):
        """
        NAME:
           substitute_sample
        PURPOSE:
           substitute a sample for the best fit to get the extinction from a sample with the same tools; need to have setup the instance with load_samples=True
        INPUT:
           samplenum - sample's index to load
        OUTPUT:
           (none; just resets the instance to use the sample rather than the best fit; one cannot go back to the best fit after this)
        HISTORY:
           2015-03-08 - Written - Bovy (IAS)
        """
        # Substitute the sample
        self._best_fit= self._samples[:,samplenum,:]
        # Reset the cache so subsequent queries interpolate the sample
        self._intps= numpy.zeros(len(self._pix_info['healpix_index']),
                                 dtype='object') # array to cache interpolated extinctions
        return None

    @classmethod
    def download(cls, test=False):
        """Download Green et al. PanSTARRS data
        (alt.: http://dx.doi.org/10.7910/DVN/40C44C); no-op if present."""
        green15_path = os.path.join(dust_dir, "green15", "dust-map-3d.h5")
        if not os.path.exists(green15_path):
            # BUG FIX: os.mkdir failed when dust_dir itself was missing and
            # raced with concurrent downloads; makedirs(exist_ok=True) handles both
            os.makedirs(os.path.join(dust_dir, "green15"), exist_ok=True)
            _GREEN15_URL = "https://dataverse.harvard.edu/api/access/datafile/:persistentId?persistentId=doi:10.7910/DVN/40C44C/TERL5A"
            downloader(_GREEN15_URL, green15_path, cls.__name__, test=test)
        return None
mwdust/Green17.py ADDED
@@ -0,0 +1,85 @@
1
+ ###############################################################################
2
+ #
3
+ # Green17: extinction model from Green et al. (2017)
4
+ #
5
+ ###############################################################################
6
+ import os, os.path
7
+ import numpy
8
+ import h5py
9
+ from mwdust.util.download import dust_dir, downloader
10
+ from mwdust.HierarchicalHealpixMap import HierarchicalHealpixMap
11
+ _DEGTORAD= numpy.pi/180.
12
+ _greendir= os.path.join(dust_dir, 'green17')
13
class Green17(HierarchicalHealpixMap):
    """extinction model from Green et al. (2018) [Bayestar17]"""
    def __init__(self,filter=None,sf10=True,load_samples=False,
                 interpk=1):
        """
        NAME:
           __init__
        PURPOSE:
           Initialize the Green et al. (2017) dust map
           The reddening vector is not the one used in Green et al. (2015)
           But instead: Schlafly et al. (2016)
        INPUT:
           filter= filter to return the extinction in
           sf10= (True) if True, use the Schlafly & Finkbeiner calibrations
           load_samples= (False) if True, also load the samples
           interpk= (1) interpolation order
        OUTPUT:
           object
        HISTORY:
           2019-10-09 - Adopted - Rybizki (MPIA)
        """
        HierarchicalHealpixMap.__init__(self,filter=filter,sf10=sf10)
        # Read the map: pixel bookkeeping, best fit, and convergence diagnostic
        with h5py.File(os.path.join(_greendir,'bayestar2017.h5'),'r') \
                as greendata:
            self._pix_info= greendata['/pixel_info'][:]
            if load_samples:
                self._samples= greendata['/samples'][:]
            self._best_fit= greendata['/best_fit'][:]
            self._GR= greendata['/GRDiagnostic'][:]
        # Utilities: distance-modulus grid and HEALPix resolutions present
        self._distmods= numpy.linspace(4,19,31)
        self._minnside= numpy.amin(self._pix_info['nside'])
        self._maxnside= numpy.amax(self._pix_info['nside'])
        nlevels= int(numpy.log2(self._maxnside//self._minnside))+1
        self._nsides= [self._maxnside//2**ii for ii in range(nlevels)]
        self._indexArray= numpy.arange(len(self._pix_info['healpix_index']))
        # For the interpolation
        self._intps= numpy.zeros(len(self._pix_info['healpix_index']),
                                 dtype='object') # array to cache interpolated extinctions
        self._interpk= interpk
        return None

    def substitute_sample(self,samplenum):
        """
        NAME:
           substitute_sample
        PURPOSE:
           substitute a sample for the best fit to get the extinction from a sample with the same tools; need to have setup the instance with load_samples=True
        INPUT:
           samplenum - sample's index to load
        OUTPUT:
           (none; just resets the instance to use the sample rather than the best fit; one cannot go back to the best fit after this)
        HISTORY:
           2019-10-09 - Adopted - Rybizki (MPIA)
        """
        # Substitute the sample
        self._best_fit= self._samples[:,samplenum,:]
        # Reset the cache so subsequent queries interpolate the sample
        self._intps= numpy.zeros(len(self._pix_info['healpix_index']),
                                 dtype='object') # array to cache interpolated extinctions
        return None

    @classmethod
    def download(cls, test=False):
        """Download the Green et al. 2018 (Bayestar17) PanSTARRS data;
        no-op if the file is already present."""
        green17_path = os.path.join(dust_dir, "green17", "bayestar2017.h5")
        if not os.path.exists(green17_path):
            # BUG FIX: os.mkdir failed when dust_dir itself was missing and
            # raced with concurrent downloads; makedirs(exist_ok=True) handles both
            os.makedirs(os.path.join(dust_dir, "green17"), exist_ok=True)
            _GREEN17_URL = "https://dataverse.harvard.edu/api/access/datafile/:persistentId?persistentId=doi:10.7910/DVN/LCYHJG/S7MP4P"
            downloader(_GREEN17_URL, green17_path, cls.__name__, test=test)
        return None