mwdust-1.6-cp314-cp314-win_amd64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of mwdust has been flagged as possibly problematic.

mwdust/Green19.py ADDED
@@ -0,0 +1,84 @@
+ ###############################################################################
+ #
+ #   Green19: extinction model from Green et al. (2019)
+ #
+ ###############################################################################
+ import os, os.path
+ import numpy
+ import h5py
+ from mwdust.util.download import dust_dir, downloader
+ from mwdust.HierarchicalHealpixMap import HierarchicalHealpixMap
+ _DEGTORAD= numpy.pi/180.
+ _greendir= os.path.join(dust_dir, 'green19')
+ class Green19(HierarchicalHealpixMap):
+     """extinction model from Green et al. (2019)"""
+     def __init__(self,filter=None,sf10=True,load_samples=False,
+                  interpk=1):
+         """
+         NAME:
+            __init__
+         PURPOSE:
+            Initialize the Green et al. (2019) dust map
+            The reddening vector is not the one used in Green et al. (2015),
+            but that of Schlafly et al. (2016)
+         INPUT:
+            filter= filter to return the extinction in
+            sf10= (True) if True, use the Schlafly & Finkbeiner calibrations
+            load_samples= (False) if True, also load the samples
+            interpk= (1) interpolation order
+         OUTPUT:
+            object
+         HISTORY:
+            2019-10-09 - Adopted - Rybizki (MPIA)
+         """
+         HierarchicalHealpixMap.__init__(self,filter=filter,sf10=sf10)
+         #Read the map
+         with h5py.File(os.path.join(_greendir,'bayestar2019.h5'),'r') \
+                 as greendata:
+             self._pix_info= greendata['/pixel_info'][:]
+             if load_samples:
+                 self._samples= greendata['/samples'][:]
+             self._best_fit= greendata['/best_fit'][:]
+         # Utilities
+         self._distmods= numpy.linspace(4,18.875,120)
+         self._minnside= numpy.amin(self._pix_info['nside'])
+         self._maxnside= numpy.amax(self._pix_info['nside'])
+         nlevels= int(numpy.log2(self._maxnside//self._minnside))+1
+         self._nsides= [self._maxnside//2**ii for ii in range(nlevels)]
+         self._indexArray= numpy.arange(len(self._pix_info['healpix_index']))
+         # For the interpolation
+         self._intps= numpy.zeros(len(self._pix_info['healpix_index']),
+                                  dtype='object') #array to cache interpolated extinctions
+         self._interpk= interpk
+         return None
+ 
+     def substitute_sample(self,samplenum):
+         """
+         NAME:
+            substitute_sample
+         PURPOSE:
+            substitute a sample for the best fit to get the extinction from a sample with the same tools; the instance needs to have been set up with load_samples=True
+         INPUT:
+            samplenum - index of the sample to load
+         OUTPUT:
+            (none; just resets the instance to use the sample rather than the best fit; one cannot go back to the best fit after this)
+         HISTORY:
+            2019-10-09 - Adopted - Rybizki (MPIA)
+         """
+         # Substitute the sample
+         self._best_fit= self._samples[:,samplenum,:]
+         # Reset the cache
+         self._intps= numpy.zeros(len(self._pix_info['healpix_index']),
+                                  dtype='object') #array to cache interpolated extinctions
+         return None
+ 
+     @classmethod
+     def download(cls, test=False):
+         # Download Green et al. 2019 PanSTARRS data
+         green19_path = os.path.join(dust_dir, "green19", "bayestar2019.h5")
+         if not os.path.exists(green19_path):
+             if not os.path.exists(os.path.join(dust_dir, "green19")):
+                 os.mkdir(os.path.join(dust_dir, "green19"))
+             _GREEN19_URL = "https://dataverse.harvard.edu/api/access/datafile/:persistentId?persistentId=doi:10.7910/DVN/2EJ9TX/1CUGA1"
+             downloader(_GREEN19_URL, green19_path, cls.__name__, test=test)
+         return None
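A minimal usage sketch for the Green19 map above. It assumes that the DustMap3D base class (not part of this diff) makes map instances callable as map(l, b, d) with l, b in degrees and d in kpc, and that '2MASS H' is a filter name accepted by mwdust.util.extCurves.aebv; neither is confirmed by this diff.

    import numpy
    from mwdust.Green19 import Green19

    # Green19.download() fetches bayestar2019.h5 into dust_dir/green19 if it is missing
    green19 = Green19(filter='2MASS H')                  # extinction returned as A_H (assumed filter name)
    a_h = green19(90., 0., numpy.array([0.5, 1., 2.]))   # assumed callable interface inherited from DustMap3D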
mwdust/HierarchicalHealpixMap.py ADDED
@@ -0,0 +1,224 @@
+ ###############################################################################
+ #
+ #   HierarchicalHealpixMap: General class for extinction maps given as
+ #                           a hierarchical HEALPix pixelation (e.g., Green
+ #                           et al. 2015)
+ #
+ ###############################################################################
+ import numpy
+ from scipy import interpolate
+ from mwdust.util.healpix import ang2pix
+ from mwdust.util.extCurves import aebv
+ from mwdust.DustMap3D import DustMap3D
+ _DEGTORAD= numpy.pi/180.
+ class HierarchicalHealpixMap(DustMap3D):
+     """General class for extinction maps given as a hierarchical HEALPix
+     pixelation (e.g., Green et al. 2015)"""
+     def __init__(self,filter=None,sf10=True, **download_kwargs):
+         """
+         NAME:
+            __init__
+         PURPOSE:
+            Initialize the combined dust map
+         INPUT:
+            filter= filter to return the extinction in
+            sf10= (True) if True, use the Schlafly & Finkbeiner calibrations
+         OUTPUT:
+            object
+         HISTORY:
+            2015-07-28 - Started - Bovy (UofT)
+         """
+         super(HierarchicalHealpixMap, self).__init__(filter=filter, **download_kwargs)
+         self._sf10 = sf10
+         return None
+ 
+ 
+     def _evaluate(self, ls, bs, ds):
+         """
+         NAME:
+            _evaluate
+         PURPOSE:
+            evaluate the dust map for array input
+         INPUT:
+            l- Galactic longitude (deg), can be array
+            b- Galactic latitude (deg), can be array
+            d- distance (kpc), can be array
+         OUTPUT:
+            extinction E(B-V)
+         HISTORY:
+            2015-03-02 - Started - Bovy (IAS)
+            2023-07-05 - Vectorized - Henry Leung (UofT)
+         """
+         ls = numpy.atleast_1d(ls)
+         bs = numpy.atleast_1d(bs)
+         ds = numpy.atleast_1d(ds)
+ 
+         distmod= 5.*numpy.log10(ds)+10.
+         lbIndx= self._lbIndx(ls, bs)
+         if len(ls) == 1 and len(ds) > 1:
+             lbIndx = numpy.tile(lbIndx, len(ds))
+ 
+         result = numpy.zeros_like(ds)
+         for counter, i, d in zip(numpy.arange(len(result)), lbIndx, distmod):
+             if self._intps[i] != 0:
+                 out= self._intps[i](d)
+             else:
+                 interpData=\
+                     interpolate.InterpolatedUnivariateSpline(self._distmods,
+                                                              self._best_fit[i],
+                                                              k=self._interpk)
+                 out= interpData(d)
+                 self._intps[i]= interpData
+             result[counter] = out
+         if self._filter is not None:
+             result = result * aebv(self._filter,sf10=self._sf10)
+         # set nan for invalid indices
+         result[lbIndx==-1] = numpy.nan
+         return result
+ 
+ 
+     def dust_vals_disk(self,lcen,bcen,dist,radius):
+         """
+         NAME:
+            dust_vals_disk
+         PURPOSE:
+            return the distribution of extinction within a small disk as samples
+         INPUT:
+            lcen, bcen - Galactic longitude and latitude of the center of the disk (deg)
+            dist - distance in kpc
+            radius - radius of the disk (deg)
+         OUTPUT:
+            (pixarea,extinction) - arrays of pixel area in sq rad and extinction value
+         HISTORY:
+            2015-03-06 - Written - Bovy (IAS)
+         """
+         try:
+             import healpy
+         except ImportError:
+             raise ModuleNotFoundError("This function requires healpy to be installed")
+         # Convert the disk center to a HEALPix vector
+         vec= healpy.pixelfunc.ang2vec((90.-bcen)*_DEGTORAD,lcen*_DEGTORAD)
+         distmod= 5.*numpy.log10(dist)+10.
+         # Query the HEALPix map for pixels that lie within the disk
+         pixarea= []
+         extinction= []
+         for nside in self._nsides:
+             # Find the pixels at this resolution that fall within the disk
+             ipixs= healpy.query_disc(nside,vec,radius*_DEGTORAD,
+                                      inclusive=False,nest=True)
+             # Get indices of all pixels within the disk at current nside level
+             nsideindx= self._pix_info['nside'] == nside
+             potenIndxs= self._indexArray[nsideindx]
+             nsidepix= self._pix_info['healpix_index'][nsideindx]
+             # Loop through the pixels in the (small) disk
+             tout= []
+             for ii,ipix in enumerate(ipixs):
+                 lbIndx= potenIndxs[ipix == nsidepix]
+                 if numpy.sum(lbIndx) == 0: continue
+                 if self._intps[lbIndx] != 0:
+                     tout.append(self._intps[lbIndx][0](distmod))
+                 else:
+                     interpData=\
+                         interpolate.InterpolatedUnivariateSpline(self._distmods,
+                                                                  self._best_fit[lbIndx],
+                                                                  k=self._interpk)
+                     tout.append(interpData(distmod))
+                     self._intps[lbIndx]= interpData
+             tarea= healpy.pixelfunc.nside2pixarea(nside)
+             tarea= [tarea for ii in range(len(tout))]
+             pixarea.extend(tarea)
+             extinction.extend(tout)
+         pixarea= numpy.array(pixarea)
+         extinction= numpy.array(extinction)
+         if not self._filter is None:
+             extinction= extinction*aebv(self._filter,sf10=self._sf10)
+         return (pixarea,extinction)
+ 
+     def _lbIndx(self, ls, bs):
+         """Return the indices in the _pix_info array corresponding to arrays of (l, b)"""
+         stop_mask = numpy.zeros(len(ls), dtype=bool)  # mask to be accumulated when looping through nside
+         indx_result = numpy.ones(len(ls), dtype=int) * -1  # -1 for bad star, int array has no nan
+         for nside in self._nsides:
+             tpix = ang2pix(nside, (90.-bs)*_DEGTORAD, ls*_DEGTORAD, nest=True)
+             nside_idx = numpy.where(self._pix_info['nside'] == nside)[0]
+             healpix_index_nside = self._pix_info['healpix_index'][nside_idx]
+             sorted_order = numpy.argsort(healpix_index_nside)
+             # use searchsorted to find the index of tpix in healpix_index_nside efficiently
+             result = numpy.searchsorted(healpix_index_nside, tpix, sorter=sorted_order)
+             # need to deal with indices that are larger than the largest healpix_index_nside
+             known_bad_idx = (result == len(nside_idx))
+             result[known_bad_idx] = 0  # wrap around
+             result = sorted_order[result]  # reverse the sorting before indexing
+             result = nside_idx[result]
+             good_result_idx = ((self._pix_info['healpix_index'][result] == tpix) & (self._pix_info['nside'][result] == nside) & (~known_bad_idx))
+             indx_result = numpy.where(~stop_mask & good_result_idx, result, indx_result)
+             indx_result = numpy.where(known_bad_idx & ~stop_mask, -1, indx_result)  # set bad star to -1
+             stop_mask = stop_mask | good_result_idx  # update mask for the next nside
+         return indx_result
+ 
+     def plot_mollweide(self,d,**kwargs):
+         """
+         NAME:
+            plot_mollweide
+         PURPOSE:
+            plot the extinction across the sky in Galactic coordinates out to a given distance using a Mollweide projection
+         INPUT:
+            d - distance in kpc (nearest distance to this in the map is plotted)
+            nside_plot= (2048) nside of the plotted map
+            healpy.visufunc.mollview kwargs
+         OUTPUT:
+            plot to output device
+         HISTORY:
+            2019-12-06 - Written - Bovy (UofT)
+         """
+         try:
+             import healpy
+         except ImportError:
+             raise ModuleNotFoundError("This function requires healpy to be installed")
+         # Distance modulus
+         dm= 5.*numpy.log10(d)+10.
+         # Get factor to apply to map to obtain extinction in object's filter
+         filter_fac= aebv(self._filter,sf10=self._sf10) \
+             if not self._filter is None else 1.
+         # Map the dust map to a common nside, first find nearest distance pixel
+         tpix= numpy.argmin(numpy.fabs(dm-self._distmods))
+         # Construct an empty map at the highest HEALPix resolution present in the map; code snippets adapted from http://argonaut.skymaps.info/usage
+         nside_max= numpy.max(self._pix_info['nside'])
+         npix= healpy.pixelfunc.nside2npix(nside_max)
+         pix_val= numpy.empty(npix,dtype='f8')
+         pix_val[:] = healpy.UNSEEN
+         # Fill the upsampled map
+         for nside in numpy.unique(self._pix_info['nside']):
+             # Get indices of all pixels at current nside level
+             indx= self._pix_info['nside'] == nside
+             # Extract A_X of each selected pixel
+             pix_val_n= filter_fac*self._best_fit[indx,tpix]
+             # Determine nested index of each selected pixel in upsampled map
+             mult_factor = (nside_max//nside)**2
+             pix_idx_n = self._pix_info['healpix_index'][indx]*mult_factor
+             # Write the selected pixels into the upsampled map
+             for offset in range(mult_factor):
+                 pix_val[pix_idx_n+offset] = pix_val_n[:]
+         # If the desired nside is less than the maximum nside in the map, degrade
+         nside_plot= kwargs.get('nside_plot',2048)
+         if not nside_plot is None and nside_plot < nside_max:
+             pix_val= healpy.pixelfunc.ud_grade(pix_val,
+                                                nside_plot,pess=False,
+                                                order_in='NEST',
+                                                order_out='NEST')
+         pix_val[pix_val == healpy.UNSEEN]= -1.
+ 
+         if not self._filter is None:
+             kwargs['unit']= r'$A_{%s}\,(\mathrm{mag})$' % (self._filter.split(' ')[-1])
+         else:
+             kwargs['unit']= r'$E(B-V)\,(\mathrm{mag})$'
+         kwargs['title']= kwargs.get('title',"")
+         healpy.visufunc.mollview(pix_val,
+                                  nest=True,
+                                  xsize=4000,
+                                  min=0.,
+                                  max=numpy.quantile(pix_val,0.99),
+                                  format=r'$%g$',
+                                  cmap='gist_yarg',
+                                  **kwargs)
+         return None
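The _evaluate and dust_vals_disk methods above convert distances to distance moduli and spline-interpolate the per-pixel extinction profile on the map's distance-modulus grid (numpy.linspace(4, 18.875, 120) for Green19). A short sketch of that conversion, using only numpy; the values in the comment are approximate.

    import numpy

    d_kpc = numpy.array([0.5, 1., 5.])   # distances in kpc
    mu = 5.*numpy.log10(d_kpc) + 10.     # distance moduli, roughly [8.5, 10.0, 13.5]
    # the extinction at each distance is then InterpolatedUnivariateSpline(self._distmods,
    # self._best_fit[pixel], k=self._interpk) evaluated at mu, with the spline cached in self._intps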
mwdust/Marshall06.py ADDED
@@ -0,0 +1,265 @@
+ ###############################################################################
+ #
+ #   Marshall06: extinction model from Marshall et al. 2006 2006A&A...453..635M
+ #
+ ###############################################################################
+ import os, os.path
+ import sys
+ import gzip
+ import numpy
+ from scipy import interpolate
+ from astropy.io import ascii
+ from mwdust.util.extCurves import aebv
+ from mwdust.util.tools import cos_sphere_dist
+ from mwdust.util.download import dust_dir, downloader
+ from mwdust.DustMap3D import DustMap3D
+ 
+ try:
+     from galpy.util import plot as bovy_plot
+     _BOVY_PLOT_LOADED= True
+ except ImportError:
+     _BOVY_PLOT_LOADED= False
+ from matplotlib import pyplot
+ _DEGTORAD= numpy.pi/180.
+ _marshalldir= os.path.join(dust_dir,'marshall06')
+ _ERASESTR= " "
+ class Marshall06(DustMap3D):
+     """extinction model from Marshall et al. 2006 2006A&A...453..635M"""
+     def __init__(self,filter=None,sf10=True):
+         """
+         NAME:
+            __init__
+         PURPOSE:
+            Initialize the Marshall06 dust map
+         INPUT:
+            filter= filter to return the extinction in
+            sf10= (True) if True, use the Schlafly & Finkbeiner calibrations
+         OUTPUT:
+            object
+         HISTORY:
+            2013-11-24 - Started - Bovy (IAS)
+         """
+         DustMap3D.__init__(self,filter=filter)
+         self._sf10= sf10
+         #Read the maps
+         sys.stdout.write('\r'+"Reading Marshall et al. (2006) data file ...\r")
+         sys.stdout.flush()
+         self._marshalldata= ascii.read(os.path.join(_marshalldir,
+                                                     'table1.dat'),
+                                        readme=os.path.join(_marshalldir,
+                                                            'ReadMe'),
+                                        guess=False, format='cds',
+                                        fill_values=[('', '-999')])
+         sys.stdout.write('\r'+_ERASESTR+'\r')
+         sys.stdout.flush()
+         #Sort the data on l and then b
+         negIndx= self._marshalldata['GLON'] > 180.
+         self._marshalldata['GLON'][negIndx]= self._marshalldata['GLON'][negIndx]-360.
+         sortIndx= numpy.arange(len(self._marshalldata))
+         keyArray= (self._marshalldata['GLON']+self._marshalldata['GLAT']/100.).data
+         sortIndx= sorted(sortIndx,key=lambda x: keyArray[x])
+         self._marshalldata= self._marshalldata[sortIndx]
+         self._dl= 0.25
+         self._db= 0.25
+         self._intps= numpy.zeros(len(self._marshalldata),dtype='object') #array to cache interpolated extinctions
+         return None
+ 
+     def _evaluate(self,l,b,d):
+         """
+         NAME:
+            _evaluate
+         PURPOSE:
+            evaluate the dust map
+         INPUT:
+            l- Galactic longitude (deg)
+            b- Galactic latitude (deg)
+            d- distance (kpc), can be array
+         OUTPUT:
+            extinction
+         HISTORY:
+            2013-12-12 - Started - Bovy (IAS)
+         """
+         if isinstance(l,numpy.ndarray) or isinstance(b,numpy.ndarray):
+             raise NotImplementedError("array input for l and b for Marshall06 dust map not implemented")
+         lbIndx= self._lbIndx(l,b)
+         if self._intps[lbIndx] != 0:
+             out= self._intps[lbIndx](d)
+         else:
+             tlbData= self.lbData(l,b,addBC=True)
+             interpData=\
+                 interpolate.InterpolatedUnivariateSpline(tlbData['dist'],
+                                                          tlbData['aks'],
+                                                          k=1)
+             out= interpData(d)
+             self._intps[lbIndx]= interpData
+         if self._filter is None:
+             return out/aebv('2MASS Ks',sf10=self._sf10)
+         else:
+             return out/aebv('2MASS Ks',sf10=self._sf10)\
+                 *aebv(self._filter,sf10=self._sf10)
+ 
+     def dust_vals_disk(self,lcen,bcen,dist,radius):
+         """
+         NAME:
+            dust_vals_disk
+         PURPOSE:
+            return the distribution of extinction within a small disk as samples
+         INPUT:
+            lcen, bcen - Galactic longitude and latitude of the center of the disk (deg)
+            dist - distance in kpc
+            radius - radius of the disk (deg)
+         OUTPUT:
+            (pixarea,extinction) - arrays of pixel area in sq rad and extinction value
+         HISTORY:
+            2015-03-07 - Written - Bovy (IAS)
+         """
+         # Find all of the (l,b) of the pixels within radius of (lcen,bcen)
+         lmin= round((lcen-radius-self._dl)/self._dl)*self._dl
+         lmax= round((lcen+radius+self._dl)/self._dl)*self._dl
+         bmin= round((bcen-radius-self._db)/self._db)*self._db
+         bmax= round((bcen+radius+self._db)/self._db)*self._db
+         ls= numpy.arange(lmin,lmax+self._dl,self._dl)
+         bs= numpy.arange(bmin,bmax+self._db,self._db)
+         ll,bb= numpy.meshgrid(ls,bs,indexing='ij')
+         ll= ll.flatten()
+         bb= bb.flatten()
+         indx= cos_sphere_dist(numpy.sin((90.-bb)*_DEGTORAD),
+                               numpy.cos((90.-bb)*_DEGTORAD),
+                               numpy.sin(ll*_DEGTORAD),
+                               numpy.cos(ll*_DEGTORAD),
+                               numpy.sin((90.-bcen)*_DEGTORAD),
+                               numpy.cos((90.-bcen)*_DEGTORAD),
+                               numpy.sin(lcen*_DEGTORAD),
+                               numpy.cos(lcen*_DEGTORAD)) \
+                               >= numpy.cos(radius*_DEGTORAD)
+         ll= ll[indx]
+         bb= bb[indx]
+         # Now get the extinctions for these pixels
+         pixarea= self._dl*self._db*_DEGTORAD**2.+numpy.zeros(numpy.sum(indx))
+         extinction= []
+         for l,b in zip(ll,bb):
+             extinction.append(self._evaluate(l,b,dist))
+         extinction= numpy.array(extinction)
+         return (pixarea,extinction)
+ 
+     def dmax(self,l,b):
+         """
+         NAME:
+            dmax
+         PURPOSE:
+            return the maximum distance for which there is Marshall et al.
+            (2006) data
+         INPUT:
+            l- Galactic longitude (deg)
+            b- Galactic latitude (deg)
+         OUTPUT:
+            maximum distance in kpc
+         HISTORY:
+            2013-12-19 - Started - Bovy (IAS)
+         """
+         tlbData= self.lbData(l,b,addBC=False)
+         return tlbData['dist'][-1]
+ 
+     def lbData(self,l,b,addBC=False):
+         """
+         NAME:
+            lbData
+         PURPOSE:
+            return the Marshall et al. (2006) data corresponding to a given
+            line of sight
+         INPUT:
+            l- Galactic longitude (deg)
+            b- Galactic latitude (deg)
+            addBC= (False) if True, add boundary conditions (extinction is zero at zero distance; extinction is constant after last data point)
+         OUTPUT:
+         HISTORY:
+            2013-12-13 - Written - Bovy (IAS)
+         """
+         #Find correct entry
+         lbIndx= self._lbIndx(l,b)
+         #Build output array
+         out= numpy.recarray((self._marshalldata[lbIndx]['nb']+2*addBC,),
+                             dtype=[('dist', 'f8'),
+                                    ('e_dist', 'f8'),
+                                    ('aks', 'f8'),
+                                    ('e_aks','f8')])
+         if addBC:
+             #Add boundary conditions
+             out[0]['dist']= 0.
+             out[0]['e_dist']= 0.
+             out[0]['aks']= 0.
+             out[0]['e_aks']= 0.
+             out[-1]['dist']= 30.
+             out[-1]['e_dist']= 0.
+             out[-1]['aks']= self._marshalldata[lbIndx]['ext%i' % (self._marshalldata[lbIndx]['nb'])]
+             out[-1]['e_aks']= self._marshalldata[lbIndx]['e_ext%i' % (self._marshalldata[lbIndx]['nb'])]
+         for ii in range(self._marshalldata[lbIndx]['nb']):
+             out[ii+addBC]['dist']= self._marshalldata[lbIndx]['r%i' % (ii+1)]
+             out[ii+addBC]['e_dist']= self._marshalldata[lbIndx]['e_r%i' % (ii+1)]
+             out[ii+addBC]['aks']= self._marshalldata[lbIndx]['ext%i' % (ii+1)]
+             out[ii+addBC]['e_aks']= self._marshalldata[lbIndx]['e_ext%i' % (ii+1)]
+         return out
+ 
+     def plotData(self,l,b,*args,**kwargs):
+         """
+         NAME:
+            plotData
+         PURPOSE:
+            plot the Marshall et al. (2006) extinction values
+            along a given line of sight as a function of
+            distance
+         INPUT:
+            l,b - Galactic longitude and latitude (degree)
+            bovy_plot.plot args and kwargs
+         OUTPUT:
+            plot to output device
+         HISTORY:
+            2013-12-15 - Written - Bovy (IAS)
+         """
+         if not _BOVY_PLOT_LOADED:
+             raise NotImplementedError("galpy.util.bovy_plot could not be loaded, so there is no plotting; might have to install galpy (http://github.com/jobovy/galpy) for plotting")
+         #First get the data
+         tdata= self.lbData(l,b)
+         #Filter
+         if self._filter is None:
+             filterFac= 1./aebv('2MASS Ks',sf10=self._sf10)
+         else:
+             filterFac= 1./aebv('2MASS Ks',sf10=self._sf10)\
+                 *aebv(self._filter,sf10=self._sf10)
+         #Plot
+         out= bovy_plot.plot(tdata['dist'],tdata['aks']*filterFac,
+                             *args,**kwargs)
+         #uncertainties
+         pyplot.errorbar(tdata['dist'],tdata['aks']*filterFac,
+                         xerr=tdata['e_dist'],
+                         yerr=tdata['e_aks']*filterFac,
+                         ls='none',marker=None,color='k')
+         return out
+ 
+     def _lbIndx(self,l,b):
+         """Return the index in the _marshalldata array corresponding to this (l,b)"""
+         if l <= -100.125 or l >= 100.125 or b <= -10.125 or b >= 10.125:
+             raise IndexError("Given (l,b) pair not within the region covered by the Marshall et al. (2006) dust map")
+         lIndx= int(round((l+100.)/self._dl))
+         bIndx= int(round((b+10.)/self._db))
+         return lIndx*81+bIndx
+ 
+     @classmethod
+     def download(cls, test=False):
+         marshall_folder_path = os.path.join(dust_dir, "marshall06")
+         marshall_path = os.path.join(marshall_folder_path, "table1.dat.gz")
+         marshall_readme_path = os.path.join(dust_dir, "marshall06", "ReadMe")
+         if not os.path.exists(marshall_path[:-3]):
+             if not os.path.exists(marshall_folder_path):
+                 os.mkdir(marshall_folder_path)
+             _MARSHALL_URL= "https://cdsarc.cds.unistra.fr/ftp/J/A+A/453/635/table1.dat.gz"
+             downloader(_MARSHALL_URL, marshall_path, cls.__name__, test=test)
+             if not test:
+                 with open(marshall_path, "rb") as inf, open(os.path.join(marshall_folder_path, "table1.dat"), "w", encoding="utf8") as tof:
+                     decom_str = gzip.decompress(inf.read()).decode("utf-8")
+                     tof.write(decom_str)
+                 os.remove(marshall_path)
+         if not os.path.exists(marshall_readme_path):
+             _MARSHALL_README_URL= "https://cdsarc.cds.unistra.fr/ftp/J/A+A/453/635/ReadMe"
+             downloader(_MARSHALL_README_URL, marshall_readme_path, f"{cls.__name__} (ReadMe)", test=test)
+         return None
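A hypothetical usage sketch for Marshall06. It assumes the map(l, b, d) callable interface from DustMap3D (not part of this diff); _evaluate above only accepts scalar l and b, and _lbIndx restricts coverage to roughly |l| < 100.125 deg and |b| < 10.125 deg.

    import numpy
    from mwdust.Marshall06 import Marshall06

    marshall = Marshall06(filter='2MASS Ks')   # extinction returned as A_Ks (filter name used in the code above)
    dmax = marshall.dmax(30., 1.)              # farthest distance with data along (l, b) = (30, 1)
    dists = numpy.linspace(0.1, dmax, 50)
    a_ks = marshall(30., 1., dists)            # assumed callable interface; d can be an array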
mwdust/SFD.py ADDED
@@ -0,0 +1,72 @@
+ ###############################################################################
+ #
+ #   SFD: Schlegel, Finkbeiner, & Davis (1998) dust map (2D)
+ #
+ ###############################################################################
+ import os
+ import numpy
+ from mwdust.util.download import downloader
+ from mwdust.util.read_SFD import read_SFD_EBV
+ from mwdust.util.extCurves import aebv
+ from mwdust.util.download import downloader, dust_dir
+ from mwdust.DustMap3D import DustMap3D
+ 
+ class SFD(DustMap3D):
+     """Schlegel, Finkbeiner, & Davis (1998) dust map (2D)"""
+     def __init__(self,filter=None,sf10=True,interp=True,noloop=False):
+         """
+         NAME:
+            __init__
+         PURPOSE:
+            Initialize the SFD dust map
+         INPUT:
+            sf10= (True) if True, use the Schlafly & Finkbeiner calibrations
+            filter= filter to return the extinction in
+            interp= (True) if True, interpolate using the nearest pixels
+            noloop= (False) if True, don't loop through the glons
+         OUTPUT:
+            object
+         HISTORY:
+            2013-11-24 - Started - Bovy (IAS)
+         """
+         DustMap3D.__init__(self,filter=filter)
+         self._sf10= sf10
+         self._interp= interp
+         self._noloop= noloop
+         return None
+ 
+     def _evaluate(self,l,b,d):
+         """
+         NAME:
+            _evaluate
+         PURPOSE:
+            evaluate the dust map
+         INPUT:
+            l- Galactic longitude (deg)
+            b- Galactic latitude (deg)
+            d- distance (kpc)
+         OUTPUT:
+            extinction
+         HISTORY:
+            2013-11-24 - Started - Bovy (IAS)
+         """
+         tebv= read_SFD_EBV(l,b,interp=self._interp,
+                            noloop=self._noloop,verbose=False)
+         if self._filter is None:
+             return tebv*numpy.ones_like(d)
+         else:
+             return tebv*aebv(self._filter,sf10=self._sf10)*numpy.ones_like(d)
+ 
+     @classmethod
+     def download(cls, test=False):
+         sfd_ngp_path = os.path.join(dust_dir, "maps", "SFD_dust_4096_ngp.fits")
+         if not os.path.exists(sfd_ngp_path):
+             if not os.path.exists(os.path.join(dust_dir, "maps")):
+                 os.mkdir(os.path.join(dust_dir, "maps"))
+             _SFD_URL_NGP= "https://svn.sdss.org/public/data/sdss/catalogs/dust/trunk/maps/SFD_dust_4096_ngp.fits"
+             downloader(_SFD_URL_NGP, sfd_ngp_path, "SFD (NGP)", test=test)
+         sfd_sgp_path = os.path.join(dust_dir, "maps", "SFD_dust_4096_sgp.fits")
+         if not os.path.exists(sfd_sgp_path):
+             _SFD_URL_SGP= "https://svn.sdss.org/public/data/sdss/catalogs/dust/trunk/maps/SFD_dust_4096_sgp.fits"
+             downloader(_SFD_URL_SGP, sfd_sgp_path, "SFD (SGP)", test=test)
+         return None
+ return None