mwdust-1.7-cp314-cp314-win_amd64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
mwdust/Sale14.py ADDED
@@ -0,0 +1,217 @@
+ ###############################################################################
+ #
+ # Sale14: extinction model from Sale et al. 2014 2014MNRAS.443.2907S
+ #
+ ###############################################################################
+ import os, os.path
+ import sys
+ import tarfile
+ import shutil
+ import numpy
+ from scipy import interpolate
+ from astropy.io import ascii
+ from mwdust.util.extCurves import aebv
+ from mwdust.util.tools import cos_sphere_dist
+ from mwdust.util.download import downloader, dust_dir
+ from mwdust.DustMap3D import DustMap3D
+ 
+ _DEGTORAD= numpy.pi/180.
+ _saledir= os.path.join(dust_dir,'sale14')
+ _ERASESTR= " "*80 # blank string long enough to erase the progress line
+ class Sale14(DustMap3D):
+     """extinction model from Sale et al. 2014 2014MNRAS.443.2907S"""
+     def __init__(self,filter=None,sf10=True):
+         """
+         NAME:
+            __init__
+         PURPOSE:
+            Initialize the Sale14 dust map
+         INPUT:
+            filter= filter to return the extinction in
+            sf10= (True) if True, use the Schlafly & Finkbeiner calibrations
+         OUTPUT:
+            object
+         HISTORY:
+            2015-03-08 - Started - Bovy (IAS)
+         """
+         DustMap3D.__init__(self,filter=filter)
+         self._sf10= sf10
+         #Read the maps
+         sys.stdout.write('\r'+"Reading Sale et al. (2014) data file ...\r")
+         sys.stdout.flush()
+         self._saledata= ascii.read(os.path.join(_saledir,'Amap.dat'),
+                                    readme=os.path.join(_saledir,'ReadMe'),
+                                    guess=False, format='cds',
+                                    fill_values=[('', '-999')])
+         sys.stdout.write('\r'+_ERASESTR+'\r')
+         sys.stdout.flush()
+         # Some summaries
+         self._dl= self._saledata['lmax']-self._saledata['lmin']
+         self._db= self._saledata['b_max']-self._saledata['b_min']
+         self._lmin= numpy.amin(self._saledata['lmin'])
+         self._lmax= numpy.amax(self._saledata['lmax'])
+         self._bmin= numpy.amin(self._saledata['b_min'])
+         self._bmax= numpy.amax(self._saledata['b_max'])
+         self._ndistbin= 150
+         self._ds= numpy.linspace(0.05,14.95,self._ndistbin)
+         # For dust_vals
+         self._sintheta= numpy.sin((90.-self._saledata['GLAT'])*_DEGTORAD)
+         self._costheta= numpy.cos((90.-self._saledata['GLAT'])*_DEGTORAD)
+         self._sinphi= numpy.sin(self._saledata['GLON']*_DEGTORAD)
+         self._cosphi= numpy.cos(self._saledata['GLON']*_DEGTORAD)
+         self._intps= numpy.zeros(len(self._saledata),dtype='object') # array to cache interpolated extinctions
+         return None
+ 
+     def _evaluate(self,l,b,d,_lbIndx=None):
+         """
+         NAME:
+            _evaluate
+         PURPOSE:
+            evaluate the dust-map
+         INPUT:
+            l- Galactic longitude (deg)
+            b- Galactic latitude (deg)
+            d- distance (kpc); can be an array
+         OUTPUT:
+            extinction
+         HISTORY:
+            2015-03-08 - Started - Bovy (IAS)
+         """
+         if isinstance(l,numpy.ndarray) or isinstance(b,numpy.ndarray):
+             raise NotImplementedError("array input for l and b for Sale et al. (2014) dust map not implemented")
+         if _lbIndx is None: lbIndx= self._lbIndx(l,b)
+         else: lbIndx= _lbIndx
+         if self._intps[lbIndx] != 0:
+             # Use the cached interpolator for this line of sight
+             out= self._intps[lbIndx](d)
+         else:
+             tlbData= self.lbData(l,b)
+             interpData=\
+                 interpolate.InterpolatedUnivariateSpline(self._ds,
+                                                          tlbData['a0'],
+                                                          k=1)
+             out= interpData(d)
+             self._intps[lbIndx]= interpData
+         if self._filter is None: # Sale et al. say A0/A(Ks) = 11
+             return out/11./aebv('2MASS Ks',sf10=self._sf10)
+         else: # if sf10, first put E(B-V) on the SFD scale
+             return out/11./aebv('2MASS Ks',sf10=self._sf10)\
+                 *aebv(self._filter,sf10=self._sf10)
+ 
+     def dust_vals_disk(self,lcen,bcen,dist,radius):
+         """
+         NAME:
+            dust_vals_disk
+         PURPOSE:
+            return the distribution of extinction within a small disk as samples
+         INPUT:
+            lcen, bcen - Galactic longitude and latitude of the center of the disk (deg)
+            dist - distance in kpc
+            radius - radius of the disk (deg)
+         OUTPUT:
+            (pixarea,extinction) - arrays of pixel area in sq rad and extinction value
+         HISTORY:
+            2015-03-07 - Written - Bovy (IAS)
+         """
+         # Find all of the (l,b) of the pixels within radius of (lcen,bcen)
+         indx= cos_sphere_dist(self._sintheta,self._costheta,
+                               self._sinphi,self._cosphi,
+                               numpy.sin((90.-bcen)*_DEGTORAD),
+                               numpy.cos((90.-bcen)*_DEGTORAD),
+                               numpy.sin(lcen*_DEGTORAD),
+                               numpy.cos(lcen*_DEGTORAD)) \
+             >= numpy.cos(radius*_DEGTORAD)
+         ll= self._saledata['GLON'][indx]
+         bb= self._saledata['GLAT'][indx]
+         # Now get the extinctions for these pixels
+         pixarea= []
+         extinction= []
+         for l,b in zip(ll,bb):
+             lbIndx= self._lbIndx(l,b)
+             extinction.append(self._evaluate(l,b,dist,_lbIndx=lbIndx))
+             pixarea.append(self._dl[lbIndx]*self._db[lbIndx]*_DEGTORAD**2.)
+         pixarea= numpy.array(pixarea)
+         extinction= numpy.array(extinction)
+         return (pixarea,extinction)
+ 
+     def dmax(self,l,b):
+         """
+         NAME:
+            dmax
+         PURPOSE:
+            return the maximum distance out to which to trust the Sale et al. (2014) data
+         INPUT:
+            l- Galactic longitude (deg)
+            b- Galactic latitude (deg)
+         OUTPUT:
+            maximum distance in kpc
+         HISTORY:
+            2015-03-08 - Written - Bovy (IAS)
+         """
+         lbIndx= self._lbIndx(l,b)
+         return self._saledata['trust'][lbIndx]/1000.
+ 
+     def lbData(self,l,b):
+         """
+         NAME:
+            lbData
+         PURPOSE:
+            return the Sale et al. (2014) data corresponding to a given
+            line of sight
+         INPUT:
+            l- Galactic longitude (deg)
+            b- Galactic latitude (deg)
+         OUTPUT:
+            record array with the mean extinction ('a0') and its uncertainty
+            ('e_a0') in each distance bin
+         HISTORY:
+            2015-03-08 - Written - Bovy (IAS)
+         """
+         #Find correct entry
+         lbIndx= self._lbIndx(l,b)
+         #Build output array
+         out= numpy.recarray((self._ndistbin,),
+                             dtype=[('a0', 'f8'),
+                                    ('e_a0','f8')])
+         for ii in range(self._ndistbin):
+             out[ii]['a0']= self._saledata[lbIndx]['meanA%i' % (ii+1)]
+             # Uncertainty in each bin; column name assumed to be 'sdA%i'
+             # following the CDS ReadMe (the line as released assigned the
+             # 'meanA%i' column to both fields)
+             out[ii]['e_a0']= self._saledata[lbIndx]['sdA%i' % (ii+1)]
+         return out
+ 
+     def _lbIndx(self,l,b):
+         """Return the index in the _saledata array corresponding to this (l,b)"""
+         if l <= self._lmin or l >= self._lmax \
+                 or b <= self._bmin or b >= self._bmax:
+             raise IndexError("Given (l,b) pair not within the region covered by the Sale et al. (2014) dust map")
+         return numpy.argmin((l-self._saledata['GLON'])**2./self._dl**2.\
+                             +(b-self._saledata['GLAT'])**2./self._db**2.)
+ 
+     @classmethod
+     def download(cls, test=False):
+         sale_folder_path = os.path.join(dust_dir, "sale14")
+         sale_path = os.path.join(sale_folder_path, "Amap.tar.gz")
+         if not os.path.exists(sale_path[:-6] + "dat"): # i.e., Amap.dat
+             if not os.path.exists(sale_folder_path):
+                 os.mkdir(sale_folder_path)
+             _SALE_URL= "http://www.iphas.org/data/extinction/Amap.tar.gz"
+             downloader(_SALE_URL, sale_path, cls.__name__, test=test)
+             if not test:
+                 sale_file = tarfile.open(sale_path)
+                 sale_file.extractall(sale_folder_path)
+                 sale_file.close()
+                 os.remove(sale_path)
+                 # Fix one line in the dust map
+                 with open(os.path.join(sale_folder_path, "tmp.dat"), "w") as fout:
+                     with open(os.path.join(sale_folder_path, "Amap.dat"), "r") as fin:
+                         for line in fin:
+                             if "15960.40000" in line: # bad line
+                                 newline= ''
+                                 for ii,word in enumerate(line.split(' ')):
+                                     if ii > 0: newline+= ' '
+                                     if ii > 6 and len(word) > 9:
+                                         word= "747.91400"
+                                     newline+= word
+                                 fout.write(newline+'\n')
+                             else:
+                                 fout.write(line)
+                 shutil.move(os.path.join(sale_folder_path, "tmp.dat"), os.path.join(sale_folder_path, "Amap.dat"))
+         return None
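For reference, a minimal usage sketch of the class above, following the mwdust convention that a map instance is evaluated by calling it as map(l, b, d) with l, b in degrees and d in kpc (the sight line and distances below are illustrative):

    import numpy
    import mwdust

    # Build the map, returning extinction in the 2MASS Ks band;
    # the first instantiation downloads and parses Amap.dat
    sale = mwdust.Sale14(filter='2MASS Ks')
    # Extinction at (l, b) = (30, 3) deg for distances of 1, 2, and 3 kpc
    print(sale(30., 3., numpy.array([1., 2., 3.])))
    # Maximum distance (kpc) out to which this sight line can be trusted
    print(sale.dmax(30., 3.))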
mwdust/Zero.py ADDED
@@ -0,0 +1,63 @@
+ ###############################################################################
+ #
+ # Zero: model with zero extinction
+ #
+ ###############################################################################
+ import numpy
+ from mwdust.DustMap3D import DustMap3D
+ _DEGTORAD= numpy.pi/180.
+ class Zero(DustMap3D):
+     """model with zero extinction"""
+     def __init__(self,filter=None,sf10=True):
+         """
+         NAME:
+            __init__
+         PURPOSE:
+            Initialize the zero extinction model
+         INPUT:
+            filter= filter to return the extinction in
+            sf10= (True) if True, use the Schlafly & Finkbeiner calibrations
+         OUTPUT:
+            Instance
+         HISTORY:
+            2015-03-07 - Written - Bovy (IAS)
+         """
+         DustMap3D.__init__(self,filter=filter)
+         self._sf10= sf10
+         return None
+ 
+     def _evaluate(self,l,b,d):
+         """
+         NAME:
+            _evaluate
+         PURPOSE:
+            evaluate the dust-map
+         INPUT:
+            l- Galactic longitude (deg)
+            b- Galactic latitude (deg)
+            d- distance (kpc) can be array
+         OUTPUT:
+            extinction E(B-V)
+         HISTORY:
+            2015-03-07 - Written - Bovy (IAS)
+         """
+         return numpy.zeros_like(d)
+ 
+     def dust_vals_disk(self,lcen,bcen,dist,radius):
+         """
+         NAME:
+            dust_vals_disk
+         PURPOSE:
+            return the distribution of extinction within a small disk as samples
+         INPUT:
+            lcen, bcen - Galactic longitude and latitude of the center of the disk (deg)
+            dist - distance in kpc
+            radius - radius of the disk (deg)
+         OUTPUT:
+            (pixarea,extinction) - arrays of pixel-area in sq rad and extinction value
+         HISTORY:
+            2015-03-06 - Written - Bovy (IAS)
+         """
+         pixarea= (1.-numpy.cos(radius*_DEGTORAD))*2.*numpy.pi
+         return (numpy.array([pixarea]),numpy.zeros((1,len(dist))))
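Zero is useful as a drop-in, extinction-free baseline for any of the 3D maps. A short sketch under the same calling convention:

    import numpy
    import mwdust

    zero = mwdust.Zero(filter='2MASS Ks')
    dists = numpy.array([1., 5., 10.])
    print(zero(30., 3., dists))  # -> [0. 0. 0.]
    # dust_vals_disk returns a single pixel whose area is the solid angle
    # 2*pi*(1 - cos(radius)) of the disk, with zero extinction at all distances
    pixarea, ext = zero.dust_vals_disk(30., 3., dists, 1.)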
mwdust/Zucker25.py ADDED
@@ -0,0 +1,107 @@
+ ###############################################################################
+ #
+ # Zucker25: DECaPS extinction model from Zucker et al. (2025)
+ #
+ ###############################################################################
+ import os, os.path
+ import numpy
+ import h5py
+ from mwdust.util.download import dust_dir, downloader
+ from mwdust.HierarchicalHealpixMap import HierarchicalHealpixMap
+ 
+ _DEGTORAD = numpy.pi/180.
+ _decapsdir = os.path.join(dust_dir, 'zucker25')
+ 
+ class Zucker25(HierarchicalHealpixMap):
+     """DECaPS 3D dust-reddening map (Zucker et al. 2025)"""
+     def __init__(self, filter=None, sf10=True, load_samples=False, interpk=1):
+         """
+         NAME:
+            __init__
+         PURPOSE:
+            Initialize the DECaPS (2025) dust map
+         INPUT:
+            filter= filter to return the extinction in
+            sf10= (True) if True, use the Schlafly & Finkbeiner calibrations
+            load_samples= (False) if True, also load the samples
+            interpk= (1) interpolation order
+         OUTPUT:
+            object
+         HISTORY:
+            2025-10-01 - Adopted
+         """
+         HierarchicalHealpixMap.__init__(self, filter=filter, sf10=sf10, samples=load_samples)
+         if not os.path.isdir(_decapsdir):
+             os.mkdir(_decapsdir)
+         fname = 'decaps_mean_and_samples.h5' if load_samples else 'decaps_mean.h5'
+         fpath = os.path.join(_decapsdir, fname)
+         if not os.path.exists(fpath):
+             self.download(samples=load_samples)
+         self._f = h5py.File(fpath, 'r')
+         self._best_fit = self._f['/mean'][:, 0, :]
+         p = self._f['/pixel_info']
+         hpx = p['healpix_index'][:]
+         # All pixels in this file share a single nside, stored as an attribute
+         nside_attr = int(p.attrs['nside'])
+         pix_dtype = numpy.dtype([('healpix_index', hpx.dtype), ('nside', numpy.int64)])
+         self._pix_info = numpy.empty(hpx.shape[0], dtype=pix_dtype)
+         self._pix_info['healpix_index'] = hpx
+         self._pix_info['nside'] = nside_attr
+         dm = numpy.array(p.attrs['DM_bin_edges'], dtype=numpy.float64)
+         if dm.shape[0] != self._best_fit.shape[1]:
+             raise RuntimeError("DM_bin_edges length does not match radial dimension")
+         self._distmods = dm
+         if load_samples:
+             if 'samples' not in self._f:
+                 raise RuntimeError("Requested load_samples=True, but 'samples' dataset not found.")
+             self._samples_dset = self._f['/samples']
+         else:
+             self._samples_dset = None
+         # Set up the hierarchy of HEALPix resolutions and the interpolation cache
+         self._minnside = numpy.amin(self._pix_info['nside'])
+         self._maxnside = numpy.amax(self._pix_info['nside'])
+         nlevels = int(numpy.log2(self._maxnside // self._minnside)) + 1
+         self._nsides = [self._maxnside // 2**ii for ii in range(nlevels)]
+         self._indexArray = numpy.arange(len(self._pix_info['healpix_index']))
+         self._intps = numpy.zeros(len(self._pix_info['healpix_index']), dtype='object')
+         self._interpk = interpk
+         return None
+ 
+     def substitute_sample(self, samplenum):
+         """
+         NAME:
+            substitute_sample
+         PURPOSE:
+            substitute a sample for the best fit, so the extinction from a
+            single sample can be evaluated with the same tools; requires the
+            instance to have been set up with load_samples=True
+         INPUT:
+            samplenum - index of the sample to load
+         OUTPUT:
+            (none; just resets the instance to use the sample rather than the best fit)
+         HISTORY:
+            2025-10-01 - Adopted
+         """
+         if self._samples_dset is None:
+             raise RuntimeError('No samples present in DECaPS file')
+         self._best_fit = self._samples_dset[:, samplenum, :]
+         # Invalidate the cached interpolators, which refer to the previous map
+         self._intps = numpy.zeros(len(self._pix_info['healpix_index']), dtype='object')
+         return None
+ 
+     @classmethod
+     def download(cls, samples=False, test=False):
+         # Must be the same directory as _decapsdir above, so that __init__
+         # finds the downloaded file (the release used "decaps25" here)
+         subdir = os.path.join(dust_dir, "zucker25")
+         if not os.path.exists(subdir):
+             os.mkdir(subdir)
+         if samples:
+             target = os.path.join(subdir, "decaps_mean_and_samples.h5")
+             url = "https://dataverse.harvard.edu/api/access/datafile/11840498"
+         else:
+             target = os.path.join(subdir, "decaps_mean.h5")
+             url = "https://dataverse.harvard.edu/api/access/datafile/11838924"
+         if not os.path.exists(target):
+             downloader(url, target, cls.__name__, test=test)
+         return None
+ 
+     def __del__(self):
+         try:
+             if hasattr(self, "_f") and self._f:
+                 self._f.close()
+         except Exception:
+             pass
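A usage sketch for the map above; the (l, b) shown is assumed to lie within the DECaPS footprint in the southern Galactic plane, and the sample index is hypothetical:

    import numpy
    import mwdust

    # Mean map only (smaller download)
    decaps = mwdust.Zucker25(filter='2MASS Ks')
    print(decaps(240., -2., numpy.array([1., 2., 4.])))

    # With posterior samples: evaluate a single sample instead of the mean
    decaps_s = mwdust.Zucker25(filter='2MASS Ks', load_samples=True)
    decaps_s.substitute_sample(3)  # hypothetical sample index
    print(decaps_s(240., -2., numpy.array([1., 2., 4.])))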
mwdust/__init__.py ADDED
@@ -0,0 +1,27 @@
+ from mwdust.SFD import SFD
+ from mwdust.Marshall06 import Marshall06
+ from mwdust.Drimmel03 import Drimmel03
+ from mwdust.Sale14 import Sale14
+ from mwdust.Green15 import Green15
+ from mwdust.Green17 import Green17
+ from mwdust.Green19 import Green19
+ from mwdust.Combined15 import Combined15
+ from mwdust.Combined19 import Combined19
+ from mwdust.Zucker25 import Zucker25
+ from mwdust.Zero import Zero
+ 
+ __version__ = "1.7"
+ 
+ 
+ def download_all(test=False):
+     SFD.download(test=test)
+     Marshall06.download(test=test)
+     Drimmel03.download(test=test)
+     if not test: # Sale14 is skipped when only testing the downloads
+         Sale14.download(test=test)
+     Green15.download(test=test)
+     Green17.download(test=test)
+     Green19.download(test=test)
+     Combined15.download(test=test)
+     Combined19.download(test=test)
+     Zucker25.download(test=test)
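Because downloader only writes to disk when test=False (see mwdust/util/download.py below), download_all(test=True) is a cheap way to verify that the data URLs still respond:

    import mwdust

    # Check that every map's data file is reachable without storing anything
    mwdust.download_all(test=True)

    # Or fetch a single map's data explicitly
    mwdust.Sale14.download()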
combine_dustmaps19.py ADDED
@@ -0,0 +1,202 @@
+ # combine_dustmaps19.py: make a composite extinction map (Marshall, Green, Drimmel)
+ # Uses legacy code and only runs in Python 2
+ # Creates a composite map. Run from the command line:
+ #   python combine_dustmaps19.py combine19.sav (runs the combine_dustmap function)
+ #   python combine_dustmaps19.py combine19.sav save (runs the store_h5 function)
+ #   python combine_dustmaps19.py combine19.sav fixnans (runs the fix_nans function)
+ # The first call takes a few hours; it can be tried out quickly by setting
+ # _DRYISHRUN= True first
+ ###############################################################################
+ import sys
+ import os, os.path
+ import pickle
+ import numpy
+ import h5py
+ import healpy
+ import mwdust
+ from mwdust.util.download import dust_dir
+ 
+ from galpy.util import save_pickles
+ 
+ 
+ def distmod2dist(distmod):
+     """distance modulus to distance in kpc"""
+     return 10.**(distmod/5.-2.)
+ 
+ _greendir= os.path.join(dust_dir,'green19')
+ #_GREEN15DISTMODS= numpy.linspace(4.,19.,31) # distance spacing for green15 and green17
+ _GREEN19DISTMODS= numpy.linspace(4.,18.875,120) # distance spacing for green19
+ _GREEN19DISTS= distmod2dist(_GREEN19DISTMODS)
+ _DEGTORAD= numpy.pi/180.
+ _ERASESTR= " "*80 # blank string long enough to erase the progress line
+ _DRYISHRUN= False # set to True to only process a few pixels per level
+ def combine_dustmap(picklename):
+     if os.path.exists(picklename): return None
+     ndists= len(_GREEN19DISTS)
+     # First fill in with NSIDE = 512 for Marshall
+     marshallmap= mwdust.Marshall06()
+     nside_mar= 512
+     mar_pix= numpy.arange(healpy.pixelfunc.nside2npix(nside_mar))
+     mar_val= numpy.zeros((len(mar_pix),ndists))+healpy.UNSEEN
+     theta, phi= \
+         healpy.pixelfunc.pix2ang(nside_mar,mar_pix,nest=True)
+     bb= (numpy.pi/2.-theta)/numpy.pi*180.
+     ll= phi/numpy.pi*180.
+     subIndx= (numpy.fabs(bb) < 10.125)\
+         *((ll < 100.125)+(ll > 259.875))
+     mar_pix= mar_pix[subIndx]
+     mar_val= mar_val[subIndx]
+     ll= ll[subIndx]
+     ll[ll > 180.]-= 360.
+     bb= bb[subIndx]
+     for pp,dpix in enumerate(mar_pix):
+         sys.stdout.write('\r'+"Working on pixel %i, %i remaining ...\r" % (pp+1,len(mar_pix)-pp))
+         sys.stdout.flush()
+         if _DRYISHRUN and pp > 100: break
+         mar_val[pp]= marshallmap(ll[pp],bb[pp],_GREEN19DISTS)
+     sys.stdout.write('\r'+_ERASESTR+'\r')
+     sys.stdout.flush()
+     # Now load Green19 and remove those pixels that fall within the Marshall map
+     with h5py.File(os.path.join(_greendir,'bayestar2019.h5'),'r') as greendata:
+         pix_info= greendata['/pixel_info'][:]
+         best_fit= greendata['/best_fit'][:]
+     # Check whether any of these fall within the Marshall map
+     theta, phi= healpy.pixelfunc.pix2ang(pix_info['nside'].astype('int32'),
+                                          pix_info['healpix_index'].astype('int64'),
+                                          nest=True)
+     inMar= ((phi < 100.125*_DEGTORAD)+(phi > 259.875*_DEGTORAD))\
+         *(numpy.fabs(numpy.pi/2.-theta) < 10.125*_DEGTORAD)
+     best_fit[inMar]= healpy.UNSEEN
+     nside_min= numpy.min(pix_info['nside'])
+     nside_max= numpy.max(pix_info['nside'])
+     # Fill in remaining gaps with Drimmel, starting at NSIDE=256
+     pix_drim= []
+     pix_drim_nside= []
+     val_drim= []
+     drimmelmap= mwdust.Drimmel03()
+     for nside_drim in 2**numpy.arange(8,int(numpy.log2(nside_max))+1,1):
+         tpix= numpy.arange(healpy.pixelfunc.nside2npix(nside_drim))
+         rmIndx= numpy.zeros(len(tpix),dtype='bool')
+         # Remove pixels that already have values at this or a higher level
+         for nside in 2**numpy.arange(8,
+                                      int(numpy.log2(nside_max))+1,1):
+             mult_factor = (nside/nside_drim)**2
+             tgpix= pix_info['healpix_index'][pix_info['nside'] == nside]
+             for offset in numpy.arange(mult_factor):
+                 rmIndx[numpy.in1d(tpix*mult_factor+offset,tgpix,
+                                   assume_unique=True)]= True
+         # Remove pixels that already have values at a lower level
+         for nside in 2**numpy.arange(int(numpy.log2(nside_min)),
+                                      int(numpy.log2(nside_drim)),1):
+             mult_factor = (nside_drim/nside)**2
+             # in Green 19
+             tgpix= pix_info['healpix_index'][pix_info['nside'] == nside]
+             rmIndx[numpy.in1d(tpix//mult_factor,tgpix,assume_unique=False)]= True
+             # in the current Drimmel set
+             tdpix= numpy.array(pix_drim)[numpy.array(pix_drim_nside) == nside]
+             rmIndx[numpy.in1d(tpix//mult_factor,tdpix,assume_unique=False)]= True
+         # Also remove pixels that lie within the Marshall area
+         theta, phi= healpy.pixelfunc.pix2ang(nside_drim,tpix,nest=True)
+         inMar= ((phi < 100.125*_DEGTORAD)+(phi > 259.875*_DEGTORAD))\
+             *(numpy.fabs(numpy.pi/2.-theta) < 10.125*_DEGTORAD)
+         rmIndx[inMar]= True
+         tpix= tpix[~rmIndx]
+         pix_drim.extend(tpix)
+         pix_drim_nside.extend(nside_drim*numpy.ones(len(tpix)))
+         ll= phi[~rmIndx]/_DEGTORAD
+         bb= (numpy.pi/2.-theta[~rmIndx])/_DEGTORAD
+         for pp in range(len(tpix)):
+             sys.stdout.write('\r'+"Working on level %i, pixel %i, %i remaining ...\r" % (nside_drim,pp+1,len(tpix)-pp))
+             sys.stdout.flush()
+             val_drim.append(drimmelmap(ll[pp],bb[pp],_GREEN19DISTS))
+             if _DRYISHRUN and pp > 1000: break
+     sys.stdout.write('\r'+_ERASESTR+'\r')
+     sys.stdout.flush()
+     # Save
+     g19Indx= best_fit[:,0] != healpy.UNSEEN
+     save_pickles(picklename,mar_pix,mar_val,
+                  pix_info['nside'][g19Indx],pix_info['healpix_index'][g19Indx],
+                  best_fit[g19Indx],
+                  pix_drim,pix_drim_nside,val_drim)
+     return None
+ 
+ def store_h5(picklename):
+     # Restore the pickle
+     if not os.path.exists(picklename):
+         print("file %s does not exist!" % picklename)
+         return None
+     with open(picklename,'rb') as picklefile:
+         mar_pix= pickle.load(picklefile)
+         mar_val= pickle.load(picklefile)
+         pix_info_nside= pickle.load(picklefile)
+         pix_info_healpix= pickle.load(picklefile)
+         best_fit= pickle.load(picklefile)
+         pix_drim= pickle.load(picklefile)
+         pix_drim_nside= pickle.load(picklefile)
+         val_drim= pickle.load(picklefile)
+     # Combine
+     nout= len(mar_pix)+len(pix_info_nside)+len(val_drim)
+     out_nside= numpy.empty(nout,dtype='uint32')
+     out_healpix= numpy.empty(nout,dtype='uint64')
+     out_bf= numpy.empty((nout,len(_GREEN19DISTS)),dtype='float64')
+     # Load Marshall
+     out_nside[:len(mar_pix)]= 512
+     out_healpix[:len(mar_pix)]= mar_pix
+     out_bf[:len(mar_pix)]= mar_val
+     # Load Green 19
+     out_nside[len(mar_pix):len(mar_pix)+len(pix_info_nside)]= pix_info_nside
+     out_healpix[len(mar_pix):len(mar_pix)+len(pix_info_nside)]= pix_info_healpix
+     out_bf[len(mar_pix):len(mar_pix)+len(pix_info_nside)]= best_fit
+     # Load Drimmel
+     if _DRYISHRUN:
+         out_nside[len(mar_pix)+len(pix_info_nside):]= pix_drim_nside[:len(val_drim)]
+         out_healpix[len(mar_pix)+len(pix_info_nside):]= pix_drim[:len(val_drim)]
+     else:
+         out_nside[len(mar_pix)+len(pix_info_nside):]= pix_drim_nside
+         out_healpix[len(mar_pix)+len(pix_info_nside):]= pix_drim
+     out_bf[len(mar_pix)+len(pix_info_nside):]= val_drim
+     # Save to h5 file
+     outfile= h5py.File(picklename.replace('.sav','.h5'),"w")
+     pixinfo= numpy.recarray(len(out_nside),
+                             dtype=[('nside','uint32'),
+                                    ('healpix_index','uint64')])
+     pixinfo['nside']= out_nside
+     pixinfo['healpix_index']= out_healpix
+     outfile.create_dataset("pixel_info", data=pixinfo)
+     outfile.create_dataset("best_fit", data=out_bf)
+     outfile.close()
+     return None
+ 
+ def fix_nans(picklename):
+     """Marshall has a few NaNs; replace these with Drimmel"""
+     with h5py.File(picklename.replace('.sav','.h5'),'r') as combdata:
+         pix_info= combdata['/pixel_info'][:]
+         best_fit= combdata['/best_fit'][:]
+     nanIndx= numpy.isnan(best_fit[:,0])
+     print("Found %i NaNs ..." % numpy.sum(nanIndx))
+     theta, phi= healpy.pixelfunc.pix2ang(pix_info['nside'].astype('int32'),
+                                          pix_info['healpix_index'].astype('int64'),
+                                          nest=True)
+     bb= (numpy.pi/2.-theta)/_DEGTORAD
+     ll= phi/_DEGTORAD
+     indices= numpy.arange(len(pix_info['nside']))[nanIndx]
+     drimmelmap= mwdust.Drimmel03()
+     for ii in range(numpy.sum(nanIndx)):
+         # Index the coordinate arrays at the NaN pixels themselves
+         best_fit[indices[ii]]= drimmelmap(ll[indices[ii]],bb[indices[ii]],
+                                           _GREEN19DISTS)
+     # Now check that the NaNs are gone
+     nanIndx= numpy.isnan(best_fit[:,0])
+     print("Found %i NaNs ..." % numpy.sum(nanIndx))
+     # Save to h5 file
+     outfile= h5py.File(picklename.replace('.sav','.h5'),"w")
+     outfile.create_dataset("pixel_info", data=pix_info)
+     outfile.create_dataset("best_fit", data=best_fit)
+     outfile.close()
+     return None
+ 
+ if __name__ == '__main__':
+     if len(sys.argv) > 2 and sys.argv[2] == 'fixnans':
+         fix_nans(sys.argv[1])
+     elif len(sys.argv) > 2:
+         store_h5(sys.argv[1])
+     else:
+         print(sys.argv[1])
+         combine_dustmap(sys.argv[1])
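As a quick sanity check on the distance grid defined at the top of the script, distmod2dist inverts the distance modulus mu = 5 log10(d / 10 pc) and returns kpc:

    import numpy

    def distmod2dist(distmod):
        """distance modulus to distance in kpc"""
        return 10.**(distmod/5.-2.)

    dm = numpy.linspace(4., 18.875, 120)
    print(distmod2dist(dm[0]))   # ~0.063 kpc
    print(distmod2dist(dm[-1]))  # ~59.7 kpc
    print(numpy.diff(dm)[0])     # 0.125 mag bin spacing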
mwdust/util/download.py ADDED
@@ -0,0 +1,41 @@
+ import os
+ import pathlib
+ import shutil
+ import requests
+ import tqdm
+ 
+ dust_dir = os.environ.get("DUST_DIR")
+ if dust_dir is None:
+     dust_dir = os.path.expanduser(os.path.join("~", ".mwdust"))
+ if not os.path.exists(dust_dir):
+     os.mkdir(dust_dir)
+ 
+ 
+ def downloader(url, fullfilename, name, test=False):
+     """
+     url: URL of the data file
+     fullfilename: full local path
+     name: name of the task (shown in the progress bar)
+     test: if True, only check that the URL responds; do not write to disk
+     """
+     user_agent = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/125.0.0.0 Safari/537.36"
+     r = requests.get(
+         url,
+         stream=True,
+         allow_redirects=True,
+         verify=True,
+         headers={"User-Agent": user_agent},
+     )
+     if r.status_code == 404:
+         raise ConnectionError(f"Cannot find {name} data file at {url}")
+     r.raise_for_status()  # raises for any remaining 4xx/5xx status code
+     if not test:
+         file_size = int(r.headers.get("Content-Length", 0))
+         path = pathlib.Path(fullfilename).expanduser().resolve()
+         path.parent.mkdir(parents=True, exist_ok=True)
+ 
+         # Stream the raw response to disk, wrapping r.raw.read in a tqdm
+         # progress bar sized by the Content-Length header
+         with tqdm.tqdm.wrapattr(
+             r.raw, "read", total=file_size, desc=f"Download {name}"
+         ) as r_raw:
+             with path.open("wb") as f:
+                 shutil.copyfileobj(r_raw, f)
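A usage sketch for downloader; the URL and paths below are purely illustrative, not real package data files:

    import os
    from mwdust.util.download import downloader, dust_dir

    url = "https://example.org/some_dustmap.h5"           # hypothetical
    target = os.path.join(dust_dir, "example", "map.h5")  # hypothetical
    downloader(url, target, "Example")             # streams to disk with a progress bar
    downloader(url, target, "Example", test=True)  # only checks that the URL responds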