chanter-0.0.1-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,234 @@
+ import numpy as np
+ from astropy.io import fits
+ import os
+
+ max_redshift = 10.
+ igm_redshifts = np.arange(0.0, max_redshift + 0.01, 0.01)
+ igm_wavelengths = np.arange(1.0, 1225.01, 1.0)
+ print(igm_redshifts.shape)
+
+ coefs = np.loadtxt("/Users/struanstevenson/Desktop/research/CHANTER/lyman_series_coefs_inoue_2014_table2.txt")
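+ # Columns of coefs as indexed below (Table 2 of Inoue et al. 2014): column 0 is
+ # the line index, column 1 the Lyman-series transition wavelength, columns 2-4
+ # the LAF coefficients and columns 5-6 the DLA coefficients.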
+
+
+ def get_Inoue14_trans(rest_wavs, z_obs):
+     """ Calculate IGM transmission using Inoue et al. (2014) model. """
+
+     if isinstance(rest_wavs, float):
+         rest_wavs = np.array([rest_wavs])
+
+     tau_LAF_LS = np.zeros((39, rest_wavs.shape[0]))
+     tau_DLA_LS = np.zeros((39, rest_wavs.shape[0]))
+     tau_LAF_LC = np.zeros(rest_wavs.shape[0])
+     tau_DLA_LC = np.zeros(rest_wavs.shape[0])
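+     # The total optical depth is built from four components: Lyman-series (LS)
+     # and Lyman-continuum (LC) absorption by the Lyman-alpha forest (LAF) and
+     # by damped Lyman-alpha systems (DLA), following Inoue et al. (2014).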
+
+     # Populate tau_LAF_LS
+     for j in range(39):
+
+         if z_obs < 1.2:
+             wav_slice = ((rest_wavs*(1.+z_obs) > coefs[j, 1])
+                          & (rest_wavs*(1.+z_obs)
+                             < (1+z_obs)*coefs[j, 1]))
+
+             tau_LAF_LS[j, wav_slice] = (coefs[j, 2]
+                                         * (rest_wavs[wav_slice]
+                                            * (1.+z_obs)/coefs[j, 1])**1.2)
+
+         elif z_obs < 4.7:
+             wav_slice_1 = ((rest_wavs*(1.+z_obs) > coefs[j, 1])
+                            & (rest_wavs*(1.+z_obs) < 2.2*coefs[j, 1]))
+             wav_slice_2 = ((rest_wavs*(1.+z_obs) > 2.2*coefs[j, 1])
+                            & (rest_wavs*(1.+z_obs)
+                               < (1+z_obs)*coefs[j, 1]))
+
+             tau_LAF_LS[j, wav_slice_1] = (coefs[j, 2]
+                                           * (rest_wavs[wav_slice_1]
+                                              * (1.+z_obs)/coefs[j, 1])**1.2)
+
+             tau_LAF_LS[j, wav_slice_2] = (coefs[j, 3]
+                                           * (rest_wavs[wav_slice_2]
+                                              * (1.+z_obs)/coefs[j, 1])**3.7)
+
+         else:
+             wav_slice_1 = ((rest_wavs*(1.+z_obs) > coefs[j, 1])
+                            & (rest_wavs*(1.+z_obs) < 2.2*coefs[j, 1]))
+
+             wav_slice_2 = ((rest_wavs*(1.+z_obs) > 2.2*coefs[j, 1])
+                            & (rest_wavs*(1.+z_obs) < 5.7*coefs[j, 1]))
+
+             wav_slice_3 = ((rest_wavs*(1.+z_obs) > 5.7*coefs[j, 1])
+                            & (rest_wavs*(1.+z_obs)
+                               < (1+z_obs)*coefs[j, 1]))
+
+             tau_LAF_LS[j, wav_slice_1] = (coefs[j, 2]
+                                           * (rest_wavs[wav_slice_1]
+                                              * (1.+z_obs)/coefs[j, 1])**1.2)
+
+             tau_LAF_LS[j, wav_slice_2] = (coefs[j, 3]
+                                           * (rest_wavs[wav_slice_2]
+                                              * (1.+z_obs)/coefs[j, 1])**3.7)
+
+             tau_LAF_LS[j, wav_slice_3] = (coefs[j, 4]
+                                           * (rest_wavs[wav_slice_3]
+                                              * (1.+z_obs)/coefs[j, 1])**5.5)
+
+     # Populate tau_DLA_LS
+     for j in range(39):
+
+         if z_obs < 2.0:
+             wav_slice = ((rest_wavs*(1.+z_obs) > coefs[j, 1])
+                          & (rest_wavs*(1.+z_obs)
+                             < (1+z_obs)*coefs[j, 1]))
+
+             tau_DLA_LS[j, wav_slice] = (coefs[j, 5]
+                                         * (rest_wavs[wav_slice]
+                                            * (1.+z_obs)/coefs[j, 1])**2.0)
+
+         else:
+             wav_slice_1 = ((rest_wavs*(1.+z_obs) > coefs[j, 1])
+                            & (rest_wavs*(1.+z_obs) < 3.0*coefs[j, 1]))
+
+             wav_slice_2 = ((rest_wavs*(1.+z_obs) > 3.0*coefs[j, 1])
+                            & (rest_wavs*(1.+z_obs) < (1+z_obs)
+                               * coefs[j, 1]))
+
+             tau_DLA_LS[j, wav_slice_1] = (coefs[j, 5]
+                                           * (rest_wavs[wav_slice_1]
+                                              * (1.+z_obs)/coefs[j, 1])**2.0)
+
+             tau_DLA_LS[j, wav_slice_2] = (coefs[j, 6]
+                                           * (rest_wavs[wav_slice_2]
+                                              * (1.+z_obs)/coefs[j, 1])**3.0)
+
+     # Populate tau_LAF_LC
+     if z_obs < 1.2:
+         wav_slice = ((rest_wavs*(1.+z_obs) > 911.8)
+                      & (rest_wavs*(1.+z_obs) < 911.8*(1.+z_obs)))
+
+         tau_LAF_LC[wav_slice] = (0.325*((rest_wavs[wav_slice]
+                                          * (1.+z_obs)/911.8)**1.2
+                                         - (((1+z_obs)**-0.9)
+                                            * (rest_wavs[wav_slice]
+                                               * (1.+z_obs)/911.8)**2.1)))
+
+     elif z_obs < 4.7:
+         wav_slice_1 = ((rest_wavs*(1.+z_obs) > 911.8)
+                        & (rest_wavs*(1.+z_obs) < 911.8*2.2))
+
+         wav_slice_2 = ((rest_wavs*(1.+z_obs) > 911.8*2.2)
+                        & (rest_wavs*(1.+z_obs) < 911.8*(1.+z_obs)))
+
+         tau_LAF_LC[wav_slice_1] = (((2.55*10**-2)*((1+z_obs)**1.6)
+                                     * (rest_wavs[wav_slice_1]
+                                        * (1.+z_obs)/911.8)**2.1)
+                                    + (0.325*((rest_wavs[wav_slice_1]
+                                               * (1.+z_obs)/911.8)**1.2))
+                                    - (0.25*((rest_wavs[wav_slice_1]
+                                              * (1.+z_obs)/911.8)**2.1)))
+
+         tau_LAF_LC[wav_slice_2] = ((2.55*10**-2)
+                                    * (((1.+z_obs)**1.6)
+                                       * ((rest_wavs[wav_slice_2]
+                                           * (1.+z_obs)/911.8)**2.1)
+                                       - ((rest_wavs[wav_slice_2]
+                                           * (1.+z_obs)/911.8)**3.7)))
+
+     else:
+         wav_slice_1 = ((rest_wavs*(1.+z_obs) > 911.8)
+                        & (rest_wavs*(1.+z_obs) < 911.8*2.2))
+
+         wav_slice_2 = ((rest_wavs*(1.+z_obs) > 911.8*2.2)
+                        & (rest_wavs*(1.+z_obs) < 911.8*5.7))
+
+         wav_slice_3 = ((rest_wavs*(1.+z_obs) > 911.8*5.7)
+                        & (rest_wavs*(1.+z_obs) < 911.8*(1.+z_obs)))
+
+         tau_LAF_LC[wav_slice_1] = (((5.22*10**-4)*((1+z_obs)**3.4)
+                                     * (rest_wavs[wav_slice_1]
+                                        * (1.+z_obs)/911.8)**2.1)
+                                    + (0.325*(rest_wavs[wav_slice_1]
+                                              * (1.+z_obs)/911.8)**1.2)
+                                    - ((3.14*10**-2)*((rest_wavs[wav_slice_1]
+                                                       * (1.+z_obs)/911.8)**2.1)))
+
+         tau_LAF_LC[wav_slice_2] = (((5.22*10**-4)*((1+z_obs)**3.4)
+                                     * (rest_wavs[wav_slice_2]
+                                        * (1.+z_obs)/911.8)**2.1)
+                                    + (0.218*((rest_wavs[wav_slice_2]
+                                               * (1.+z_obs)/911.8)**2.1))
+                                    - ((2.55*10**-2)*((rest_wavs[wav_slice_2]
+                                                       * (1.+z_obs)
+                                                       / 911.8)**3.7)))
+
+         tau_LAF_LC[wav_slice_3] = ((5.22*10**-4)
+                                    * (((1+z_obs)**3.4)
+                                       * (rest_wavs[wav_slice_3]
+                                          * (1.+z_obs)/911.8)**2.1
+                                       - (rest_wavs[wav_slice_3]
+                                          * (1.+z_obs)/911.8)**5.5))
+
+     # Populate tau_DLA_LC
+     if z_obs < 2.0:
+         wav_slice = ((rest_wavs*(1.+z_obs) > 911.8)
+                      & (rest_wavs*(1.+z_obs) < 911.8*(1.+z_obs)))
+
+         tau_DLA_LC[wav_slice] = (0.211*((1+z_obs)**2.)
+                                  - (7.66*10**-2)*(((1+z_obs)**2.3)
+                                                   * (rest_wavs[wav_slice]
+                                                      * (1.+z_obs)/911.8)**-0.3)
+                                  - 0.135*((rest_wavs[wav_slice]
+                                            * (1.+z_obs)/911.8)**2.0))
+
+     else:
+         wav_slice_1 = ((rest_wavs*(1.+z_obs) > 911.8)
+                        & (rest_wavs*(1.+z_obs) < 911.8*3.0))
+
+         wav_slice_2 = ((rest_wavs*(1.+z_obs) > 911.8*3.0)
+                        & (rest_wavs*(1.+z_obs) < 911.8*(1.+z_obs)))
+
+         tau_DLA_LC[wav_slice_1] = (0.634 + (4.7*10**-2)*(1.+z_obs)**3.
+                                    - ((1.78*10**-2)*((1.+z_obs)**3.3)
+                                       * (rest_wavs[wav_slice_1]
+                                          * (1.+z_obs)/911.8)**-0.3)
+                                    - (0.135*(rest_wavs[wav_slice_1]
+                                              * (1.+z_obs)/911.8)**2.0)
+                                    - 0.291*(rest_wavs[wav_slice_1]
+                                             * (1.+z_obs)/911.8)**-0.3)
+
+         tau_DLA_LC[wav_slice_2] = ((4.7*10**-2)*(1.+z_obs)**3.
+                                    - ((1.78*10**-2)*((1.+z_obs)**3.3)
+                                       * (rest_wavs[wav_slice_2]
+                                          * (1.+z_obs)/911.8)**-0.3)
+                                    - ((2.92*10**-2)
+                                       * (rest_wavs[wav_slice_2]
+                                          * (1.+z_obs)/911.8)**3.0))
+
+     tau_LAF_LS_sum = np.sum(tau_LAF_LS, axis=0)
+     tau_DLA_LS_sum = np.sum(tau_DLA_LS, axis=0)
+
+     tau = tau_LAF_LS_sum + tau_DLA_LS_sum + tau_LAF_LC + tau_DLA_LC
+
+     return np.exp(-tau)
+
+
+ def make_table(z_array, rest_wavs):
+     """ Build the IGM absorption table used by bagpipes. """
+
+     print("BAGPIPES: Generating IGM absorption table.")
+
+     d_IGM_grid = np.zeros((z_array.shape[0], rest_wavs.shape[0]))
+
+     for i in range(z_array.shape[0]):
+         d_IGM_grid[i, :] = get_Inoue14_trans(rest_wavs, z_array[i])
+
+     hdulist = fits.HDUList(hdus=[fits.PrimaryHDU(),
+                                  fits.ImageHDU(name="trans", data=d_IGM_grid),
+                                  fits.ImageHDU(name="wavs", data=rest_wavs),
+                                  fits.ImageHDU(name="zred", data=z_array)])
+
+     if os.path.exists("d_igm_grid_inoue14.fits"):
+         os.remove("d_igm_grid_inoue14.fits")
+
+     hdulist.writeto("d_igm_grid_inoue14.fits")
+
+
+ make_table(igm_redshifts, igm_wavelengths)
@@ -0,0 +1,9 @@
+ import sys
+ import os
+ import glob
+
+ bins = sorted(glob.glob('CHANTER/Miles_Atlas/Chabrier_IMF/*ised'))
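+ # The loop below converts each binary BC03 .ised model to ASCII via the GALAXEV
+ # ascii_ised tool (this assumes the $bc03 environment variable points at the
+ # directory containing the BC03 binaries).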
+
+
+ for i in bins:
+     os.system('$bc03/ascii_ised ~/Desktop/research/' + i)
@@ -0,0 +1,24 @@
+ import numpy as np
+ from astropy.table import Table
+ import pandas as pd
+ import matplotlib.pyplot as plt
+ from astropy.io import fits
+
+
+ base22 = np.genfromtxt('/Users/struanstevenson/Desktop/research/CHANTER/ssp/m22_ssp.csv', delimiter=',')
+ base32 = np.genfromtxt('/Users/struanstevenson/Desktop/research/CHANTER/ssp/m32_ssp.csv', delimiter=',')
+ base42 = np.genfromtxt('/Users/struanstevenson/Desktop/research/CHANTER/ssp/m42_ssp.csv', delimiter=',')
+ base52 = np.genfromtxt('/Users/struanstevenson/Desktop/research/CHANTER/ssp/m52_ssp.csv', delimiter=',')
+ base62 = np.genfromtxt('/Users/struanstevenson/Desktop/research/CHANTER/ssp/m62_ssp.csv', delimiter=',')
+ base72 = np.genfromtxt('/Users/struanstevenson/Desktop/research/CHANTER/ssp/m72_ssp.csv', delimiter=',')
+ base82 = np.genfromtxt('/Users/struanstevenson/Desktop/research/CHANTER/ssp/m82_ssp.csv', delimiter=',')
+
+ master_base = np.array((base22, base32, base42, base52, base62, base72, base82))
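+ # The seven per-metallicity SSP grids stacked above are each written to their
+ # own image HDU below.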
+
+ hdul = fits.HDUList()
+ hdul.append(fits.PrimaryHDU())
+
+ for img in master_base:
+     hdul.append(fits.ImageHDU(data=img))
+
+ hdul.writeto('/Users/struanstevenson/Desktop/research/CHANTER/ssp/ssps.fits')
@@ -0,0 +1,32 @@
+ import numpy as np
+ from astropy.table import Table
+ import pandas as pd
+ import matplotlib.pyplot as plt
+
+ def get_ssp(ascii_file):
+
+
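+     # Parse a BC03 ascii_ised dump: the first row is read as the age grid (its
+     # leading entry is a count and is dropped), the seventh row as the
+     # wavelength grid, and the next 221 rows as the flux at each age; the
+     # leading count column and trailing 53 columns of each flux row are discarded.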
+     ages = pd.read_table(ascii_file, sep=r'\s+', nrows=1, header=None, dtype='float')
+     ages = ages.to_numpy()[0][1:]
+     ages_df = pd.DataFrame(ages, columns=['age'])
+
+     waves = pd.read_table(ascii_file, sep=r'\s+', skiprows=6, nrows=1, header=None, dtype='float')
+     waves = waves.drop(waves.columns[0], axis=1)
+
+     flux = pd.read_table(ascii_file, sep=r'\s+', skiprows=7, nrows=221, header=None)
+     flux = flux.drop(flux.columns[0], axis=1)
+     flux = flux.drop(flux.columns[-53:], axis=1)
+     flux.columns = waves.to_numpy()[0]
+
+
+     base = ages_df.join(flux)
+
+     return base
+
+ specs = ['m22', 'm32', 'm42', 'm52', 'm62', 'm72', 'm82']
+
+
+ for spec in specs:
+     df = get_ssp('/Users/struanstevenson/Desktop/research/CHANTER/ssp/ascii_files/bc2003_hr_xmiless_'+spec+'_chab_ssp.ised_ASCII')
+     #df.to_csv('./CHANTER/ssp/'+spec+'_ssp.csv', index=False)
+
@@ -0,0 +1,8 @@
+ import numpy as np
+ from astropy.table import Table
+ import pandas as pd
+ import matplotlib.pyplot as plt
+ from astropy.io import fits
+
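+ # Quick inspection: print the data array of the eighth HDU (index 7) of output.fits.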
+ hdul = fits.open('output.fits')
+ print(hdul[7].data)