mimical 0.0.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
mimical/__init__.py ADDED
@@ -0,0 +1,5 @@
1
+ from . import fitting
2
+ from . import plotting
3
+
4
+
5
+ from .fitting import BOGfit
@@ -0,0 +1,3 @@
1
+ from .fitter import BOGfit
2
+ from .prior_handler import priorHandler
3
+
@@ -0,0 +1,187 @@
1
+ import numpy as np
2
+ import matplotlib.pyplot as plt
3
+ import petrofit as pf
4
+ from astropy.convolution.utils import discretize_model
5
+ import corner
6
+ from nautilus import Sampler
7
+ import time
8
+ import os
9
+ from astropy.modeling import models
10
+ import pandas as pd
11
+
12
+ from .prior_handler import priorHandler
13
+ from ..plotting import Plotter
14
+
15
+ from ..utils import filter_set
16
+
17
# Output locations for plots and posterior samples, created relative to the
# directory the code is run from.
dir_path = os.getcwd()

# os.makedirs is portable and safe against shell quoting issues, unlike
# shelling out to `mkdir`, and exist_ok=True makes re-runs (or a partially
# created tree, e.g. `bogout` existing without its subdirectories) a no-op.
os.makedirs(os.path.join(dir_path, "bogout", "plots"), exist_ok=True)
os.makedirs(os.path.join(dir_path, "bogout", "posteriors"), exist_ok=True)
22
+
23
+
24
+
25
class BOGfit(object):
    """ Bayesian Observer of Galaxies - Fit.

    Fits multi-filter images of galaxies simultaneously using Petrofit and
    Nautilus by specifying either an individual filter dependency for model
    parameters or a user-specified order polynomial. By default a Sersic
    profile is used, however any Astropy model can be specified.

    Parameters
    ----------

    id : str
        An ID for the fitting run. Only really used for output files.

    images : array
        A 3D array of image data with slices for each filter. Each image
        must be the same shape.

    filt_list : list - str
        A list of path strings to the filter transmission curve files, relative
        to the current working directory. Must be in ascending order with
        effective wavelength.

    psfs : array
        A 3D array of normalised PSF images with slices for each filter. Each
        PSF image must be the same shape, and the same shape as the data images.

    user_prior : dict
        The user specified prior which sets out the priors for the model
        parameters and passes information about whether to let these vary for
        each filter or whether they follow an order-specified polynomial
        relationship.

    astropy_model : astropy.modeling.Fittable2DModel, optional
        Astropy Fittable2DModel used to model the image data. The user prior
        must include only and all parameters in the astropy_model.parameters
        variable, as well as a 'psf_pa' parameter. Defaults to a fresh
        Sersic2D instance per BOGfit object.

    pool : optional
        Pool passed through to the Nautilus sampler for parallel likelihood
        evaluation.
    """

    def __init__(self, id, images, filt_list, psfs, user_prior,
                 astropy_model=None, pool=None):

        self.id = id

        print(f"Fitting object {id}")
        self.images = images
        self.psfs = psfs
        self.user_prior = user_prior

        # NOTE: a default of astropy_model=models.Sersic2D() would be a
        # mutable default argument: one shared instance, created at function
        # definition time and later mutated through convolved_models[i],
        # leaking parameter state between BOGfit instances. Use a None
        # sentinel and build a fresh model per instance instead.
        self.astropy_model = astropy_model if astropy_model is not None else models.Sersic2D()

        # Using the filter files, find the names of the filters and the
        # effective wavelengths (converted to microns).
        self.filter_names = [x.split('/')[-1] for x in filt_list]
        self.wavs = filter_set([dir_path+'/'+x for x in filt_list]).eff_wavs / 1e4

        # Initiate the prior handler object, used to parse and translate
        # priors and parameters.
        self.prior_handler = priorHandler(user_prior, self.filter_names, self.wavs)

        # Translate the user specified prior into a prior parseable by Nautilus.
        self.fitter_prior = self.prior_handler.translate()
        print(f"Fitting with parameters: {self.fitter_prior.keys}")

        self.t0 = time.time()  # timestamp used for call-rate bookkeeping
        self.calls = 0         # number of likelihood evaluations so far

        self.pool = pool

    def lnlike(self, param_dict):
        """ Returns the log-likelihood for a given parameter vector.

        Parameters
        ----------
        param_dict : dict
            A Nautilus sample, mapping fitter parameter names to values.
            Must include 'rms' and 'rms_sersic' noise-model parameters.
        """

        # Translate the parameter vector into model parameters in each filter.
        pars = self.prior_handler.revert(param_dict, self.wavs)

        # Define empty arrays for model and rms images.
        models = np.zeros_like(self.images)
        rms = np.zeros_like(self.images)

        # Loop over filters.
        for i in range(len(self.wavs)):
            # Update the model and evaluate over a pixel grid.
            self.convolved_models[i].parameters = pars[i]
            model = discretize_model(model=self.convolved_models[i],
                                     x_range=[0, self.images[i].shape[1]],
                                     y_range=[0, self.images[i].shape[0]],
                                     mode='center')

            # If, for whatever reason, the model has NaNs, set it to zero and
            # blow up the errors so this sample is heavily disfavoured.
            if np.isnan(np.sum(model)):
                models[i] = np.zeros_like(model)
                rms[i] = np.zeros_like(model) + 1e99

            # Else, store it. The noise model is a constant background rms
            # plus a Poisson-like term scaling with sqrt(model flux).
            else:
                models[i] = model
                rms[i] = param_dict['rms'] + (param_dict['rms_sersic']*np.sqrt(np.abs(model)))

        # Broadcast the 3D data and model arrays and sum the resulting 3D
        # log-likelihood array (independent Gaussian pixels).
        log_like_array = np.log((1/(np.sqrt(2*np.pi*(rms**2))))) + ((-(self.images - models)**2) / (2*(rms**2)))
        log_like = np.sum(log_like_array)

        # Track the number of likelihood calls; refresh the timestamp every
        # 100 calls (kept for call-rate diagnostics).
        self.calls += 1
        if not self.calls % 100:
            self.t0 = time.time()

        return log_like

    def fit(self):
        """ Runs the Nautilus sampler to fit models, and processes its output.

        Returns
        -------
        dict
            Median posterior value for each fitter parameter, keyed as
            '<parameter>_50'.
        """

        # Define a PSF-convolved model for each filter.
        sersic_model = self.astropy_model
        self.convolved_models = []
        for i in range(len(self.wavs)):
            self.convolved_models.append(pf.PSFConvolvedModel2D(sersic_model, psf=self.psfs[i], oversample=(self.images.shape[2]/2, self.images.shape[1]/2, 15, 10)))

        # Check that the user specified prior contains the same parameters as
        # the user specified model.
        if list(self.convolved_models[0].param_names) != list(self.user_prior.keys()):
            raise ValueError("Prior labels do not match model parameters.")

        # If posterior samples were already saved for this object, reuse them
        # rather than re-running the sampler.
        if os.path.isfile(dir_path+'/bogout/posteriors' + f'/{self.id}.txt'):
            samples = pd.read_csv(dir_path+'/bogout/posteriors' + f'/{self.id}.txt', delimiter=' ').to_numpy()

        else:
            # Run the sampler.
            t0 = time.time()
            sampler = Sampler(self.fitter_prior, self.lnlike, n_live=400, pool=self.pool)
            sampler.run(verbose=True, timeout=2700)
            print(f"Sampling time (minutes): {(time.time()-t0)/60}")

            # Extract the weighted posterior points.
            points, log_w, log_l = sampler.posterior()

            # Plot and save the corner plot.
            corner.corner(points, weights=np.exp(log_w), bins=20, labels=np.array(self.fitter_prior.keys), color='purple', plot_datapoints=False, range=np.repeat(0.999, len(self.fitter_prior.keys)))
            plt.savefig(dir_path+'/bogout/plots' + f'/corner_{self.id}.pdf', bbox_inches='tight')

            # Draw an appropriately weighted posterior for representative
            # samples. Normalise the weights explicitly: np.random.choice
            # requires probabilities summing to 1, and exp(log_w) can drift
            # from that by floating-point error.
            n_post = 10000
            weights = np.exp(log_w)
            weights /= np.sum(weights)
            indices = np.random.choice(np.arange(points.shape[0]), size=n_post, p=weights)
            samples = points[indices]
            samples_df = pd.DataFrame(data=samples, columns=self.fitter_prior.keys)
            samples_df.to_csv(dir_path+'/bogout/posteriors' + f'/{self.id}.txt', sep=' ', index=False)

        # Plot and save the median-parameter fit.
        Plotter().plot_median(self.images, self.wavs, self.convolved_models, samples, list(self.fitter_prior.keys), self.prior_handler)
        plt.savefig(dir_path+'/bogout/plots' + f'/{self.id}_best_model.pdf', bbox_inches='tight')

        # Return the median of each parameter's marginal posterior.
        fit_dic = dict(zip((np.array((list(self.fitter_prior.keys)))+"_50").tolist(), np.median(samples, axis=0).tolist()))

        print("Finished.")
        print(" ")

        return fit_dic
@@ -0,0 +1,98 @@
1
+ from nautilus import Prior
2
+ from scipy.stats import norm
3
+ import numpy as np
4
+
5
class priorHandler(object):
    """ Contains functions for translating BOGfit priors into
    Nautilus priors and translating Nautilus samples into model
    parameters in each filter.

    Parameters
    ----------

    user_prior : dict
        The user specified prior which sets out the priors for the model
        parameters and passes information about whether to let these vary
        for each filter ('Individual') or whether they follow an
        order-specified polynomial relationship ('Polynomial').

    filter_names : list - str
        A list of filter names e.g., [F356W, F444W, ...]

    wavs : array
        A 1D array of effective wavelengths corresponding to each filter.
    """

    def __init__(self, user_prior, filter_names, wavs):
        self.user_prior = user_prior
        self.filter_names = filter_names
        self.wavs = wavs

    def translate(self):
        """ Translate a BOGfit prior into a Nautilus prior.

        Returns
        -------
        nautilus.Prior
            Prior with one free parameter per filter ('Individual') or per
            polynomial coefficient ('Polynomial') for each model parameter,
            plus the 'rms' and 'rms_sersic' noise parameters.
        """

        # Initiate the Nautilus prior.
        prior = Prior()

        # Loop over model parameters.
        for key in self.user_prior.keys():
            param_prior_traits = self.user_prior[key]
            param_prior_dist = param_prior_traits[0]
            param_fit_type = param_prior_traits[1]

            # If the user specifies 'Individual', add a fitter free-parameter
            # for each filter.
            if param_fit_type == "Individual":
                for i in range(len(self.wavs)):
                    prior.add_parameter(f'{key}_{self.filter_names[i]}', dist=param_prior_dist)

            # If the user specifies 'Polynomial', add a fitter free-parameter
            # for each coefficient.
            # e.g., for order 0, only one free parameter is included for the
            # whole fitting run, i.e. constant between filters; for order 1,
            # two free parameters are included, i.e. a straight-line
            # relationship with effective wavelength.
            # The lowest wavelength is chosen as the origin.
            elif param_fit_type == "Polynomial":
                prior.add_parameter(key + '_C0', dist=param_prior_dist)
                poly_order = param_prior_traits[2]
                # Prior for gradients and higher-order coefficients.
                higher_order_dist = norm(loc=0, scale=1)
                for i in range(1, poly_order+1):
                    prior.add_parameter(key + f'_C{i}', dist=higher_order_dist)

            else:
                raise ValueError("Fitting type not supported, please choose either 'Individual' or 'Polynomial'.")

        # Add free-parameters for a constant RMS background noise and a
        # constant Sersic Poisson uncertainty scaling.
        prior.add_parameter('rms', dist=(0, 1))
        prior.add_parameter('rms_sersic', dist=(0, 100))

        return prior

    def revert(self, param_dict, wavs):
        """ Translate a Nautilus sample into model parameters for each filter.

        Parameters
        ----------
        param_dict : dict
            A Nautilus sample, mapping fitter parameter names to values.
        wavs : array
            Effective wavelengths at which to evaluate each parameter.

        Returns
        -------
        numpy.ndarray
            Array of shape (n_filters, n_model_params); one row of model
            parameters per filter, columns in user_prior key order.
        """

        # Empty parameter array: one row per filter, one column per model
        # parameter. Sized consistently from the wavs argument.
        params_final = np.zeros((len(wavs), len(self.user_prior)))

        # Loop over model parameters.
        keys = list(self.user_prior.keys())
        for i, key in enumerate(keys):
            param_prior_traits = self.user_prior[key]
            param_fit_type = param_prior_traits[1]

            # If individual, take the Nautilus sample for each filter directly.
            if param_fit_type == "Individual":
                for j in range(len(wavs)):
                    params_final[j, i] = param_dict[f"{key}_{self.filter_names[j]}"]

            # If polynomial, evaluate the polynomial in (wav - wav_min) at
            # each filter's effective wavelength.
            elif param_fit_type == "Polynomial":
                poly_order = param_prior_traits[2]
                coeffs = np.array([param_dict[f"{key}_C{k}"] for k in range(poly_order+1)])
                # Column k holds (wav - wav_min)**k for each filter.
                polywavs = np.power(np.tile(wavs - wavs[0], (poly_order+1, 1)).T, np.arange(poly_order+1))
                params_final[:, i] = np.sum(coeffs * polywavs, axis=1)

        return params_final
@@ -0,0 +1 @@
1
+ from .plotting import Plotter
@@ -0,0 +1,72 @@
1
+ import matplotlib.pyplot as plt
2
+ import numpy as np
3
+ from astropy.convolution.utils import discretize_model
4
+ from tqdm import tqdm
5
+
6
class Plotter(object):
    """ Plotting helpers for visualising BOGfit models against image data. """

    def plot_best(self, images, wavs, convolved_models, samples, fitter_keys, prior_handler):
        """ Plot the data, the median-parameter model and the residual in
        each filter: one column per filter, three rows (data, model, data-model). """

        fig, axes = plt.subplots(3, images.shape[0], figsize=(images.shape[0], 3))

        # Get median Nautilus parameters and translate into median model
        # parameters for each filter.
        param_dict = dict(zip(fitter_keys, np.median(samples, axis=0)))
        pars = prior_handler.revert(param_dict, wavs)

        # Common colour scale taken from the last (reddest) image;
        # loop-invariant, so compute it once rather than per filter.
        v = np.percentile(images[-1], 99.9)

        for i in range(len(wavs)):
            convolved_models[i].parameters = pars[i]
            model = discretize_model(model=convolved_models[i],
                                     x_range=[0, images[i].shape[1]],
                                     y_range=[0, images[i].shape[0]],
                                     mode='center')

            axes[0, i].imshow(images[i], vmax=v, vmin=-v)
            axes[0, i].set_axis_off()

            axes[1, i].imshow(model, vmax=v, vmin=-v)
            axes[1, i].set_axis_off()

            axes[2, i].imshow(images[i]-model, vmax=v, vmin=-v)
            axes[2, i].set_axis_off()

    def plot_median(self, images, wavs, convolved_models, samples, fitter_keys, prior_handler):
        """ Plot the data, the pixel-wise median of the models over all
        posterior samples, and the residual, in each filter. """

        fig, axes = plt.subplots(3, images.shape[0], figsize=(images.shape[0], 3))

        # One model image per (posterior sample, filter).
        models = np.zeros((samples.shape[0], *images.shape))

        print("Computing median model image...")
        for j in tqdm(range(samples.shape[0])):
            # Translate each Nautilus sample into model parameters per filter.
            param_dict = dict(zip(fitter_keys, samples[j]))
            pars = prior_handler.revert(param_dict, wavs)

            for k in range(len(wavs)):
                convolved_models[k].parameters = pars[k]
                model = discretize_model(model=convolved_models[k],
                                         x_range=[0, images[k].shape[1]],
                                         y_range=[0, images[k].shape[0]],
                                         mode='center')
                models[j, k] = model

        # Pixel-wise median over posterior samples.
        median_models = np.median(models, axis=0)

        # Common colour scale taken from the last (reddest) image;
        # loop-invariant, so compute it once rather than per filter.
        v = np.percentile(images[-1], 99.9)

        for i in range(len(wavs)):
            axes[0, i].imshow(images[i], vmax=v, vmin=-v)
            axes[0, i].set_axis_off()

            axes[1, i].imshow(median_models[i], vmax=v, vmin=-v)
            axes[1, i].set_axis_off()

            axes[2, i].imshow(images[i]-median_models[i], vmax=v, vmin=-v)
            axes[2, i].set_axis_off()
@@ -0,0 +1 @@
1
+ from .filter_set import filter_set
@@ -0,0 +1,175 @@
1
+ import numpy as np
2
+
3
+
4
class filter_set(object):
    """ Class for loading and manipulating sets of filter curves. This
    is where integration over filter curves to get photometry happens.

    Parameters
    ----------

    filt_list : list
        List of strings containing paths from the working directory to
        files where filter curves are stored. The filter curve files
        should contain an array of wavelengths in Angstroms followed by
        a column of relative transmission values.
    """

    def __init__(self, filt_list):
        self.filt_list = filt_list
        self.wavelengths = None  # set later by resample_filter_curves
        self._load_filter_curves()
        self._calculate_min_max_wavelengths()
        self._calculate_effective_wavelengths()

    def _load_filter_curves(self):
        """ Loads filter files for the specified filt_list, keeping the
        first two columns (wavelength, transmission) of each. """

        self.filt_dict = {}

        for filt in self.filt_list:
            self.filt_dict[filt] = np.loadtxt(filt, usecols=(0, 1))

    def _calculate_min_max_wavelengths(self):
        """ Finds the min and max wavelength values across all of the
        filter curves, extrapolating two edge-bin widths past each
        curve's first and last samples. """

        # np.inf rather than an arbitrary huge literal for the initial min.
        self.min_phot_wav = np.inf
        self.max_phot_wav = 0.

        for filt in self.filt_list:
            curve = self.filt_dict[filt]
            min_wav = curve[0, 0] - 2*(curve[1, 0] - curve[0, 0])
            max_wav = curve[-1, 0] + 2*(curve[-1, 0] - curve[-2, 0])

            self.min_phot_wav = min(self.min_phot_wav, min_wav)
            self.max_phot_wav = max(self.max_phot_wav, max_wav)

    def _calculate_effective_wavelengths(self):
        """ Calculates effective wavelengths for each filter curve as a
        transmission-weighted mean wavelength. """

        self.eff_wavs = np.zeros(len(self.filt_list))

        for i, filt in enumerate(self.filt_list):
            curve = self.filt_dict[filt]
            dlambda = self.make_bins(curve[:, 0])[1]
            filt_weights = dlambda*curve[:, 1]
            self.eff_wavs[i] = np.sqrt(np.sum(filt_weights*curve[:, 0])
                                       / np.sum(filt_weights
                                                / curve[:, 0]))

    def make_bins(self, midpoints, make_rhs=False):
        """ A general function for turning an array of bin midpoints into an
        array of bin left hand side positions and bin widths. Splits the
        distance between bin midpoints equally in linear space.

        Parameters
        ----------

        midpoints : numpy.ndarray
            Array of bin midpoint positions

        make_rhs : bool
            Whether to add the position of the right hand side of the final
            bin to bin_lhs, defaults to False.

        Returns
        -------
        tuple of numpy.ndarray
            (bin_lhs, bin_widths); bin_lhs has one extra element when
            make_rhs is True.
        """

        bin_widths = np.zeros_like(midpoints)
        if make_rhs:
            # bin_lhs gets one extra slot for the final bin's right edge.
            bin_lhs = np.zeros(midpoints.shape[0]+1)
            bin_lhs[0] = midpoints[0] - (midpoints[1]-midpoints[0])/2
            bin_widths[-1] = (midpoints[-1] - midpoints[-2])
            bin_lhs[-1] = midpoints[-1] + (midpoints[-1]-midpoints[-2])/2
            bin_lhs[1:-1] = (midpoints[1:] + midpoints[:-1])/2
            bin_widths[:-1] = bin_lhs[1:-1]-bin_lhs[:-2]

        else:
            bin_lhs = np.zeros_like(midpoints)
            bin_lhs[0] = midpoints[0] - (midpoints[1]-midpoints[0])/2
            bin_widths[-1] = (midpoints[-1] - midpoints[-2])
            bin_lhs[1:] = (midpoints[1:] + midpoints[:-1])/2
            bin_widths[:-1] = bin_lhs[1:]-bin_lhs[:-1]

        return bin_lhs, bin_widths

    def resample_filter_curves(self, wavelengths):
        """ Resamples the filter curves onto a new set of wavelengths
        and creates a 2D array of filter curves on this sampling.
        Transmission is zero outside each filter's original coverage. """

        self.wavelengths = wavelengths  # Wavelengths for new sampling

        # Array containing filter profiles on the new wavelength sampling.
        self.filt_array = np.zeros((wavelengths.shape[0], len(self.filt_list)))

        # Array containing the width in wavelength space for each point.
        self.widths = self.make_bins(wavelengths)[1]

        for i, filt in enumerate(self.filt_list):
            self.filt_array[:, i] = np.interp(wavelengths,
                                              self.filt_dict[filt][:, 0],
                                              self.filt_dict[filt][:, 1],
                                              left=0, right=0)

    def get_photometry(self, spectrum, redshift, unit_conv=None):
        """ Calculates photometric fluxes. The filters are first re-
        sampled onto the same wavelength grid with transmission values
        blueshifted by (1+z). This is followed by an integration over
        the observed spectrum in the rest frame:

        flux = integrate[(f_lambda*lambda*T(lambda*(1+z))*dlambda)]
        norm = integrate[(lambda*T(lambda*(1+z))*dlambda))]
        photometry = flux/norm

        lambda: rest-frame wavelength array
        f_lambda: observed spectrum
        T(lambda*(1+z)): transmission of blueshifted filters
        dlambda: width of each wavelength bin

        The integrals over all filters are done in one array operation
        to improve the speed of the code.
        """

        if self.wavelengths is None:
            raise ValueError("Please use resample_filter_curves method to set"
                             + " wavelengths before calculating photometry.")

        redshifted_wavs = self.wavelengths*(1. + redshift)

        # Array containing blueshifted filter curves.
        filters_z = np.zeros_like(self.filt_array)

        # Blueshift filter curves to sample the right part of the
        # rest-frame spectrum.
        for i in range(len(self.filt_list)):
            filters_z[:, i] = np.interp(redshifted_wavs, self.wavelengths,
                                        self.filt_array[:, i],
                                        left=0, right=0)

        # Calculate the numerator of the expression.
        flux = np.expand_dims(spectrum*self.widths*self.wavelengths, axis=1)
        flux = np.sum(flux*filters_z, axis=0)

        # Calculate the denominator of the expression.
        norm = filters_z*np.expand_dims(self.widths*self.wavelengths, axis=1)
        norm = np.sum(norm, axis=0)

        photometry = np.squeeze(flux/norm)

        # This is a little dodgy as pointed out by Ivo, it should depend
        # on the spectral shape; however it is only currently used for
        # UVJ mags.
        if unit_conv == "cgs_to_mujy":
            photometry /= (10**-29*2.9979*10**18/self.eff_wavs**2)

        return photometry
@@ -0,0 +1,21 @@
1
+ Metadata-Version: 2.4
2
+ Name: mimical
3
+ Version: 0.0.1
4
+ Summary: Sersic Fitting
5
+ Author: Struan Stevenson
6
+ Author-email: struan.stevenson@ed.ac.uk
7
+ Description-Content-Type: text/markdown
8
+ Requires-Dist: numpy
9
+ Requires-Dist: astropy
10
+ Requires-Dist: matplotlib
11
+ Requires-Dist: nautilus-sampler
12
+ Requires-Dist: petrofit
13
+ Dynamic: author
14
+ Dynamic: author-email
15
+ Dynamic: description
16
+ Dynamic: description-content-type
17
+ Dynamic: requires-dist
18
+ Dynamic: summary
19
+
20
+ # Mimical (Modelling the Intensity of Multiply Imaged CelestiAl light)
21
+
@@ -0,0 +1,12 @@
1
+ mimical/__init__.py,sha256=jPAlI6xX4U07RG_3_BUtr34CN_jQmzuqgduf2QpCAhE,74
2
+ mimical/fitting/__init__.py,sha256=AVsW3xF_8s267G2EZAO7mqrr99Vdg8_AIIBwJkouvoY,68
3
+ mimical/fitting/fitter.py,sha256=QDI0piD7BIpXoP9vrdy5d-kHa8KCgYywfOyKvtAoW0E,7680
4
+ mimical/fitting/prior_handler.py,sha256=yXvxbohS6f94CMRyeIViFc83hRYAj82IZNA5mzhMSRw,4193
5
+ mimical/plotting/__init__.py,sha256=I1ZpQA48g-YPkqwrDGgTrqaTxDUgR8n0mbBX0MdbZtU,30
6
+ mimical/plotting/plotting.py,sha256=OWTR-uOT4UpBw0XcKpl_uFSODuEdcfw7nIk3Sf1cOGY,2646
7
+ mimical/utils/__init__.py,sha256=yolMgYFpvXoHuyV-ijGo26SQ0_niw0gz__iAFgdiQiY,35
8
+ mimical/utils/filter_set.py,sha256=EITLa2c3FG3n1-v7KatKRwHCRyfZ4TTjsi_WM7Bz64k,6819
9
+ mimical-0.0.1.dist-info/METADATA,sha256=wFUCBu6XeHIvu1Ty82NsUYdGoo0vvC_WhCQ8Z1jx6yI,512
10
+ mimical-0.0.1.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
11
+ mimical-0.0.1.dist-info/top_level.txt,sha256=z6HTYpsoNjLUFayXjn8WyjX8C1mIbZw8Arb334cUbwc,8
12
+ mimical-0.0.1.dist-info/RECORD,,
@@ -0,0 +1,5 @@
1
+ Wheel-Version: 1.0
2
+ Generator: setuptools (80.9.0)
3
+ Root-Is-Purelib: true
4
+ Tag: py3-none-any
5
+
@@ -0,0 +1 @@
1
+ mimical