DiadFit-0.0.69.tar.gz → DiadFit-0.0.70.tar.gz

This diff shows the contents of publicly released versions of this package as they appear in their public registry, and is provided for informational purposes only.
Files changed (27)
  1. {DiadFit-0.0.69 → DiadFit-0.0.70}/PKG-INFO +1 -1
  2. {DiadFit-0.0.69 → DiadFit-0.0.70}/src/DiadFit/CO2_EOS.py +3 -0
  3. {DiadFit-0.0.69 → DiadFit-0.0.70}/src/DiadFit/H2O_fitting.py +107 -95
  4. {DiadFit-0.0.69 → DiadFit-0.0.70}/src/DiadFit/_version.py +1 -1
  5. {DiadFit-0.0.69 → DiadFit-0.0.70}/src/DiadFit/densimeters.py +59 -6
  6. {DiadFit-0.0.69 → DiadFit-0.0.70}/src/DiadFit/diads.py +7 -0
  7. {DiadFit-0.0.69 → DiadFit-0.0.70}/src/DiadFit/ne_lines.py +9 -9
  8. {DiadFit-0.0.69 → DiadFit-0.0.70}/src/DiadFit.egg-info/PKG-INFO +1 -1
  9. {DiadFit-0.0.69 → DiadFit-0.0.70}/README.md +0 -0
  10. {DiadFit-0.0.69 → DiadFit-0.0.70}/setup.cfg +0 -0
  11. {DiadFit-0.0.69 → DiadFit-0.0.70}/setup.py +0 -0
  12. {DiadFit-0.0.69 → DiadFit-0.0.70}/src/DiadFit/CO2_in_bubble_error.py +0 -0
  13. {DiadFit-0.0.69 → DiadFit-0.0.70}/src/DiadFit/Highrho_polyfit_data.pkl +0 -0
  14. {DiadFit-0.0.69 → DiadFit-0.0.70}/src/DiadFit/Lowrho_polyfit_data.pkl +0 -0
  15. {DiadFit-0.0.69 → DiadFit-0.0.70}/src/DiadFit/Mediumrho_polyfit_data.pkl +0 -0
  16. {DiadFit-0.0.69 → DiadFit-0.0.70}/src/DiadFit/Psensor.py +0 -0
  17. {DiadFit-0.0.69 → DiadFit-0.0.70}/src/DiadFit/__init__.py +0 -0
  18. {DiadFit-0.0.69 → DiadFit-0.0.70}/src/DiadFit/argon_lines.py +0 -0
  19. {DiadFit-0.0.69 → DiadFit-0.0.70}/src/DiadFit/cosmicray_filter.py +0 -0
  20. {DiadFit-0.0.69 → DiadFit-0.0.70}/src/DiadFit/densimeter_fitting.py +0 -0
  21. {DiadFit-0.0.69 → DiadFit-0.0.70}/src/DiadFit/density_depth_crustal_profiles.py +0 -0
  22. {DiadFit-0.0.69 → DiadFit-0.0.70}/src/DiadFit/error_propagation.py +0 -0
  23. {DiadFit-0.0.69 → DiadFit-0.0.70}/src/DiadFit/importing_data_files.py +0 -0
  24. {DiadFit-0.0.69 → DiadFit-0.0.70}/src/DiadFit.egg-info/SOURCES.txt +0 -0
  25. {DiadFit-0.0.69 → DiadFit-0.0.70}/src/DiadFit.egg-info/dependency_links.txt +0 -0
  26. {DiadFit-0.0.69 → DiadFit-0.0.70}/src/DiadFit.egg-info/requires.txt +0 -0
  27. {DiadFit-0.0.69 → DiadFit-0.0.70}/src/DiadFit.egg-info/top_level.txt +0 -0
PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: DiadFit
- Version: 0.0.69
+ Version: 0.0.70
  Summary: DiadFit
  Home-page: https://github.com/PennyWieser/DiadFit
  Author: Penny Wieser
src/DiadFit/CO2_EOS.py
@@ -430,6 +430,9 @@ def calculate_P_for_rho_T(CO2_dens_gcm3, T_K, EOS='SW96', Sample_ID=None):
  if Sample_ID is not None:
  df['Sample_ID']=Sample_ID

+ # Replace infinities with nan
+ df = df.replace([np.inf, -np.inf], np.nan)
+
  return df

  # Calculating P for a given density and Temperature using Coolprop
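The three added lines in calculate_P_for_rho_T swap infinite pressures for NaN before the DataFrame is returned. A minimal sketch of that pandas idiom on a toy table (the column names below are illustrative, not the ones DiadFit returns):

```python
import numpy as np
import pandas as pd

# Toy result table where one calculation blew up to infinity (illustrative values)
df = pd.DataFrame({'P_kbar': [1.2, np.inf, -np.inf], 'T_K': [1300.0, 1350.0, 1400.0]})

# Same pattern as the added line: map +/-inf to NaN so downstream
# statistics and plots simply skip the bad rows
df = df.replace([np.inf, -np.inf], np.nan)
print(df)
```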
src/DiadFit/H2O_fitting.py
@@ -13,7 +13,7 @@ from numpy import trapz
  from scipy.integrate import simps
  ##
  def extract_xstal_MI_name(*, files, char_xstal, pos_xstal, char_MI, pos_MI,
- prefix=True, str_prefix=" ", file_type='.txt'):
+ prefix=True, str_prefix=" ", file_ext='.txt'):

  """ Extracts the names of the crystal and MI samples from a list of filenames

@@ -44,8 +44,8 @@ def extract_xstal_MI_name(*, files, char_xstal, pos_xstal, char_MI, pos_MI,

  file_simple=pf.extracting_filenames_generic(names=files,
  prefix=prefix, str_prefix=str_prefix,
- file_type=file_type)
-
+ file_ext=file_ext)
+


  xstal=np.empty(len(file_simple), dtype=object)
@@ -62,16 +62,16 @@ def extract_xstal_MI_name(*, files, char_xstal, pos_xstal, char_MI, pos_MI,



- def find_olivine_peak_trough_pos(smoothed_ol_y, x_new, height=1):
+ def find_host_peak_trough_pos(smoothed_host_y, x_new, height=1):

- """" This function identifies the peaks and troughs in the Olivine spectra
+ """" This function identifies the peaks and troughs in the host mineral spectra

  Parameters
  -----------

- path: smoothed_ol_y
- Olivine spectra y values after applying a cubic spline, and trimming to the spectra region around the peaks
- (from function smooth_and_trim_around_olivine)
+ path: smoothed_host_y
+ Host spectra y values after applying a cubic spline, and trimming to the spectra region around the peaks
+ (from function smooth_and_trim_around_host)
  x_new: X values corresponding to y values in smoothed_ol_y

  height: int
@@ -85,34 +85,34 @@ def find_olivine_peak_trough_pos(smoothed_ol_y, x_new, height=1):

  """
  # Find peaks with Scipy
- peaks_Ol = find_peaks(smoothed_ol_y, height)
- peak_height_Ol_unsort=peaks_Ol[1]['peak_heights']
- peak_pos_Ol_unsort = x_new[peaks_Ol[0]]
+ peaks_Host= find_peaks(smoothed_host_y, height)
+ peak_height_Host_unsort=peaks_Host[1]['peak_heights']
+ peak_pos_Host_unsort = x_new[peaks_Host[0]]

- df_peaks=pd.DataFrame(data={'pos': peak_pos_Ol_unsort,
- 'height': peak_height_Ol_unsort})
+ df_peaks=pd.DataFrame(data={'pos': peak_pos_Host_unsort,
+ 'height': peak_height_Host_unsort})
  df_peaks_sort=df_peaks.sort_values('height', axis=0, ascending=False)
  df_peak_sort_short1=df_peaks_sort[0:2]
  df_peak_sort_short=df_peak_sort_short1.sort_values('pos', axis=0, ascending=True)
- peak_pos_Ol=df_peak_sort_short['pos'].values
- peak_height_Ol=df_peak_sort_short['height'].values
+ peak_pos_Host=df_peak_sort_short['pos'].values
+ peak_height_Host=df_peak_sort_short['height'].values


  # Find troughs - e..g find minimum point +3 from the 1st peak, -3 units from the 2nd peak
- trim_y_cub_Ol=smoothed_ol_y[(x_new>(peak_pos_Ol[0]+3)) & (x_new<(peak_pos_Ol[1]-3))]
- trim_x=x_new[(x_new>(peak_pos_Ol[0]+3)) & (x_new<(peak_pos_Ol[1]-3))]
+ trim_y_cub_Host=smoothed_host_y[(x_new>(peak_pos_Host[0]+3)) & (x_new<(peak_pos_Host[1]-3))]
+ trim_x=x_new[(x_new>(peak_pos_Host[0]+3)) & (x_new<(peak_pos_Host[1]-3))]


- trough_y=np.min(trim_y_cub_Ol)
- trough_x=trim_x[trim_y_cub_Ol==trough_y]
+ trough_y=np.min(trim_y_cub_Host)
+ trough_x=trim_x[trim_y_cub_Host==trough_y]


- return peak_pos_Ol, peak_height_Ol, trough_y, trough_x
+ return peak_pos_Host, peak_height_Host, trough_y, trough_x

- def smooth_and_trim_around_olivine(filename=None, x_range=[800,900], x_max=900, Ol_spectra=None,
+ def smooth_and_trim_around_host(filename=None, x_range=[800,900], x_max=900, Host_spectra=None,
  MI_spectra=None, plot_figure=True):
  """
- Takes melt inclusion and olivine spectra, and trims into the region around the olivine peaks,
+ Takes melt inclusion and host spectra, and trims into the region around the host peaks,
  and fits a cubic spline (used for unmixing spectra)

  Parameters
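The renamed find_host_peak_trough_pos keeps the olivine version's logic: scipy's find_peaks proposes candidates, the two tallest are kept and ordered by position, and the trough is the minimum of the curve between them. A self-contained sketch of that pattern on a synthetic two-peak spectrum (the Gaussian test data are made up for illustration):

```python
import numpy as np
import pandas as pd
from scipy.signal import find_peaks

# Synthetic host spectrum with peaks near 825 and 855 cm-1 (illustrative only)
x_new = np.linspace(800, 900, 2000)
y = np.exp(-(x_new - 825)**2 / 8) + 0.8 * np.exp(-(x_new - 855)**2 / 8)

# Find candidate peaks, keep the two tallest, then sort them by position
peaks = find_peaks(y, height=0.1)
df_peaks = pd.DataFrame({'pos': x_new[peaks[0]], 'height': peaks[1]['peak_heights']})
two_tallest = df_peaks.sort_values('height', ascending=False)[0:2].sort_values('pos')
peak_pos = two_tallest['pos'].values

# Trough = minimum of the curve between the peaks, offset 3 units in from each peak
between = (x_new > peak_pos[0] + 3) & (x_new < peak_pos[1] - 3)
trough_y = np.min(y[between])
trough_x = x_new[between][y[between] == trough_y]
print(peak_pos, trough_x, trough_y)
```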
@@ -120,8 +120,8 @@ def smooth_and_trim_around_olivine(filename=None, x_range=[800,900], x_max=900,
  x_range: list
  range of x coordinates to smooth between (e.g. [800, 900] by default

- Ol_spectra: nd.array
- numpy array of olivine spectra (x is wavenumber, y is intensity)
+ Host_spectra: nd.array
+ numpy array of host spectra (x is wavenumber, y is intensity)

  MI_spectra: nd.array
  numpy array of melt inclusion spectra (x is wavenumber, y is intensity)
@@ -135,19 +135,19 @@ def smooth_and_trim_around_olivine(filename=None, x_range=[800,900], x_max=900,
  -----------
  x_new: x coordinates of smoothed curves
  y_cub_MI: smoothed y coordinates using a cubic spline for MI
- y_cub_Ol: smoothed y coordinates using a cubic spline for Ol
+ y_cub_Host: smoothed y coordinates using a cubic spline for Ol

- peak_pos_Ol: x coordinates of 2 olivine peaks
- peak_height_Ol: y coordinates of 2 olivine peaks
+ peak_pos_Host: x coordinates of 2 host peaks
+ peak_height_Host: y coordinates of 2 host peaks
  trough_x: x coordinate of minimum point between peaks
  trough_y: y coordinate of minimum point between peaks
  """
  x_min=x_range[0]
  x_max=x_range[1]
  # Trim to region of interest
- Filt_Ol=Ol_spectra[~(
- (Ol_spectra[:, 0]<x_min) |
- (Ol_spectra[:, 0]>x_max)
+ Filt_Host=Host_spectra[~(
+ (Host_spectra[:, 0]<x_min) |
+ (Host_spectra[:, 0]>x_max)
  )]
  Filt_MI=MI_spectra[~(
  (MI_spectra[:, 0]<x_min) |
@@ -157,60 +157,60 @@ def smooth_and_trim_around_olivine(filename=None, x_range=[800,900], x_max=900,
  # Fit spline to data

  x_MI=Filt_MI[:, 0]
- x_Ol=Filt_Ol[:, 0]
+ x_Host=Filt_Host[:, 0]

  y_MI=Filt_MI[:, 1]
- y_Ol=Filt_Ol[:, 1]
+ y_Host=Filt_Host[:, 1]


  # Fit a cubic spline
  f2_MI = interp1d(x_MI, y_MI, kind='cubic')
- f2_Ol = interp1d(x_Ol, y_Ol, kind='cubic')
+ f2_Host = interp1d(x_Host, y_Host, kind='cubic')

- x_new=np.linspace(min(x_Ol),max(x_Ol), 100000)
+ x_new=np.linspace(min(x_Host),max(x_Host), 100000)

  y_cub_MI=f2_MI(x_new)
- y_cub_Ol=f2_Ol(x_new)
+ y_cub_Host=f2_Host(x_new)

  # Plot peaks and troughs on this to check they are right
- peak_pos_Ol, peak_height_Ol, trough_y, trough_x=find_olivine_peak_trough_pos(
- smoothed_ol_y=y_cub_Ol, x_new=x_new, height=1)
+ peak_pos_Host, peak_height_Host, trough_y, trough_x=find_host_peak_trough_pos(
+ smoothed_host_y=y_cub_Host, x_new=x_new, height=1)


  if plot_figure is True:
  fig, (ax1, ax2) = plt.subplots(1, 2, figsize=(8,3.5))
  if filename is not None:
  fig.suptitle('file='+filename)
- ax1.plot(Ol_spectra[:, 0], Ol_spectra[:, 1], '-g', label='Ol Spectra')
+ ax1.plot(Host_spectra[:, 0], Host_spectra[:, 1], '-g', label='Host Spectra')
  ax1.plot(MI_spectra[:, 0], MI_spectra[:, 1], '-',
  color='salmon', label='MI Spectra')

  ax2.plot(Filt_MI[:, 0], Filt_MI[:, 1], '+', color='salmon')
- ax2.plot(Filt_Ol[:, 0], Filt_Ol[:, 1], '+g')
- ax2.plot(x_new, y_cub_MI, '-', color='salmon', label='MI Spectra')
- ax2.plot(x_new, y_cub_Ol, '-g', label='Ol Spectra')
- ax2.plot(peak_pos_Ol, peak_height_Ol, '*k',mfc='yellow', ms=10, label='Peaks')
+ ax2.plot(Filt_Host[:, 0], Filt_Host[:, 1], '+g')
+ ax2.plot(x_new, y_cub_MI, '-', color='salmon')
+ ax2.plot(x_new, y_cub_Host, '-g')
+ ax2.plot(peak_pos_Host, peak_height_Host, '*k',mfc='yellow', ms=10, label='Peaks')
  ax2.plot(trough_x, trough_y, 'dk', mfc='cyan', ms=10, label='Trough')

  ax1.set_xlabel('Wavenumber (cm$^{-1}$)')
  ax2.set_xlabel('Wavenumber (cm$^{-1}$)')
  ax1.set_ylabel('Intensity')
- ax2.legend(bbox_to_anchor=(0., 1.02, 1., .102), loc='lower left',
- ncol=2, mode="expand", borderaxespad=0.)
+ ax1.legend(fontsize=8)
+ ax2.legend(fontsize=8)

- return x_new, y_cub_MI, y_cub_Ol, peak_pos_Ol, peak_height_Ol, trough_x, trough_y, fig
+ return x_new, y_cub_MI, y_cub_Host, peak_pos_Host, peak_height_Host, trough_x, trough_y, fig
  else:
- return x_new, y_cub_MI, y_cub_Ol, peak_pos_Ol, peak_height_Ol, trough_x, trough_y
+ return x_new, y_cub_MI, y_cub_Host, peak_pos_Host, peak_height_Host, trough_x, trough_y



- ## Unmix the olivine
+ ## Unmix the host
  def trough_or_peak_higher(spectra_x, spectra_y, peak_pos_x,
  trough_pos_x, trough_pos_y, av_width=1, plot=False,
  print_result=False):
  """
  This function assesses whether the line between the 2 peaks is above or below the trough position
- Called by a loop to select the optimum unmixing ratio for olivine and melt
+ Called by a loop to select the optimum unmixing ratio for host and melt


  Parameters
@@ -219,11 +219,11 @@ def trough_or_peak_higher(spectra_x, spectra_y, peak_pos_x,

  spectra_y: y coordinates of spectra to test

- peak_pos_x: x positions of 2 olivine peaks (from find_olivne_peak_trough_pos)
+ peak_pos_x: x positions of 2 host peaks (from find_host_peak_trough_pos)

- trough_pos_x: x position of trough (from find_olivne_peak_trough_pos)
+ trough_pos_x: x position of trough (from find_host_peak_trough_pos)

- trough_pos_y: y position of trough (from find_olivne_peak_trough_pos)
+ trough_pos_y: y position of trough (from find_host_peak_trough_pos)

  av_width: averages +- 1 width either side of the peak and troughs when doing assesment and regression

@@ -301,36 +301,36 @@ def trough_or_peak_higher(spectra_x, spectra_y, peak_pos_x,


  # Now lets mix up spectra
- def make_evaluate_mixed_spectra(*, path, filename, smoothed_Ol_y, smoothed_MI_y,
- Ol_spectra, MI_spectra, x_new, peak_pos_Ol,
+ def make_evaluate_mixed_spectra(*, path, filename, smoothed_host_y, smoothed_MI_y,
+ Host_spectra, MI_spectra, x_new, peak_pos_Host,
  trough_x, trough_y, N_steps=20, av_width=2,
  X_min=0, X_max=1, plot_figure=True, dpi=200):

  """
- This function unmixes glass and olivine spectra, and fits the best fit proportion where the olivine peak and trough disapears. Specifically, it calculates the mixed spectra by taking the measured MI spectra and subtracting X*Ol spectra, where X is the mixing proportions
+ This function unmixes glass and host spectra, and fits the best fit proportion where the host peak and trough disapears. Specifically, it calculates the mixed spectra by taking the measured MI spectra and subtracting X*Ol spectra, where X is the mixing proportions

  Parameters
  -----------
- smoothed_Ol_y: np.array
- y coordinates of olivine around peak region (from the function smooth_and_trim_around_olivine)
+ smoothed_host_y: np.array
+ y coordinates of host around peak region (from the function smooth_and_trim_around_host)

  smoothed_MI_y: np.array
- y coordinates of melt inclusion around peak region (from the function smooth_and_trim_around_olivine)
+ y coordinates of melt inclusion around peak region (from the function smooth_and_trim_around_host)

  x_new: np.array
- x coordinates from smoothed Ol and MI curves (from the function smooth_and_trim_around_olivine)
+ x coordinates from smoothed Ol and MI curves (from the function smooth_and_trim_around_host)

- Ol_Spectra: np.array
- Full olivine spectra, not trimmed or smoothed (from the function get_data)
+ Host_Spectra: np.array
+ Full host spectra, not trimmed or smoothed (from the function get_data)

  MI_Spectra: np.array
  Full MI spectra, not trimmed or smoothed (from the function get_data)

- peak_pos_Ol: list
- Peak positions (x) of Olivine peaks (from the function smooth_and_trim_around_olivine)
+ peak_pos_Host: list
+ Peak positions (x) of Olivine peaks (from the function smooth_and_trim_around_host)

  trough_x: float, int
- Peak position (x) of Olivine trough (from the function smooth_and_trim_around_olivine)
+ Peak position (x) of Olivine trough (from the function smooth_and_trim_around_host)

  x_min: float or int
  Minimum mixing proportion allowed
@@ -349,11 +349,11 @@ def make_evaluate_mixed_spectra(*, path, filename, smoothed_Ol_y, smoothed_MI_y,
  Returns:
  -----------
  MI_Mix_Best: np.array
- Spectra of best-fit unmixed spectra (e.g. where olivine peak and trough the smallest)
+ Spectra of best-fit unmixed spectra (e.g. where host peak and trough the smallest)
  ideal_mix: float
  Best fit mixing proportion (i.e. X)
  Dist: float
- Vertical distance between the olivine peak and trough (in intensity units)
+ Vertical distance between the host peak and trough (in intensity units)
  MI_Mix: np.array
  Umixed spectra for each of the N_steps
  X: np.array
@@ -376,11 +376,11 @@ def make_evaluate_mixed_spectra(*, path, filename, smoothed_Ol_y, smoothed_MI_y,
  # Geochemistry style mix
  #MI_Mix[i, :]=(smoothed_MI_y- smoothed_Ol_y*X[i])/(1-X[i])
  # True subtraction mix from Smith 2021
- MI_Mix[i, :]=smoothed_MI_y- smoothed_Ol_y*X[i]
+ MI_Mix[i, :]=smoothed_MI_y- smoothed_host_y*X[i]

  Dist[i]=trough_or_peak_higher(spectra_x=x_new,
  spectra_y=MI_Mix[i, :],
- peak_pos_x=peak_pos_Ol,
+ peak_pos_x=peak_pos_Host,
  trough_pos_x=trough_x,
  trough_pos_y=trough_y,
  av_width=2)
@@ -399,8 +399,8 @@ def make_evaluate_mixed_spectra(*, path, filename, smoothed_Ol_y, smoothed_MI_y,
  #print('best fit proportion')
  #print(ideal_mix)

- MI_Mix_Best_syn=(smoothed_MI_y-smoothed_Ol_y*ideal_mix)/(1-ideal_mix)
- MI_Mix_Best=(MI_spectra- Ol_spectra*ideal_mix)/(1-ideal_mix)
+ MI_Mix_Best_syn=(smoothed_MI_y-smoothed_host_y*ideal_mix)/(1-ideal_mix)
+ MI_Mix_Best=(MI_spectra- Host_spectra*ideal_mix)/(1-ideal_mix)

  if plot_figure is True:

@@ -408,8 +408,8 @@ def make_evaluate_mixed_spectra(*, path, filename, smoothed_Ol_y, smoothed_MI_y,
  fig.suptitle('file='+filename)
  for i in range(0, N_steps):
  ax1.plot(x_new, MI_Mix[i, :], '-k')
- ax1.plot([peak_pos_Ol[0], peak_pos_Ol[0]], [0.7, 1.5], '-', color='yellow')
- ax1.plot([peak_pos_Ol[1], peak_pos_Ol[1]], [0.7, 1.5], '-', color='yellow')
+ ax1.plot([peak_pos_Host[0], peak_pos_Host[0]], [0.7, 1.5], '-', color='yellow')
+ ax1.plot([peak_pos_Host[1], peak_pos_Host[1]], [0.7, 1.5], '-', color='yellow')
  ax1.plot([trough_x, trough_x], [0.7, 1.5], '-', color='cyan')


@@ -424,13 +424,13 @@ def make_evaluate_mixed_spectra(*, path, filename, smoothed_Ol_y, smoothed_MI_y,

  ax3.plot(MI_spectra[:, 0],MI_Mix_Best[:, 1], '-k')
  ax3.plot(MI_spectra[:, 0],MI_spectra[:, 1], '-', color='salmon')
- ax3.plot(Ol_spectra[:, 0],Ol_spectra[:, 1], '-', color='g')
+ ax3.plot(Host_spectra[:, 0],Host_spectra[:, 1], '-', color='g')
  ax3.set_xlim([775, 900])


  ax4.plot(MI_spectra[:, 0],MI_Mix_Best[:, 1], '-k', label='Umixed glass')
  ax4.plot(MI_spectra[:, 0],MI_spectra[:, 1], '-', color='salmon',label='Measured MI')
- ax4.plot(Ol_spectra[:, 0],Ol_spectra[:, 1], '-', color='g', label='Measured Ol')
+ ax4.plot(Host_spectra[:, 0],Host_spectra[:, 1], '-', color='g', label='Measured Host')
  ax4.legend()
  ax3.set_xlabel('Wavenumber (cm$^{-1}$')
  ax4.set_xlabel('Wavenumber (cm$^{-1}$')
@@ -445,17 +445,17 @@ def make_evaluate_mixed_spectra(*, path, filename, smoothed_Ol_y, smoothed_MI_y,


  file=filename
- fig.savefig(path3+'/'+'Ol_Glass_Umixing_{}.png'.format(filename), dpi=dpi)
+ fig.savefig(path3+'/'+'Host_Glass_Umixing_{}.png'.format(filename), dpi=dpi)

  return MI_Mix_Best, ideal_mix, Dist, MI_Mix, X


  ## Fitting silica and water peak areas
- def check_if_spectra_negative(*, path, filename, Spectra=None, peak_pos_Ol=None, tie_x_cord=2000,
+ def check_if_spectra_negative(*, path, filename, Spectra=None, peak_pos_Host=None, tie_x_cord=2000,
  override=False, flip=False, plot_figure=True, dpi=200):
  """
  This function checks if the unmixed specta is negative, based on two tie points.
- The first tie point is the mean y coordinate of the peak position of olivine +5 wavenumbers,
+ The first tie point is the mean y coordinate of the peak position of host +5 wavenumbers,
  and the second tie point (tie_x_cord) is an optional input. If the specta is inverte, this function inverts it.


@@ -464,11 +464,11 @@ override=False, flip=False, plot_figure=True, dpi=200):
  Spectra: np.array
  Spectra from the function make_evaluate_mixed_spectra

- peak_pos_Ol: list
- Olivine peak positions from the function find_olivine_peak_trough_pos
+ peak_pos_Host: list
+ Host peak positions from the function find_host_peak_trough_pos

  tie_x_cord: int or float
- X cooordinate to use as a tie point to ask whether the olivine peak's y coordinate is higher or lower than this.
+ X cooordinate to use as a tie point to ask whether the host peak's y coordinate is higher or lower than this.

  override: bool
  if False, function flips the spectra if its upsideown,
@@ -491,9 +491,9 @@ override=False, flip=False, plot_figure=True, dpi=200):
  tie_y_cord=Spectra[val, 1]

  mean_around_peak=np.nanmean(
- Spectra[:, 1][(Spectra[:, 0]>peak_pos_Ol[0])
+ Spectra[:, 1][(Spectra[:, 0]>peak_pos_Host[0])
  &
- (Spectra[:, 0]<peak_pos_Ol[0]+5)]
+ (Spectra[:, 0]<peak_pos_Host[0]+5)]
  )


@@ -513,7 +513,7 @@ override=False, flip=False, plot_figure=True, dpi=200):
  ax2.set_ylabel('Intensity')
  ax1.plot(x, y_init, '-r')
  ax1.plot(tie_x_cord, tie_y_cord, '*k', ms=10, label='tie_cord')
- ax1.plot(peak_pos_Ol[0], mean_around_peak, '*k', mfc='yellow', ms=15,label='Av Ol coordinate')
+ ax1.plot(peak_pos_Host[0], mean_around_peak, '*k', mfc='yellow', ms=15,label='Av host coordinate')

  if override is False:
  if mean_around_peak>tie_y_cord:
@@ -526,7 +526,7 @@ override=False, flip=False, plot_figure=True, dpi=200):

  ax2.plot(x, y, '-r')
  ax2.plot(tie_x_cord, tie_y_cord, '*k', ms=10, label='tie_cord')
- ax2.plot(peak_pos_Ol[0], mean_around_peak, '*k', mfc='yellow', ms=15, label='Av Ol coordinate')
+ ax2.plot(peak_pos_Host[0], mean_around_peak, '*k', mfc='yellow', ms=15, label='Av Ol coordinate')
  ax2.legend()

  else:
@@ -537,7 +537,7 @@ override=False, flip=False, plot_figure=True, dpi=200):
  if plot_figure is True:
  ax2.plot(x, y, '-r')
  ax2.plot(tie_x_cord, -tie_y_cord, '*k', ms=10, label='tie_cord')
- ax2.plot(peak_pos_Ol[0], -mean_around_peak, '*k', mfc='yellow', ms=15, label='Av Ol coordinate')
+ ax2.plot(peak_pos_Host[0], -mean_around_peak, '*k', mfc='yellow', ms=15, label='Av Ol coordinate')

  ax2.legend()

@@ -955,7 +955,8 @@ fit_sil='poly', dpi=200):



- df_sil=pd.DataFrame(data={'Silicate_LHS_Back1':lower_range_sil[0],
+ df_sil=pd.DataFrame(data={
+ 'Silicate_LHS_Back1':lower_range_sil[0],
  'Silicate_LHS_Back2':lower_range_sil[1],
  'Silicate_RHS_Back1':upper_range_sil[0],
  'Silicate_RHS_Back2':upper_range_sil[1],
@@ -1020,7 +1021,7 @@ def fit_area_for_water_region(*, path, filename, Spectra=None, config1: water_bc
  Configuration object for water peak and background positions. Default parameters stored in water_bck_pos, user can tweak.
  Parameters that need tweaking:

- fit_water: str 'poly',
+ fit_water: str 'poly',
  N_poly_water: str, degree of polynomial to fit to background
  lower_bck_water: [float, float], background position to left of water peak
  upper_bck_water: [float, float], background position to right of water peak
@@ -1204,7 +1205,8 @@ def fit_area_for_water_region(*, path, filename, Spectra=None, config1: water_bc



- df_water=pd.DataFrame(data={'Water_LHS_Back1':lower_range_water[0],
+ df_water=pd.DataFrame(data={'Water Filename': filename,
+ 'Water_LHS_Back1':lower_range_water[0],
  'Water_LHS_Back2':lower_range_water[1],
  'Water_RHS_Back1':upper_range_water[0],
  'Water_RHS_Back2':upper_range_water[1],
@@ -1217,7 +1219,7 @@ def fit_area_for_water_region(*, path, filename, Spectra=None, config1: water_bc
  ## Stitching results together nicely for output.


- def stitch_dataframes_together(df_sil, df_water, MI_file, Ol_file=None):
+ def stitch_dataframes_together(df_sil, df_water, MI_file, Host_file=None, save_csv=False, path=False):
  """ This function stitches together results from the fit_area function for silicate and water peaks and returns a DataFrame with the combined results. The DataFrame includes peak areas and background positions for both silicate and water peaks, and adds columns for the ratios of water to silicate areas.

  Parameters
@@ -1232,7 +1234,7 @@ def stitch_dataframes_together(df_sil, df_water, MI_file, Ol_file=None):
  MI_file: str
  MI file name

- Ol_file: str, optional
+ Host_file: str, optional
  Olivine file name


@@ -1241,22 +1243,22 @@ def stitch_dataframes_together(df_sil, df_water, MI_file, Ol_file=None):
  pd.DataFrame
  DataFrame with columns for MI filename, HW:LW_Trapezoid, HW:LW_Simpson, Water_Trapezoid_Area,
  Water_Simpson_Area, Silicate_Trapezoid_Area, and Silicate_Simpson_Area.
- If Ol_file is provided,
- the DataFrame will also include a column for Olivine filename.
+ If Host_file is provided,
+ the DataFrame will also include a column for Host filename.


  """
  Combo_Area=pd.concat([df_sil, df_water], axis=1)
- if Ol_file is not None:
- Combo_Area.insert(0, 'Olivine filename', Ol_file)
+ if Host_file is not None:
+ Combo_Area.insert(0, 'Host filename', Host_file)
  Combo_Area.insert(1, 'MI filename', MI_file)
  Combo_Area.insert(2, 'HW:LW_Trapezoid',
  Combo_Area['Water_Trapezoid_Area']/Combo_Area['HW_Silicate_Trapezoid_Area'])
  Combo_Area.insert(3, 'HW:LW_Simpson',
  Combo_Area['Water_Simpson_Area']/Combo_Area['HW_Silicate_Simpson_Area'])

- if Ol_file is not None:
- cols_to_move=['Olivine filename', 'MI filename', 'HW:LW_Trapezoid', 'HW:LW_Simpson',
+ if Host_file is not None:
+ cols_to_move=['Host filename', 'MI filename', 'HW:LW_Trapezoid', 'HW:LW_Simpson',
  'Water_Trapezoid_Area', 'Water_Simpson_Area', 'Silicate_Trapezoid_Area', 'Silicate_Simpson_Area']
  else:
  cols_to_move=['MI filename', 'HW:LW_Trapezoid', 'HW:LW_Simpson',
@@ -1267,4 +1269,14 @@ def stitch_dataframes_together(df_sil, df_water, MI_file, Ol_file=None):
  Combo_Area = Combo_Area[cols_to_move + [
  col for col in Combo_Area.columns if col not in cols_to_move]]

+ if save_csv is True:
+ if path is False:
+ raise TypeError('You need to enter a path to say where to save the CSV')
+ filename_with_ext=Combo_Area['Water Filename'][0]
+ filename, extension = os.path.splitext(filename_with_ext)
+ filename = filename.split('.')[0]
+ filename2=filename+ '_combo_fit.csv'
+ full_path = os.path.join(path, filename2)
+ Combo_Area.to_csv(full_path)
+
  return Combo_Area
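The new save_csv branch in stitch_dataframes_together builds the output name from the 'Water Filename' column that this release adds to df_water: strip the extension, append '_combo_fit.csv', and join onto the user-supplied path. A small sketch of just that filename handling (the spectrum name and output directory are hypothetical):

```python
import os

# Hypothetical value from the 'Water Filename' column
filename_with_ext = 'MI25_glass.txt'
out_dir = '/path/to/output'  # placeholder for the user-supplied path

# Same steps as the added block: drop the extension, guard against extra dots,
# then append the suffix and build the full path
filename, extension = os.path.splitext(filename_with_ext)
filename = filename.split('.')[0]
filename2 = filename + '_combo_fit.csv'
full_path = os.path.join(out_dir, filename2)
print(full_path)  # /path/to/output/MI25_glass_combo_fit.csv
```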
src/DiadFit/_version.py
@@ -5,4 +5,4 @@
  # 1) we don't load dependencies by storing it in __init__.py
  # 2) we can import it in setup.py for the same reason
  # 3) we can import it into your module
- __version__ = '0.0.69'
+ __version__ = '0.0.70'
src/DiadFit/densimeters.py
@@ -52,6 +52,7 @@ def calculate_density_cornell(*, temp='SupCrit', Split, split_err=None):

  df=pd.DataFrame(data={'Preferred D': 0,
  'in range': 'Y',
+ 'Corrected_Splitting': Split,
  'Notes': 'not in range',
  'LowD_RT': LowD_RT,
  'HighD_RT': HighD_RT,
@@ -66,6 +67,7 @@ def calculate_density_cornell(*, temp='SupCrit', Split, split_err=None):
  else:
  df=pd.DataFrame(data={'Preferred D': 0,
  'in range': 'Y',
+ 'Corrected_Splitting': Split,
  'Notes': 'not in range',
  'LowD_RT': LowD_RT,
  'HighD_RT': HighD_RT,
@@ -274,7 +276,7 @@ def calculate_Densimeter_std_err_values(*, pickle_str, corrected_split, correcte


  df=pd.DataFrame(data={
- str_d+'_Corrected_Splitting': new_x,
+
  str_d+'_Density': preferred_values,
  str_d + '_Density_σ': total_uncertainty,
  str_d+'_Density+1σ': preferred_values-total_uncertainty,
@@ -285,10 +287,9 @@ def calculate_Densimeter_std_err_values(*, pickle_str, corrected_split, correcte
  })

  return df
+ ## Function for if we dont have a densimeter yet

-
- ## UCBerkeley densimeters
- def calculate_density_ucb(*, df_combo, Ne_pickle_str='polyfit_data.pkl', temp='SupCrit', split_err=0, CI_split=0.67, CI_neon=0.67):
+ def calculate_errors_no_densimeter(*, df_combo, Ne_pickle_str='polyfit_data.pkl', temp='SupCrit', split_err=0, CI_split=0.67, CI_neon=0.67):
  """ This function converts Diad Splitting into CO$_2$ density using densimeters of UCB

  Parameters
@@ -320,12 +321,61 @@ def calculate_density_ucb(*, df_combo, Ne_pickle_str='polyfit_data.pkl', temp='

  # Lets calculate corrected splitting and the error on this.
  Split=df_combo['Splitting']*Ne_corr['preferred_values']
-
+ df_combo['Corrected_Splitting']=Split
  Split_err, pk_err=propagate_error_split_neon_peakfit(Ne_corr=Ne_corr, df_fits=df_combo)
  df_combo['Corrected_Splitting_σ']=Split_err
  df_combo['Corrected_Splitting_σ_Ne']=(Ne_corr['upper_values']*df_combo['Splitting']-Ne_corr['lower_values']*df_combo['Splitting'])/2
  df_combo['Corrected_Splitting_σ_peak_fit']=pk_err

+ cols_to_move = ['filename',
+ 'Corrected_Splitting', 'Corrected_Splitting_σ',
+ 'Corrected_Splitting_σ_Ne', 'Corrected_Splitting_σ_peak_fit']
+ df_merge = df_combo[cols_to_move + [
+ col for col in df_combo.columns if col not in cols_to_move]]
+
+ return df_combo
+
+
+ ## UCBerkeley densimeters
+ def calculate_density_ucb(*, df_combo, Ne_pickle_str='polyfit_data.pkl', temp='SupCrit', split_err=0, CI_split=0.67, CI_neon=0.67):
+ """ This function converts Diad Splitting into CO$_2$ density using densimeters of UCB
+
+ Parameters
+ -------------
+ df_combo_c: pandas DataFrame
+ data frame of peak fitting information
+
+ Ne_corr: pandas DataFrame
+ dataframe of Ne correction factors
+
+ temp: str
+ 'SupCrit' if measurements done at 37C
+ 'RoomT' if measurements done at 24C - Not supported at Berkeley.
+
+ Split: int, float, pd.Series, np.array
+
+ Returns
+ --------------
+ pd.DataFrame
+ Prefered Density (based on different equatoins being merged), and intermediate calculations
+
+
+
+
+ """
+ df_combo_c=df_combo.copy()
+ time=df_combo_c['sec since midnight']
+ Ne_corr=calculate_Ne_corr_std_err_values(pickle_str=Ne_pickle_str,
+ new_x=time, CI=CI_neon)
+
+ # Lets calculate corrected splitting and the error on this.
+ Split=df_combo_c['Splitting']*Ne_corr['preferred_values']
+
+ Split_err, pk_err=propagate_error_split_neon_peakfit(Ne_corr=Ne_corr, df_fits=df_combo_c)
+ df_combo_c['Corrected_Splitting_σ']=Split_err
+ df_combo_c['Corrected_Splitting_σ_Ne']=(Ne_corr['upper_values']*df_combo_c['Splitting']-Ne_corr['lower_values']*df_combo_c['Splitting'])/2
+ df_combo_c['Corrected_Splitting_σ_peak_fit']=pk_err
+
  if temp=='RoomT':
  raise TypeError('Sorry, no UC Berkeley calibration at 24C, please enter temp=SupCrit')
  if isinstance(Split, float) or isinstance(Split, int):
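Both the new calculate_errors_no_densimeter and the reworked calculate_density_ucb start from the same step: the measured diad splitting is scaled by the time-dependent Ne correction factor, and the Ne contribution to its uncertainty is half the spread between the upper and lower correction values. A hedged sketch of that arithmetic (column names follow the diff; the numbers are made up):

```python
import pandas as pd

# Made-up peak-fit results and Ne correction factors for two acquisitions
df_combo_c = pd.DataFrame({'Splitting': [102.80, 102.95]})
Ne_corr = pd.DataFrame({'preferred_values': [1.00020, 1.00021],
                        'upper_values':     [1.00025, 1.00026],
                        'lower_values':     [1.00015, 1.00016]})

# Corrected splitting = measured splitting * preferred Ne correction factor
df_combo_c['Corrected_Splitting'] = df_combo_c['Splitting'] * Ne_corr['preferred_values']

# Ne contribution to the splitting uncertainty: half the upper-lower spread
df_combo_c['Corrected_Splitting_σ_Ne'] = (Ne_corr['upper_values'] * df_combo_c['Splitting']
                                          - Ne_corr['lower_values'] * df_combo_c['Splitting']) / 2
print(df_combo_c)
```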
@@ -363,6 +413,9 @@ def calculate_density_ucb(*, df_combo, Ne_pickle_str='polyfit_data.pkl', temp='
  MedD_SC = pd.Series(medrho_model(Split), index=Split.index)
  HighD_SC = pd.Series(highrho_model(Split), index=Split.index)

+ print('testing')
+ #print(df_combo_c['Corrected_Splitting'])
+



@@ -493,7 +546,7 @@ def calculate_density_ucb(*, df_combo, Ne_pickle_str='polyfit_data.pkl', temp='
  df.loc[SupCrit&Upper_Cal_SC, 'in range']='N'


- df_merge1=pd.concat([df_combo, Ne_corr], axis=1).reset_index(drop=True)
+ df_merge1=pd.concat([df_combo_c, Ne_corr], axis=1).reset_index(drop=True)
  df_merge=pd.concat([df, df_merge1], axis=1).reset_index(drop=True)

  df_merge = df_merge.rename(columns={'Preferred D': 'Density g/cm3'})
src/DiadFit/diads.py
@@ -1463,9 +1463,16 @@ def fit_gaussian_voigt_generic_diad(config1, *, diad1=False, diad2=False, path=N
  """


+
  # Calculate the amplitude from the sigma and the prominence
  calc_diad_amplitude=((config1.diad_sigma)*(config1.diad_prom))/0.3939
  calc_HB_amplitude=((config1.diad_sigma)*(config1.HB_prom))/0.3939
+
+ # If ask for Gaussian, but fit peaks is 1, remove gaussian
+ if fit_peaks==1 and config1.fit_gauss is True:
+ config1.fit_gauss=False
+
+

  if diad2 is True and fit_peaks==3:
  calc_C13_amplitude=(0.5*(config1.diad_sigma)*(config1.C13_prom))/0.3939
src/DiadFit/ne_lines.py
@@ -661,8 +661,8 @@ const_params=True, spec_res=0.4) :
  min_off=0.8
  max_off=1.2
  if const_params is False:
- min_off=0
- max_off=100
+ min_off=0.3
+ max_off=3

  # Flatten x and y if needed
  xdat=x.flatten()
@@ -691,7 +691,7 @@ const_params=True, spec_res=0.4) :
  model0 = VoigtModel(prefix='p0_')#+ ConstantModel(prefix='c0')

  pars0 = model0.make_params()
- pars0['p0_center'].set(Ne_center)
+ pars0['p0_center'].set(Ne_center, min=Ne_center-2*spec_res, max=Ne_center+2*spec_res)
  pars0['p0_amplitude'].set(amplitude)


@@ -727,7 +727,7 @@ const_params=True, spec_res=0.4) :
  model1 = VoigtModel(prefix='p1_')#+ ConstantModel(prefix='c0')
  pars1 = model1.make_params()
  pars1['p1_'+ 'amplitude'].set(Amp_p0, min=min_off*Amp_p0, max=max_off*Amp_p0)
- pars1['p1_'+ 'center'].set(Center_p0, min=Center_p0-0.2, max=Center_p0+0.2)
+ pars1['p1_'+ 'center'].set(Center_p0, min=Center_p0-spec_res/2, max=Center_p0+spec_res/2)
  pars1['p1_'+ 'sigma'].set(pk1_sigma, min=pk1_sigma*min_off, max=pk1_sigma*max_off)


@@ -881,7 +881,7 @@ const_params=True, spec_res=0.4) :


  def fit_pk2(x, y_corr, x_span=[-5, 5], Ne_center=1447.5, amplitude=1000, pk2_sigma=0.4,
- model_name='PseudoVoigtModel', print_report=False, const_params=True) :
+ model_name='PseudoVoigtModel', print_report=False, const_params=True, spec_res=0.4) :
  """ This function fits the 1447 Ne line as a single Voigt

  Parameters
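These ne_lines.py hunks tie the allowed drift of each Voigt centre to the spectrometer resolution (spec_res, in cm-1) rather than fixed offsets, ±2·spec_res on the first pass and ±spec_res/2 on the constrained second peak, and fit_pk2 now accepts spec_res so the same constraint reaches the 1447 cm-1 line. A minimal lmfit sketch of bounding a centre this way (the synthetic line and starting values are illustrative):

```python
import numpy as np
from lmfit.models import VoigtModel

spec_res = 0.4      # spectral resolution in cm-1 (illustrative)
Ne_center = 1447.5  # expected line position

# Synthetic Ne-like line, slightly offset from the expected centre
x = np.linspace(1442, 1453, 400)
y = 1000 * np.exp(-(x - 1447.6)**2 / (2 * 0.3**2))

model0 = VoigtModel(prefix='p0_')
pars0 = model0.make_params()
# Same pattern as the diff: the centre may only move +/- 2 resolution elements
pars0['p0_center'].set(Ne_center, min=Ne_center - 2*spec_res, max=Ne_center + 2*spec_res)
pars0['p0_amplitude'].set(1000, min=0)
pars0['p0_sigma'].set(0.3, min=0.05, max=1)

result = model0.fit(y, pars0, x=x)
print(result.params['p0_center'].value)
```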
@@ -1191,7 +1191,7 @@ plot_figure=True, loop=True,


  # Fit the 1447 peak
- cent_pk2,Area_pk2, sigma_pk2, gamma_pk2, Ne_pk2_reg_x_plot, Ne_pk2_reg_y_plot, Ne_pk2_reg_x, Ne_pk2_reg_y, xx_pk2, result_pk2, error_pk2, result_pk2_origx, Peak2_Prop_Lor = fit_pk2( x_pk2, y_corr_pk2, x_span=x_span_pk2, Ne_center=Ne_center_2, model_name=config.model_name, amplitude=Pk2_Amp, pk2_sigma=config.pk2_sigma, const_params=const_params)
+ cent_pk2,Area_pk2, sigma_pk2, gamma_pk2, Ne_pk2_reg_x_plot, Ne_pk2_reg_y_plot, Ne_pk2_reg_x, Ne_pk2_reg_y, xx_pk2, result_pk2, error_pk2, result_pk2_origx, Peak2_Prop_Lor = fit_pk2( x_pk2, y_corr_pk2, x_span=x_span_pk2, Ne_center=Ne_center_2, model_name=config.model_name, amplitude=Pk2_Amp, pk2_sigma=config.pk2_sigma, const_params=const_params,spec_res=spec_res)


  # Calculate difference between peak centers, and Delta Ne
@@ -1504,7 +1504,7 @@ def plot_Ne_corrections(df=None, x_axis=None, x_label='index', marker='o', mec='
  ## Looping Ne lines
  def loop_Ne_lines(*, files, spectra_path, filetype,
  config, config_ID_peaks, df_fit_params=None, prefix=False, print_df=False,
- plot_figure=True, single_acq=False):
+ plot_figure=True, single_acq=False, const_params=True):

  df = pd.DataFrame([])
  # This is for repeated acquisition of Ne lines
@@ -1525,7 +1525,7 @@ def loop_Ne_lines(*, files, spectra_path, filetype,
  Ne_center_2=df_fit_params['Peak2_cent'].iloc[0],
  Ne_prom_1=df_fit_params['Peak1_prom'].iloc[0],
  Ne_prom_2=df_fit_params['Peak2_prom'].iloc[0],
- const_params=False,
+ const_params=const_params,
  plot_figure=plot_figure)
  df = pd.concat([df, data], axis=0)

@@ -1544,7 +1544,7 @@ def loop_Ne_lines(*, files, spectra_path, filetype,
  Ne_center_2=df_fit_params['Peak2_cent'].iloc[0],
  Ne_prom_1=df_fit_params['Peak1_prom'].iloc[0],
  Ne_prom_2=df_fit_params['Peak2_prom'].iloc[0],
- const_params=False,
+ const_params=const_params,
  plot_figure=plot_figure)
  df = pd.concat([df, data], axis=0)

src/DiadFit.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: DiadFit
- Version: 0.0.69
+ Version: 0.0.70
  Summary: DiadFit
  Home-page: https://github.com/PennyWieser/DiadFit
  Author: Penny Wieser