PYME-extra 1.0.4.post0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (101)
  1. PYMEcs/Acquire/Actions/__init__.py +0 -0
  2. PYMEcs/Acquire/Actions/custom.py +167 -0
  3. PYMEcs/Acquire/Hardware/LPthreadedSimple.py +248 -0
  4. PYMEcs/Acquire/Hardware/LPthreadedSimpleSim.py +246 -0
  5. PYMEcs/Acquire/Hardware/NikonTiFlaskServer.py +45 -0
  6. PYMEcs/Acquire/Hardware/NikonTiFlaskServerT.py +59 -0
  7. PYMEcs/Acquire/Hardware/NikonTiRESTClient.py +73 -0
  8. PYMEcs/Acquire/Hardware/NikonTiSim.py +35 -0
  9. PYMEcs/Acquire/Hardware/__init__.py +0 -0
  10. PYMEcs/Acquire/Hardware/driftTrackGUI.py +329 -0
  11. PYMEcs/Acquire/Hardware/driftTrackGUI_n.py +472 -0
  12. PYMEcs/Acquire/Hardware/driftTracking.py +424 -0
  13. PYMEcs/Acquire/Hardware/driftTracking_n.py +433 -0
  14. PYMEcs/Acquire/Hardware/fakeCamX.py +15 -0
  15. PYMEcs/Acquire/Hardware/offsetPiezoRESTCorrelLog.py +38 -0
  16. PYMEcs/Acquire/__init__.py +0 -0
  17. PYMEcs/Analysis/MBMcollection.py +552 -0
  18. PYMEcs/Analysis/MINFLUX.py +280 -0
  19. PYMEcs/Analysis/MapUtils.py +77 -0
  20. PYMEcs/Analysis/NPC.py +1176 -0
  21. PYMEcs/Analysis/Paraflux.py +218 -0
  22. PYMEcs/Analysis/Simpler.py +81 -0
  23. PYMEcs/Analysis/Sofi.py +140 -0
  24. PYMEcs/Analysis/__init__.py +0 -0
  25. PYMEcs/Analysis/decSofi.py +211 -0
  26. PYMEcs/Analysis/eventProperties.py +50 -0
  27. PYMEcs/Analysis/fitDarkTimes.py +569 -0
  28. PYMEcs/Analysis/objectVolumes.py +20 -0
  29. PYMEcs/Analysis/offlineTracker.py +130 -0
  30. PYMEcs/Analysis/stackTracker.py +180 -0
  31. PYMEcs/Analysis/timeSeries.py +63 -0
  32. PYMEcs/Analysis/trackFiducials.py +186 -0
  33. PYMEcs/Analysis/zerocross.py +91 -0
  34. PYMEcs/IO/MINFLUX.py +851 -0
  35. PYMEcs/IO/NPC.py +117 -0
  36. PYMEcs/IO/__init__.py +0 -0
  37. PYMEcs/IO/darkTimes.py +19 -0
  38. PYMEcs/IO/picasso.py +219 -0
  39. PYMEcs/IO/tabular.py +11 -0
  40. PYMEcs/__init__.py +0 -0
  41. PYMEcs/experimental/CalcZfactor.py +51 -0
  42. PYMEcs/experimental/FRC.py +338 -0
  43. PYMEcs/experimental/ImageJROItools.py +49 -0
  44. PYMEcs/experimental/MINFLUX.py +1537 -0
  45. PYMEcs/experimental/NPCcalcLM.py +560 -0
  46. PYMEcs/experimental/Simpler.py +369 -0
  47. PYMEcs/experimental/Sofi.py +78 -0
  48. PYMEcs/experimental/__init__.py +0 -0
  49. PYMEcs/experimental/binEventProperty.py +187 -0
  50. PYMEcs/experimental/chaining.py +23 -0
  51. PYMEcs/experimental/clusterTrack.py +179 -0
  52. PYMEcs/experimental/combine_maps.py +104 -0
  53. PYMEcs/experimental/eventProcessing.py +93 -0
  54. PYMEcs/experimental/fiducials.py +323 -0
  55. PYMEcs/experimental/fiducialsNew.py +402 -0
  56. PYMEcs/experimental/mapTools.py +271 -0
  57. PYMEcs/experimental/meas2DplotDh5view.py +107 -0
  58. PYMEcs/experimental/mortensen.py +131 -0
  59. PYMEcs/experimental/ncsDenoise.py +158 -0
  60. PYMEcs/experimental/onTimes.py +295 -0
  61. PYMEcs/experimental/procPoints.py +77 -0
  62. PYMEcs/experimental/pyme2caml.py +73 -0
  63. PYMEcs/experimental/qPAINT.py +965 -0
  64. PYMEcs/experimental/randMap.py +188 -0
  65. PYMEcs/experimental/regExtraCmaps.py +11 -0
  66. PYMEcs/experimental/selectROIfilterTable.py +72 -0
  67. PYMEcs/experimental/showErrs.py +51 -0
  68. PYMEcs/experimental/showErrsDh5view.py +58 -0
  69. PYMEcs/experimental/showShiftMap.py +56 -0
  70. PYMEcs/experimental/snrEvents.py +188 -0
  71. PYMEcs/experimental/specLabeling.py +51 -0
  72. PYMEcs/experimental/splitRender.py +246 -0
  73. PYMEcs/experimental/testChannelByName.py +36 -0
  74. PYMEcs/experimental/timedSpecies.py +28 -0
  75. PYMEcs/experimental/utils.py +31 -0
  76. PYMEcs/misc/ExtraCmaps.py +177 -0
  77. PYMEcs/misc/__init__.py +0 -0
  78. PYMEcs/misc/configUtils.py +169 -0
  79. PYMEcs/misc/guiMsgBoxes.py +27 -0
  80. PYMEcs/misc/mapUtils.py +230 -0
  81. PYMEcs/misc/matplotlib.py +136 -0
  82. PYMEcs/misc/rectsFromSVG.py +182 -0
  83. PYMEcs/misc/shellutils.py +1110 -0
  84. PYMEcs/misc/utils.py +205 -0
  85. PYMEcs/misc/versionCheck.py +20 -0
  86. PYMEcs/misc/zcInfo.py +90 -0
  87. PYMEcs/pyme_warnings.py +4 -0
  88. PYMEcs/recipes/__init__.py +0 -0
  89. PYMEcs/recipes/base.py +75 -0
  90. PYMEcs/recipes/localisations.py +2380 -0
  91. PYMEcs/recipes/manipulate_yaml.py +83 -0
  92. PYMEcs/recipes/output.py +177 -0
  93. PYMEcs/recipes/processing.py +247 -0
  94. PYMEcs/recipes/simpler.py +290 -0
  95. PYMEcs/version.py +2 -0
  96. pyme_extra-1.0.4.post0.dist-info/METADATA +114 -0
  97. pyme_extra-1.0.4.post0.dist-info/RECORD +101 -0
  98. pyme_extra-1.0.4.post0.dist-info/WHEEL +5 -0
  99. pyme_extra-1.0.4.post0.dist-info/entry_points.txt +3 -0
  100. pyme_extra-1.0.4.post0.dist-info/licenses/LICENSE +674 -0
  101. pyme_extra-1.0.4.post0.dist-info/top_level.txt +1 -0
PYMEcs/Analysis/MINFLUX.py
@@ -0,0 +1,280 @@
+ from scipy.stats import binned_statistic
+ from PYMEcs.IO.MINFLUX import get_stddev_property
+ import numpy as np
+ import matplotlib.pyplot as plt
+ from PYMEcs.pyme_warnings import warn
+
+ import pandas as pd
+ from PYMEcs.misc.matplotlib import boxswarmplot, violinswarmplot
+
+ def plot_stats(ds,ax,errdict,sdmax=None,swarmsize=3,siteKey='siteID',mode='box',showpoints=True,strip=False):
+     df = site_stats(ds,errdict,siteKey=siteKey)
+     if mode == 'box':
+         kwargs = dict(swarmsize=swarmsize,width=0.2,annotate_means=True,annotate_medians=True,swarmalpha=0.4,
+                       showpoints=showpoints,strip=strip)
+         boxswarmplot(df,ax=ax,**kwargs)
+     elif mode == 'violin':
+         kwargs = dict(swarmsize=swarmsize,width=0.8,annotate_means=True,annotate_medians=True,swarmalpha=0.4,
+                       linecolor="0.4",linewidth=1.0,annotate_width=0.4,showpoints=showpoints,strip=strip)
+         violinswarmplot(df,ax=ax,**kwargs)
+     else:
+         raise RuntimeError("invalid mode, should be one of 'box' or 'violin', is '%s'" % mode)
+     ax.set_ylim(0,sdmax)
+     ax.set_ylabel('precision [nm]')
+     return df
+
+ def site_stats(ds,sitedict,siteKey='siteID'):
+     uids, idx = np.unique(ds[siteKey],return_index=True)
+     sitestats = {}
+     for key in sitedict:
+         prop = sitedict[key]
+         sitestats[key] = ds[prop][idx]
+     df = pd.DataFrame.from_dict(sitestats)
+     return df[df > 0].dropna() # this drops rows with zeroes; these should not occur but apparently do; probably a bug somewhere
+
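A minimal usage sketch for site_stats (module path inferred from the file listing above; ds is a plain dict of numpy arrays standing in for a PYME datasource, and the column names are illustrative):

    import numpy as np
    from PYMEcs.Analysis.MINFLUX import site_stats

    # three sites, two localisations each; per-site errors are repeated on every row
    ds = dict(siteID=np.array([1, 1, 2, 2, 3, 3]),
              error_x=np.array([2.5, 2.5, 3.1, 3.1, 1.8, 1.8]),
              error_y=np.array([2.2, 2.2, 2.9, 2.9, 2.0, 2.0]))
    # np.unique(return_index=True) picks one row per site; the dict maps column names to properties
    df = site_stats(ds, dict(x_sd='error_x', y_sd='error_y'))
    print(df)  # one row per site, columns x_sd and y_sd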
+ def plotsitestats(p,origamiErrorLimit=10,figsize=None,swarmsize=3,siteKey='siteID',fignum=None,mode='box',showpoints=True,strip=False):
+     uids = np.unique(p[siteKey])
+     fig, axs = plt.subplots(2, 2, figsize=figsize,num=fignum)
+     plot_stats(p,axs[0, 0],dict(xd_sd_corr='error_x',x_sd='error_x_nc',x_sd_trace='error_x_ori'),
+                sdmax=origamiErrorLimit,swarmsize=swarmsize,siteKey=siteKey,mode=mode,showpoints=showpoints,strip=strip)
+     plot_stats(p,axs[0, 1],dict(yd_sd_corr='error_y',y_sd='error_y_nc',y_sd_trace='error_y_ori'),
+                sdmax=origamiErrorLimit,swarmsize=swarmsize,siteKey=siteKey,mode=mode,showpoints=showpoints,strip=strip)
+     if p.mdh.get('MINFLUX.Is3D'):
+         plot_stats(p,axs[1, 0],dict(zd_sd_corr='error_z',z_sd='error_z_nc',z_sd_trace='error_z_ori'),
+                    sdmax=origamiErrorLimit,swarmsize=swarmsize,siteKey=siteKey,mode=mode,showpoints=showpoints,strip=strip)
+
+     all_axes = dict(xd_sd_corr='error_x',yd_sd_corr='error_y')
+     if p.mdh.get('MINFLUX.Is3D'):
+         all_axes['zd_sd_corr'] = 'error_z'
+     df_allaxes = plot_stats(p,axs[1, 1],all_axes,sdmax=origamiErrorLimit,swarmsize=swarmsize,
+                             siteKey=siteKey,mode=mode,showpoints=showpoints,strip=strip)
+     fig.suptitle('Site stats for %d sites' % df_allaxes.shape[0])
+     plt.tight_layout()
+     return df_allaxes
+
+ def propcheck_density_stats(ds,warning=True):
+     for prop in ['clst_area','clst_vol','clst_density','clst_stdz']:
+         if prop not in ds.keys():
+             if warning:
+                 warn("required property %s not in data source" % prop)
+             return False
+     return True
+
+ def density_stats(ds,objectID='dbscanClumpID'):
+     uids, idx = np.unique(ds[objectID],return_index=True)
+     area = ds['clst_area'][idx]
+     vol = ds['clst_vol'][idx]
+     dens = ds['clst_density'][idx]
+     sz = ds['clst_stdz'][idx]
+
+     return area, vol, dens, sz
+
+ def plot_density_stats(ds,objectID='dbscanClumpID',scatter=False):
+     if not propcheck_density_stats(ds):
+         return
+     area, vol, dens, sz = density_stats(ds,objectID=objectID)
+     fig, (ax0,ax1) = plt.subplots(2,2)
+     if not scatter:
+         ax0[0].boxplot(dens,labels=['Density'])
+         ax0[1].boxplot(area,labels=['Area'])
+         ax1[0].boxplot(vol/1e6,labels=['Volume'])
+         ax1[1].boxplot(sz,labels=['Stddev Z'])
+     else:
+         bp_dict = ax0[0].scattered_boxplot(dens,labels=['Density'],showmeans=True)
+         for line in bp_dict['means']:
+             # get position data for mean line
+             x, y = line.get_xydata()[0] # top of mean line
+             # overlay mean value
+             ax0[0].text(x-0.25, 1.05*y, '%.0f' % y,
+                         horizontalalignment='center') # draw above, centered
+         for line in bp_dict['medians']:
+             # get position data for median line
+             x, y = line.get_xydata()[0] # top of median line
+             # overlay median value
+             ax0[0].text(x-0.25, 0.95*y, '%.0f' % y,
+                         horizontalalignment='center',
+                         verticalalignment='center') # draw above, centered
+         ax0[1].scattered_boxplot(area,labels=['Area'],showmeans=True)
+         ax1[0].scattered_boxplot(vol/1e6,labels=['Volume'],showmeans=True)
+         ax1[1].scattered_boxplot(sz,labels=['Stddev Z'],showmeans=True)
+     plt.tight_layout()
+
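ax.scattered_boxplot is not a stock matplotlib Axes method; it is presumably registered as a custom method elsewhere in the package (plausibly in PYMEcs.misc.matplotlib). A minimal sketch of how such a registration could work, with a hypothetical single-box implementation:

    import matplotlib.axes
    import numpy as np

    def scattered_boxplot(ax, data, labels=None, showmeans=False, **kwargs):
        # hypothetical sketch: an ordinary boxplot with the raw points jittered on top
        bp = ax.boxplot(data, labels=labels, showmeans=showmeans, **kwargs)
        arr = np.asarray(data)
        # jitter around x=1, i.e. this sketch only handles a single box
        ax.plot(1 + 0.08*np.random.randn(arr.size), arr, '.', alpha=0.4)
        return bp  # same dict as ax.boxplot, with 'means' and 'medians' entries

    # attach as a method so that ax.scattered_boxplot(...) works
    matplotlib.axes.Axes.scattered_boxplot = scattered_boxplot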
+ def plot_density_stats_sns(ds,objectID='dbscanClumpID'):
+     if not propcheck_density_stats(ds):
+         return
+     area, vol, dens, sz = density_stats(ds,objectID=objectID)
+     dfdens = pd.DataFrame.from_dict(dict(density=dens))
+     dfarea = pd.DataFrame.from_dict(dict(area=area))
+     dfvol = pd.DataFrame.from_dict(dict(volume=vol/1e6))
+     dfstdz = pd.DataFrame.from_dict(dict(std_z=sz))
+
+     fig, (ax0,ax1) = plt.subplots(2,2)
+     kwargs = dict(swarmsize=5,width=0.2,annotate_means=True,annotate_medians=True,swarmalpha=0.4)
+     boxswarmplot(dfdens,ax=ax0[0],format="%.0f",**kwargs)
+     ax0[0].set_ylim(0,1.2*dens.max())
+     ax0[0].set_ylabel("#/um^2")
+     boxswarmplot(dfarea,ax=ax0[1],format="%.0f",**kwargs)
+     ax0[1].set_ylabel("nm^2")
+     boxswarmplot(dfvol,ax=ax1[0],format="%.2f",**kwargs)
+     ax1[0].set_ylabel("10^-3 um^3")
+     boxswarmplot(dfstdz,ax=ax1[1],format="%.1f",**kwargs)
+     ax1[1].set_ylabel("nm")
+     fig.suptitle('Density stats for %d clusters' % dens.size)
+     plt.tight_layout()
+
+     return dens
+
+ def plot_stats_minflux(deltas, durations, tdintrace, efo_or_dtovertime, times,
+                        showTimeAverages=False, dsKey=None, areaString=None):
+     from scipy.stats import iqr
+
+     fig, (ax1, ax2) = plt.subplots(2, 2)
+     dtmedian = np.median(deltas)
+     dtmean = np.mean(deltas)
+     dtiqr = iqr(deltas,rng=(10, 90)) # we are going for the 10 to 90 % range
+     h = ax1[0].hist(deltas,bins=40,range=(0,dtmean + 2*dtiqr))
+     ax1[0].plot([dtmedian,dtmedian],[0,h[0].max()])
+     # this is the time between one dye molecule and the next dye molecule being seen
+     ax1[0].set_xlabel('time between traces (TBT) [s]')
+     ax1[0].text(0.95, 0.8, 'median %.2f s' % dtmedian, horizontalalignment='right',
+                 verticalalignment='bottom', transform=ax1[0].transAxes)
+     ax1[0].text(0.95, 0.7, ' mean %.2f s' % dtmean, horizontalalignment='right',
+                 verticalalignment='bottom', transform=ax1[0].transAxes)
+     if areaString is not None:
+         ax1[0].text(0.95, 0.6, areaString, horizontalalignment='right',
+                     verticalalignment='bottom', transform=ax1[0].transAxes)
+
+     durmedian = np.median(durations)
+     durmean = np.mean(durations)
+     duriqr = iqr(durations,rng=(10, 90))
+     h = ax1[1].hist(durations,bins=40,range=(0,durmean + 2*duriqr))
+     ax1[1].plot([durmedian,durmedian],[0,h[0].max()])
+     ax1[1].set_xlabel('duration of "traces" [s]')
+     ax1[1].text(0.95, 0.8, 'median %.0f ms' % (1e3*durmedian), horizontalalignment='right',
+                 verticalalignment='bottom', transform=ax1[1].transAxes)
+     ax1[1].text(0.95, 0.7, ' mean %.0f ms' % (1e3*durmean), horizontalalignment='right',
+                 verticalalignment='bottom', transform=ax1[1].transAxes)
+     # ax1[1].set_xlim(0,durmean + 2*duriqr) # superfluous since we are using the range keyword in hist
+
+     tdintrace_ms = 1e3*tdintrace
+     tdmedian = np.median(tdintrace_ms)
+     tdmean = np.mean(tdintrace_ms)
+     tdiqr = iqr(tdintrace_ms,rng=(10, 90))
+     h = ax2[0].hist(tdintrace_ms,bins=50,range=(0,tdmean + 2*tdiqr))
+     ax2[0].plot([tdmedian,tdmedian],[0,h[0].max()])
+     # these are the times between repeated localisations of the same dye molecule
+     ax2[0].set_xlabel('time between localisations in same trace [ms]')
+     ax2[0].text(0.95, 0.8, 'median %.0f ms' % (tdmedian), horizontalalignment='right',
+                 verticalalignment='bottom', transform=ax2[0].transAxes)
+     ax2[0].text(0.95, 0.7, ' mean %.0f ms' % (tdmean), horizontalalignment='right',
+                 verticalalignment='bottom', transform=ax2[0].transAxes)
+     # ax2[0].set_xlim(0,tdmean + 2*tdiqr) # superfluous since we are using the range keyword in hist
+
+     if showTimeAverages:
+         ax2[1].plot(times,efo_or_dtovertime)
+         ax2[1].set_xlabel('TBT running time average [s]')
+         ax2[1].set_ylim([0, None])
+     else:
+         h = ax2[1].hist(1e-3*efo_or_dtovertime,bins=100,range=(0,200))
+         # ax2[0].plot([tdmedian,tdmedian],[0,h[0].max()])
+         ax2[1].set_xlabel('efo (photon rate kHz)')
+         # ax2[0].text(0.95, 0.8, 'median %.2f' % tdmedian, horizontalalignment='right',
+         #             verticalalignment='bottom', transform=ax2[0].transAxes)
+     if dsKey is not None:
+         plt.suptitle('Localisation rate analysis from datasource %s' % dsKey)
+     plt.tight_layout()
+
+
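The histogram ranges above all follow the same robustness recipe: clip the axis at the mean plus twice the 10-90% interquantile range, so that a few extreme values cannot stretch the plot. The same idea in isolation, as a self-contained sketch (data here is synthetic):

    import numpy as np
    from scipy.stats import iqr

    gen = np.random.default_rng(0)
    deltas = np.concatenate([gen.exponential(0.5, 1000), [50.0]])  # one extreme outlier
    upper = deltas.mean() + 2*iqr(deltas, rng=(10, 90))  # robust upper limit, ignores the outlier
    counts, edges = np.histogram(deltas, bins=40, range=(0, upper))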
+ # this function assumes a pandas dataframe
+ # the pandas frame should generally be generated via the function minflux_npy2pyme from PYMEcs.IO.MINFLUX
+ def analyse_locrate_pdframe(datain,use_invalid=False,showTimeAverages=True):
+
+     if np.any(datain['vld'] < 1):
+         data = datain[datain['vld'] >= 1]
+         has_invalid = True
+     else:
+         data = datain
+         has_invalid = False
+
+     # we replace the possibly non-sequential trace ids from MINFLUX data with a set of sequential ids
+     # this works better for our calculations below where we assume contiguous indices
+     # for bin creation for binned_statistic calls (see the sketch after this function)
+     uids,revids = np.unique(data['tid'].values,return_inverse=True)
+     ids = np.arange(1,uids.size+1,dtype='int32')[revids]
+     counts = get_stddev_property(ids,data['tid'].values,statistic='count')
+
+     bins = np.arange(int(ids.max())+1) + 0.5
+     startindex, bin_edges, binnumber = binned_statistic(ids,data.index,statistic='min', bins=bins)
+     endindex, bin_edges, binnumber = binned_statistic(ids,data.index,statistic='max', bins=bins)
+
+     if has_invalid and use_invalid:
+         # this tries to implement the way to compute both trace durations and time between traces
+         # as described in Ostersehlt, L.M. et al. (2022) ‘DNA-PAINT MINFLUX nanoscopy’, Nature Methods, 19(9), pp. 1072–1075.
+         # Available at: https://doi.org/10.1038/s41592-022-01577-1.
+         # TODO: still needs proper checking if this is correct. Also needs potential BOUNDS CHECKING as we currently assume
+         # there are invalid localisations BOTH before the first valid locs and after the last valid locs
+         durations = datain.loc[endindex+1,'tim'].values - datain.loc[startindex,'tim'].values
+         deltas = datain.loc[startindex[1:],'tim'].values - datain.loc[endindex[:-1]+1,'tim'].values
+     else:
+         durations = data.loc[endindex,'tim'].values - data.loc[startindex,'tim'].values
+         deltas = data.loc[startindex,'tim'][1:].values - data.loc[endindex,'tim'][:-1].values
+     # note that we need to convert to numpy here using the values attribute, otherwise we run into an issue
+     # with the 'index' mucking up how the rows are subtracted against each other
+     tdiff = data['tim'].values[1:]-data['tim'].values[:-1]
+     tdsmall = tdiff[tdiff <= 0.1] # in-trace time differences (gaps <= 100 ms)
+     tdmedian = np.median(tdsmall)
+
+     start_times = data.loc[startindex,'tim'][:-1].values # we use those for binning the deltas; we discard the final time to match the size of deltas
+
+     if has_invalid and use_invalid:
+         durations_proper = durations
+     else:
+         durations_proper = durations + tdmedian # we count one extra localisation, using the median duration
+         # the extra is because we leave at least one localisation out of the total timing when we subtract ends-starts
+
+     # pass tdsmall (the in-trace time differences) as the tdintrace argument of plot_stats_minflux
+     if showTimeAverages:
+         delta_averages, bin_edges, binnumber = binned_statistic(start_times,deltas,statistic='mean', bins=50)
+         delta_av_times = 0.5*(bin_edges[:-1] + bin_edges[1:]) # bin centres
+         plot_stats_minflux(deltas, durations_proper, tdsmall, delta_averages, delta_av_times, showTimeAverages=True)
+     else:
+         plot_stats_minflux(deltas, durations_proper, tdsmall, data['efo'], None)
+
+
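The sequential-id remapping used above, in isolation: np.unique's return_inverse maps arbitrary trace ids onto contiguous ids starting at 1, so that half-integer bin edges give exactly one binned_statistic bin per trace. A small worked sketch with synthetic ids:

    import numpy as np
    from scipy.stats import binned_statistic

    tid = np.array([7, 7, 42, 42, 42, 99])          # non-sequential MINFLUX trace ids
    tim = np.array([0.0, 0.1, 5.0, 5.1, 5.2, 9.0])  # matching time stamps
    uids, revids = np.unique(tid, return_inverse=True)
    ids = np.arange(1, uids.size+1, dtype='int32')[revids]  # -> [1 1 2 2 2 3]
    bins = np.arange(int(ids.max())+1) + 0.5                # edges 0.5, 1.5, 2.5, 3.5
    starts, _, _ = binned_statistic(ids, tim, statistic='min', bins=bins)  # [0.0, 5.0, 9.0]
    ends, _, _ = binned_statistic(ids, tim, statistic='max', bins=bins)    # [0.1, 5.2, 9.0]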
+ # similar version but now using a pipeline
+ def analyse_locrate(data,datasource='Localizations',showTimeAverages=True, plot=True):
+     curds = data.selectedDataSourceKey
+     data.selectDataSource(datasource)
+     bins = np.arange(int(data['clumpIndex'].max())+1) + 0.5
+     counts, bin_edges, binnumber = binned_statistic(data['clumpIndex'],data['tim'],statistic='count', bins=bins)
+     starts, bin_edges, binnumber = binned_statistic(data['clumpIndex'],data['tim'],statistic='min', bins=bins)
+     # for some reason we seem to get empty bins, i.e. the original clumpIndices are non-consecutive
+     # NOTE: investigate IO of NPY MINFLUX data to see why this can happen!
+     starts = starts[counts > 0]
+     ends, bin_edges, binnumber = binned_statistic(data['clumpIndex'],data['tim'],statistic='max', bins=bins)
+     ends = ends[counts > 0]
+
+     durations = ends - starts
+     deltas = starts[1:]-ends[:-1]
+     # now we specifically look for the deltas within a trace
+     tdiff = data['tim'][1:]-data['tim'][:-1]
+     tracejump = data['clumpIndex'][1:]-data['clumpIndex'][:-1] # nonzero at all positions where the trace ID changes
+     tdintrace = tdiff[tracejump < 0.1] # and now we exclude all trace-jump deltas, keeping only in-trace ones
+     tdmedian = np.median(tdintrace)
+     durations_proper = durations + tdmedian # we count one extra localisation, using the median duration
+     # the extra is because we leave at least one localisation out of the total timing when we subtract ends-starts
+
+     lenx_um = 1e-3*(data['x'].max()-data['x'].min())
+     leny_um = 1e-3*(data['y'].max()-data['y'].min())
+     area_string = 'area %.1fx%.1f um^2' % (lenx_um,leny_um)
+     data.selectDataSource(curds)
+
+     if plot:
+         if showTimeAverages:
+             delta_averages, bin_edges, binnumber = binned_statistic(starts[:-1],deltas,statistic='mean', bins=50)
+             delta_av_times = 0.5*(bin_edges[:-1] + bin_edges[1:]) # bin centres
+             plot_stats_minflux(deltas, durations_proper, tdintrace, delta_averages, delta_av_times,
+                                showTimeAverages=True, dsKey=datasource, areaString=area_string)
+         else:
+             plot_stats_minflux(deltas, durations_proper, tdintrace, data['efo'], None, dsKey=datasource, areaString=area_string)
+
+     return (starts,ends,deltas,durations_proper,tdintrace)
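A usage sketch for the pipeline variant, assuming a PYME pipeline already loaded with MINFLUX localisation data (the file name is hypothetical, and the exact pipeline construction may differ between PYME versions):

    from PYME.LMVis.pipeline import Pipeline
    from PYMEcs.Analysis.MINFLUX import analyse_locrate

    pipeline = Pipeline(filename='minflux_run.h5r')  # hypothetical input file
    starts, ends, deltas, durations, tdintrace = analyse_locrate(
        pipeline, datasource='Localizations', showTimeAverages=True)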
PYMEcs/Analysis/MapUtils.py
@@ -0,0 +1,77 @@
+ import numpy as np
+ from PYME.IO.MetaDataHandler import NestedClassMDHandler
+ from PYME.IO.image import ImageStack
+
+
+ def defaultmapname(mdh):
+     if mdh['CameraMap.Type'] == 'mean':
+         prefix = 'dark'
+     else:
+         prefix = 'variance'
+
+     itime = int(1000*mdh['Camera.IntegrationTime'])
+
+     return "%s_%dms.%s" % (prefix, itime, 'tif')
+
+ def mkdestarr(img):
+     sensorSize = [2048,2048]
+     sensorSize[0] = int(img.mdh['Camera.SensorWidth'])
+     sensorSize[1] = int(img.mdh['Camera.SensorHeight'])
+
+     destmap = np.zeros(sensorSize,dtype='float64')
+
+     if img.mdh['CameraMap.Type'] == 'mean':
+         maptype = 'dark'
+         destmap.fill(img.mdh['Camera.ADOffset'])
+     else:
+         maptype = 'variance'
+         destmap.fill(img.mdh['Camera.ReadNoise']**2)
+
+     return destmap
+
+ def insertvmap(sourceim, destarr, validMap):
+     smdh = sourceim.mdh
+     px = int(smdh['CameraMap.ValidROI.ROIOriginX']) # zero-based index
+     py = int(smdh['CameraMap.ValidROI.ROIOriginY']) # zero-based index
+     wx = int(smdh['CameraMap.ValidROI.ROIWidth'])
+     wy = int(smdh['CameraMap.ValidROI.ROIHeight'])
+
+     destarr[px:px+wx,py:py+wy] = sourceim.data[px:px+wx,py:py+wy,:].squeeze()
+     validMap[px:px+wx,py:py+wy] = 1
+
+ # not implemented yet
+ def checkMapCompat(img,mdh):
+     pass
+
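checkMapCompat is still a stub; one possible check, sketched under the assumption that combinable maps must agree on map type, sensor geometry and integration time (metadata keys as used elsewhere in this file):

    def checkMapCompat(img, mdh):
        # hypothetical sketch: refuse to combine maps of different type or camera settings
        for key in ['CameraMap.Type', 'Camera.SensorWidth', 'Camera.SensorHeight',
                    'Camera.IntegrationTime']:
            if img.mdh[key] != mdh[key]:
                raise RuntimeError("map incompatibility: %s differs (%s vs %s)" %
                                   (key, img.mdh[key], mdh[key]))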
+ # we need to add some sanity checking so that the right maps are combined
+ # and incorrect ones are not combined by accident
+ def combine_maps(maps, return_validMap=False):
+     destarr = None
+     mapimgs = []
+     for map in maps:
+         mapimg = ImageStack(filename=map)
+         mapimgs.append(mapimg)
+         if destarr is None:
+             mdh = NestedClassMDHandler(mapimg.mdh)
+             destarr = mkdestarr(mapimg)
+             validMap = np.zeros_like(destarr,dtype='int')
+         else:
+             checkMapCompat(mapimg,mdh)
+         insertvmap(mapimg, destarr, validMap)
+
+     mdh.setEntry('CameraMap.combinedFromMaps', maps)
+     mdh.setEntry('CameraMap.ValidROI.ROIHeight', mapimgs[0].mdh['Camera.SensorHeight'])
+     mdh.setEntry('CameraMap.ValidROI.ROIWidth', mapimgs[0].mdh['Camera.SensorWidth'])
+     mdh.setEntry('CameraMap.ValidROI.ROIOriginX', 0)
+     mdh.setEntry('CameraMap.ValidROI.ROIOriginY', 0)
+
+     combinedMap = ImageStack(destarr, mdh=mdh)
+     if return_validMap:
+         vmdh = NestedClassMDHandler(mdh)
+         vmdh.setEntry('CameraMap.ValidMask', True)
+         return (combinedMap,ImageStack(validMap, mdh=vmdh))
+     else:
+         return combinedMap
+
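A usage sketch for combine_maps (module path inferred from the file listing above; the file names are hypothetical partial-ROI dark maps for the same camera):

    from PYMEcs.Analysis.MapUtils import combine_maps

    # each map covers a different valid ROI of the sensor; the combined map fills the
    # rest with the metadata defaults (ADOffset for dark maps, ReadNoise**2 for variance maps)
    maps = ['dark_100ms_roi1.tif', 'dark_100ms_roi2.tif']
    combined, valid = combine_maps(maps, return_validMap=True)
    print(combined.data.shape, valid.data.shape)  # full sensor size, plus a 0/1 validity mask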