PYME-extra 1.0.4.post0 (pyme_extra-1.0.4.post0-py3-none-any.whl)
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- PYMEcs/Acquire/Actions/__init__.py +0 -0
- PYMEcs/Acquire/Actions/custom.py +167 -0
- PYMEcs/Acquire/Hardware/LPthreadedSimple.py +248 -0
- PYMEcs/Acquire/Hardware/LPthreadedSimpleSim.py +246 -0
- PYMEcs/Acquire/Hardware/NikonTiFlaskServer.py +45 -0
- PYMEcs/Acquire/Hardware/NikonTiFlaskServerT.py +59 -0
- PYMEcs/Acquire/Hardware/NikonTiRESTClient.py +73 -0
- PYMEcs/Acquire/Hardware/NikonTiSim.py +35 -0
- PYMEcs/Acquire/Hardware/__init__.py +0 -0
- PYMEcs/Acquire/Hardware/driftTrackGUI.py +329 -0
- PYMEcs/Acquire/Hardware/driftTrackGUI_n.py +472 -0
- PYMEcs/Acquire/Hardware/driftTracking.py +424 -0
- PYMEcs/Acquire/Hardware/driftTracking_n.py +433 -0
- PYMEcs/Acquire/Hardware/fakeCamX.py +15 -0
- PYMEcs/Acquire/Hardware/offsetPiezoRESTCorrelLog.py +38 -0
- PYMEcs/Acquire/__init__.py +0 -0
- PYMEcs/Analysis/MBMcollection.py +552 -0
- PYMEcs/Analysis/MINFLUX.py +280 -0
- PYMEcs/Analysis/MapUtils.py +77 -0
- PYMEcs/Analysis/NPC.py +1176 -0
- PYMEcs/Analysis/Paraflux.py +218 -0
- PYMEcs/Analysis/Simpler.py +81 -0
- PYMEcs/Analysis/Sofi.py +140 -0
- PYMEcs/Analysis/__init__.py +0 -0
- PYMEcs/Analysis/decSofi.py +211 -0
- PYMEcs/Analysis/eventProperties.py +50 -0
- PYMEcs/Analysis/fitDarkTimes.py +569 -0
- PYMEcs/Analysis/objectVolumes.py +20 -0
- PYMEcs/Analysis/offlineTracker.py +130 -0
- PYMEcs/Analysis/stackTracker.py +180 -0
- PYMEcs/Analysis/timeSeries.py +63 -0
- PYMEcs/Analysis/trackFiducials.py +186 -0
- PYMEcs/Analysis/zerocross.py +91 -0
- PYMEcs/IO/MINFLUX.py +851 -0
- PYMEcs/IO/NPC.py +117 -0
- PYMEcs/IO/__init__.py +0 -0
- PYMEcs/IO/darkTimes.py +19 -0
- PYMEcs/IO/picasso.py +219 -0
- PYMEcs/IO/tabular.py +11 -0
- PYMEcs/__init__.py +0 -0
- PYMEcs/experimental/CalcZfactor.py +51 -0
- PYMEcs/experimental/FRC.py +338 -0
- PYMEcs/experimental/ImageJROItools.py +49 -0
- PYMEcs/experimental/MINFLUX.py +1537 -0
- PYMEcs/experimental/NPCcalcLM.py +560 -0
- PYMEcs/experimental/Simpler.py +369 -0
- PYMEcs/experimental/Sofi.py +78 -0
- PYMEcs/experimental/__init__.py +0 -0
- PYMEcs/experimental/binEventProperty.py +187 -0
- PYMEcs/experimental/chaining.py +23 -0
- PYMEcs/experimental/clusterTrack.py +179 -0
- PYMEcs/experimental/combine_maps.py +104 -0
- PYMEcs/experimental/eventProcessing.py +93 -0
- PYMEcs/experimental/fiducials.py +323 -0
- PYMEcs/experimental/fiducialsNew.py +402 -0
- PYMEcs/experimental/mapTools.py +271 -0
- PYMEcs/experimental/meas2DplotDh5view.py +107 -0
- PYMEcs/experimental/mortensen.py +131 -0
- PYMEcs/experimental/ncsDenoise.py +158 -0
- PYMEcs/experimental/onTimes.py +295 -0
- PYMEcs/experimental/procPoints.py +77 -0
- PYMEcs/experimental/pyme2caml.py +73 -0
- PYMEcs/experimental/qPAINT.py +965 -0
- PYMEcs/experimental/randMap.py +188 -0
- PYMEcs/experimental/regExtraCmaps.py +11 -0
- PYMEcs/experimental/selectROIfilterTable.py +72 -0
- PYMEcs/experimental/showErrs.py +51 -0
- PYMEcs/experimental/showErrsDh5view.py +58 -0
- PYMEcs/experimental/showShiftMap.py +56 -0
- PYMEcs/experimental/snrEvents.py +188 -0
- PYMEcs/experimental/specLabeling.py +51 -0
- PYMEcs/experimental/splitRender.py +246 -0
- PYMEcs/experimental/testChannelByName.py +36 -0
- PYMEcs/experimental/timedSpecies.py +28 -0
- PYMEcs/experimental/utils.py +31 -0
- PYMEcs/misc/ExtraCmaps.py +177 -0
- PYMEcs/misc/__init__.py +0 -0
- PYMEcs/misc/configUtils.py +169 -0
- PYMEcs/misc/guiMsgBoxes.py +27 -0
- PYMEcs/misc/mapUtils.py +230 -0
- PYMEcs/misc/matplotlib.py +136 -0
- PYMEcs/misc/rectsFromSVG.py +182 -0
- PYMEcs/misc/shellutils.py +1110 -0
- PYMEcs/misc/utils.py +205 -0
- PYMEcs/misc/versionCheck.py +20 -0
- PYMEcs/misc/zcInfo.py +90 -0
- PYMEcs/pyme_warnings.py +4 -0
- PYMEcs/recipes/__init__.py +0 -0
- PYMEcs/recipes/base.py +75 -0
- PYMEcs/recipes/localisations.py +2380 -0
- PYMEcs/recipes/manipulate_yaml.py +83 -0
- PYMEcs/recipes/output.py +177 -0
- PYMEcs/recipes/processing.py +247 -0
- PYMEcs/recipes/simpler.py +290 -0
- PYMEcs/version.py +2 -0
- pyme_extra-1.0.4.post0.dist-info/METADATA +114 -0
- pyme_extra-1.0.4.post0.dist-info/RECORD +101 -0
- pyme_extra-1.0.4.post0.dist-info/WHEEL +5 -0
- pyme_extra-1.0.4.post0.dist-info/entry_points.txt +3 -0
- pyme_extra-1.0.4.post0.dist-info/licenses/LICENSE +674 -0
- pyme_extra-1.0.4.post0.dist-info/top_level.txt +1 -0
PYMEcs/experimental/clusterTrack.py
@@ -0,0 +1,179 @@
+import numpy as np
+import sys
+from scipy import ndimage
+import matplotlib.pyplot as plt
+
+import logging
+logger = logging.getLogger(__file__)
+
+from traits.api import HasTraits, Str, Int, CStr, List, Enum, Float
+from traitsui.api import View, Item, Group
+from traitsui.menu import OKButton, CancelButton, OKCancelButtons
+
+class FilterChoice(HasTraits):
+    windowSize = Int(11)
+    filterType = Enum(['Gaussian','Median'])
+    funcmap = {
+        'Gaussian' : ndimage.gaussian_filter1d,
+        'Median' : ndimage.median_filter}
+
+    def get_filter(self):
+
+        def filterfunc(data):
+            return self.funcmap[self.filterType](data,self.windowSize)
+
+        return filterfunc
+
+class GetTime(HasTraits):
+    alignmentTime = Int(0)
+    averagePeriod = Int(50)
+
+def zeroshift(t,data,navg=50, alignmentTime=0):
+    ti,idx = np.unique(t.astype('int'),return_index=True)
+    di = data[idx]
+    if alignmentTime<0:
+        alignmentTime = 0
+    nmin = min(alignmentTime,di.shape[0])
+    nmax = min(alignmentTime+navg,di.shape[0])
+    offset = di[nmin:nmax].mean()
+    return data - offset
+
+def atLeastRange(rmin=-20,rmax=-20):
+    ymin,ymax = plt.ylim()
+    if ymin > rmin:
+        plt.ylim(ymin=rmin)
+    if ymax < rmax:
+        plt.ylim(ymax=rmax)
+
+class ClusterTracker:
+    """
+
+    """
+    def __init__(self, visFr):
+        self.visFr = visFr
+        self.pipeline = visFr.pipeline
+        self.clusterTracks = []
+        self.alignmentTime = 0
+        self.averagePeriod = 50
+
+        visFr.AddMenuItem('Experimental>Deprecated>Clusters', 'DBSCAN Clump', self.OnClumpDBSCAN,
+                          helpText='Calculate ClumpIndex using DBSCAN algorithm')
+        visFr.AddMenuItem('Experimental>Deprecated>Clusters', 'Track Clumps', self.OnTrackClumps,
+                          helpText='extract the tracks for all clusters (clumps) that we found')
+        visFr.AddMenuItem('Experimental>Deprecated>Clusters', 'Plot Tracks', self.OnShowTracks,
+                          helpText='plot tracks of clusters (clumps) that we found')
+        visFr.AddMenuItem('Experimental>Deprecated>Clusters', 'Plot Tracks Filtered', self.OnShowTracksFiltered,
+                          helpText='plot filtered tracks of clusters (clumps) that we found')
+        visFr.AddMenuItem('Experimental>Deprecated>Clusters', 'Clear Tracks', self.OnClearTracks,
+                          helpText='clear tracks from memory')
+        visFr.AddMenuItem('Experimental>Deprecated>Clusters', 'Set Alignment Time', self.OnSetAlignmentTime,
+                          helpText='set alignment time')
+
+
+    def OnClumpDBSCAN(self, event=None):
+        """
+        Runs sklearn DBSCAN clustering algorithm on pipeline filtered results using the GUI defined in the DBSCAN
+        recipe module.
+
+        Args are user defined through GUI
+            searchRadius: search radius for clustering
+            minClumpSize: number of points within eps required for a given point to be considered a core point
+
+        This version is generally used to identify clumps identifying fiduciaries and therefore the
+        default searchRadius is set fairly generous.
+        """
+        from PYMEcs.recipes import localisations
+
+        clumper = localisations.DBSCANClustering2(minClumpSize = 50, searchRadius = 20.0)
+        if clumper.configure_traits(kind='modal'):
+            namespace = {clumper.inputName: self.pipeline}
+            clumper.execute(namespace)
+
+            self.pipeline.addColumn(clumper.outputName, namespace[clumper.outputName]['dbscanClumpID'])
+
+    def OnTrackClumps(self, event=None):
+        pipeline = self.pipeline
+        from PYMEcs.recipes import localisations
+        clumper = localisations.DBSCANClustering2()
+        clusterID = clumper.outputName
+
+        if not clusterID in pipeline.keys():
+            logger.error('Cannot find column %s in pipeline' % clusterID)
+            return
+
+        clusterIDs = pipeline[clusterID].astype('int')
+        idmax = max(clusterIDs)
+
+        for id in range(1,idmax+1):
+            thiscluster = clusterIDs == id
+            t_id = pipeline['t'][thiscluster]
+            x_id = pipeline['x'][thiscluster]
+            y_id = pipeline['y'][thiscluster]
+            z_id = pipeline['z'][thiscluster]
+            I = np.argsort(t_id)
+            self.clusterTracks.append([t_id[I],x_id[I],y_id[I],z_id[I]])
+
+
+    def OnShowTracks(self, event=None):
+        import matplotlib.pyplot as plt
+        if len(self.clusterTracks) > 0:
+            navg = self.averagePeriod
+            atime = self.alignmentTime
+            plt.figure()
+            for entry in self.clusterTracks:
+                t,x,y,z = entry
+                plt.plot(t,zeroshift(t,x, navg, atime))
+            plt.title('x tracks')
+            atLeastRange(-20,20)
+            plt.figure()
+            for entry in self.clusterTracks:
+                t,x,y,z = entry
+                plt.plot(t,zeroshift(t,y, navg, atime))
+            plt.title('y tracks')
+            atLeastRange(-20,20)
+            plt.figure()
+            for entry in self.clusterTracks:
+                t,x,y,z = entry
+                plt.plot(t,zeroshift(t,z, navg, atime))
+            plt.title('z tracks')
+            atLeastRange(-20,20)
+
+    def OnShowTracksFiltered(self, event=None):
+        import matplotlib.pyplot as plt
+        fc = FilterChoice()
+        if len(self.clusterTracks) > 0 and fc.configure_traits(kind='modal'):
+            navg = self.averagePeriod
+            atime = self.alignmentTime
+            filterfunc = fc.get_filter()
+            plt.figure()
+            for entry in self.clusterTracks:
+                t,x,y,z = entry
+                plt.plot(t,filterfunc(zeroshift(t,x, navg, atime)))
+            plt.title('x tracks')
+            atLeastRange(-20,20)
+            plt.figure()
+            for entry in self.clusterTracks:
+                t,x,y,z = entry
+                plt.plot(t,filterfunc(zeroshift(t,y, navg, atime)))
+            plt.title('y tracks')
+            atLeastRange(-20,20)
+            plt.figure()
+            for entry in self.clusterTracks:
+                t,x,y,z = entry
+                plt.plot(t,filterfunc(zeroshift(t,z, navg, atime)))
+            plt.title('z tracks')
+            atLeastRange(-20,20)
+
+    def OnClearTracks(self, event=None):
+        self.clusterTracks = []
+
+    def OnSetAlignmentTime(self, event=None):
+        gtime = GetTime()
+        if gtime.configure_traits(kind='modal'):
+            self.alignmentTime = gtime.alignmentTime
+            self.averagePeriod = gtime.averagePeriod
+
+def Plug(visFr):
+    """Plugs this module into the gui"""
+    visFr.clusterTracker = ClusterTracker(visFr)
+
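For orientation, here is a minimal, self-contained sketch of what the zeroshift helper above does with its defaults (alignmentTime=0, navg=50): it subtracts the mean of the first navg unique-time samples so that every plotted track starts near zero. The data below are synthetic and purely illustrative.

import numpy as np

# illustrative only: a slowly drifting track sampled at integer frame times
t = np.arange(400, dtype=float)
x = 100.0 + 0.02 * t + np.random.normal(scale=0.5, size=t.size)

# the zero-alignment step performed by zeroshift(t, x, navg=50, alignmentTime=0)
_, idx = np.unique(t.astype('int'), return_index=True)
offset = x[idx][0:50].mean()
x_zeroed = x - offset
print(round(x_zeroed[:50].mean(), 3))  # approximately 0.0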
PYMEcs/experimental/combine_maps.py
@@ -0,0 +1,104 @@
+import wx
+import os.path
+
+
+def on_map(image, parentWindow=None, glCanvas=None):
+    from PYME.Analysis import gen_sCMOS_maps
+    from PYME.DSView import ViewIm3D
+    from PYMEcs.Analysis.MapUtils import combine_maps
+
+    # combine maps with dialogue here
+    # also show valid map
+
+    with wx.FileDialog(parentWindow, "Choose maps", wildcard='TIFF (*.tif)|*.tif',
+                       style=wx.FD_OPEN | wx.FD_MULTIPLE) as dialog:
+
+        if dialog.ShowModal() == wx.ID_CANCEL:
+            return
+
+        filelist = dialog.GetPaths()
+
+    combinedMap, vMap = combine_maps(filelist,return_validMap=True)
+
+    if combinedMap.mdh['CameraMap.Type'] == 'mean':
+        mapType = 'dark'
+    elif combinedMap.mdh['CameraMap.Type'] == 'variance':
+        mapType = 'variance'
+
+    if mapType == 'dark':
+        ViewIm3D(combinedMap, title='Dark Map', parent=parentWindow, glCanvas=glCanvas)
+    else:
+        ViewIm3D(combinedMap, title='Variance Map', parent=parentWindow, glCanvas=glCanvas)
+
+    ViewIm3D(vMap, title='Valid Regions', parent=parentWindow, glCanvas=glCanvas)
+
+    if mapType == 'dark':
+        mapname = gen_sCMOS_maps.mkDefaultPath('dark', combinedMap.mdh)
+    else:
+        mapname = gen_sCMOS_maps.mkDefaultPath('variance', combinedMap.mdh)
+
+    # on windows we may need to pass the full path to defaultFile to force selecting the
+    # directory we want; otherwise the last used directory may be used on some
+    # windows 7+ installs, see also https://forums.wxwidgets.org/viewtopic.php?t=44404
+    import platform
+    if platform.system() == 'Windows':
+        fname = mapname
+    else:
+        fname = os.path.basename(mapname)
+    map_dlg = wx.FileDialog(parentWindow, message="Save dark map as...",
+                            defaultDir=os.path.dirname(mapname),
+                            defaultFile=fname,
+                            wildcard='TIFF (*.tif)|*.tif',
+                            style=wx.FD_SAVE | wx.FD_OVERWRITE_PROMPT)
+
+    if map_dlg.ShowModal() == wx.ID_OK:
+        mapfn = map_dlg.GetPath()
+        combinedMap.Save(filename=mapfn)
+
+from PYME.recipes.batchProcess import bake
+from PYME.recipes import modules
+import os
+
+def on_bake(image, parentWindow=None, glCanvas=None):
+    with wx.FileDialog(parentWindow, "Choose all series", wildcard='H5 (*.h5)|*.h5',
+                       style=wx.FD_OPEN | wx.FD_MULTIPLE) as dialog:
+
+        if dialog.ShowModal() == wx.ID_CANCEL:
+            return
+
+        filelist = dialog.GetPaths()
+
+    inputGlobs = {'input' : filelist}
+    map_dir = os.path.dirname(filelist[0])
+    output_dir = os.path.join(map_dir,'analysis')
+
+    if not os.path.exists(output_dir):
+        os.makedirs(output_dir)
+
+    recipe_str = """
+- processing.DarkAndVarianceMap:
+    input: input
+    output_dark: dark
+    output_variance: variance
+- output.ImageOutput:
+    filePattern: '{output_dir}/{file_stub}_dark.tif'
+    inputName: dark
+- output.ImageOutput:
+    filePattern: '{output_dir}/{file_stub}_variance.tif'
+    inputName: variance
+"""
+
+    try:
+        recipe = modules.ModuleCollection.fromYAML(recipe_str)
+    except AttributeError:
+        from PYME.recipes.recipe import Recipe
+        recipe = Recipe.fromYAML(recipe_str)
+
+    bake(recipe, inputGlobs, output_dir)
+
+
+def Plug(dsviewer):
+    dsviewer.AddMenuItem(menuName='Experimental>Map Tools', itemName='Analyse tiled ROI Map Series',
+                         itemCallback = lambda e : on_bake(dsviewer.image, dsviewer, dsviewer.glCanvas))
+    dsviewer.AddMenuItem(menuName='Experimental>Map Tools', itemName='Combine tiled ROI Maps',
+                         itemCallback = lambda e : on_map(dsviewer.image, dsviewer, dsviewer.glCanvas))
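As a rough, hypothetical illustration of the output naming configured by the recipe string above: assuming, as the placeholder names suggest, that bake fills {output_dir} with the analysis directory and {file_stub} with the input file's base name, one input series would produce paths like the following. The input path below is made up for the example.

import os.path

pattern = '{output_dir}/{file_stub}_dark.tif'
input_file = '/data/maps/ROI_series_0001.h5'  # hypothetical input series
output_dir = os.path.join(os.path.dirname(input_file), 'analysis')
file_stub = os.path.splitext(os.path.basename(input_file))[0]
print(pattern.format(output_dir=output_dir, file_stub=file_stub))
# /data/maps/analysis/ROI_series_0001_dark.tif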
PYMEcs/experimental/eventProcessing.py
@@ -0,0 +1,93 @@
+import matplotlib.pyplot as plt
+from PYMEcs.pyme_warnings import warn
+
+class EventProcessing:
+    """
+    plugins to conduct some event processing from events in h5r data
+    currently mostly using events from Piezobased tracking
+    """
+    def __init__(self, visFr):
+        self.visFr = visFr
+        self.pipeline = visFr.pipeline
+
+        visFr.AddMenuItem('Experimental',
+                          'Display Piezo events in h5r file',
+                          self.OnDisplayEvents,
+                          helpText='display recorded events (in the PYMEAcquire sense) from an h5r file')
+
+    def OnDisplayEvents(self, event=None):
+        from PYME.Analysis import piecewiseMapping
+        p = self.pipeline
+
+        if p.events is None:
+            warn('No events in pipeline')
+            return
+
+        offupd = piecewiseMapping.GeneratePMFromEventList(p.events, p.mdh, p.mdh.getEntry('StartTime'), 0, b'PiezoOffsetUpdate',0)
+        tminutes = offupd.xvals * p.mdh['Camera.CycleTime'] / 60.0
+
+        offsets = piecewiseMapping.GeneratePMFromEventList(p.events, p.mdh, p.mdh.getEntry('StartTime'), 0, b'PiezoOffset',0)
+        correlamp = piecewiseMapping.GeneratePMFromEventList(p.events, p.mdh, p.mdh.getEntry('StartTime'), 0, b'CorrelationAmplitude',0)
+
+        has_offsets = offsets.yvals.size > 0
+        has_offupd = offupd.yvals.size > 0 # not using this one at the moment
+        has_correlamp = correlamp.yvals.size > 0
+        has_drift = 'driftx' in p.keys()
+
+        plot_rows = 0
+        if has_offsets:
+            plot_rows += 1
+        if has_correlamp:
+            plot_rows += 1
+        if has_drift:
+            plot_rows += 3
+
+        row = 1
+        fig, axs = plt.subplots(nrows=plot_rows,figsize=(6.4,6.4), num='OffsetPiezo Event Analysis')
+        if has_drift:
+            plt.subplot(plot_rows,1,row)
+            plt.plot(p['t'],p['driftx'])
+            plt.title('Drift in x (nm)')
+            plt.ylabel('Drift')
+            plt.subplot(plot_rows,1,row+1)
+            plt.plot(p['t'],p['drifty'])
+            plt.title('Drift in y (nm)')
+            plt.ylabel('Drift')
+            plt.subplot(plot_rows,1,row+2)
+            plt.plot(p['t'],1e3*p['driftz']) # is driftz in um?
+            plt.title('Drift in z (nm)')
+            plt.ylabel('Drift')
+            row += 3
+
+        if has_offsets:
+            offsVSt = offsets(p['t']-0.01)
+            plt.subplot(plot_rows,1,row)
+            plt.plot(p['t'],offsVSt)
+            plt.xlabel('time (frame number)')
+            plt.ylabel('offset (um)')
+            plt.title('OffsetPiezo offsets from PiezoOffset events')
+            row += 1
+
+        if has_correlamp:
+            campVSt = correlamp(p['t']-0.01)
+            plt.subplot(plot_rows,1,row)
+            plt.plot(p['t'],campVSt)
+            plt.xlabel('time (frame number)')
+            plt.ylabel('amp')
+            plt.title('normalised correlation amplitude')
+            plt.ylim(0.3,1.2)
+            axs[-1].set_yticks([0.75],minor=True)
+            plt.grid(which='both',axis='y')
+            row += 1
+
+        plt.tight_layout()
+
+        #plt.step(tminutes,offupd.yvals,where='post')
+        #plt.xlabel('time (minutes)')
+        #plt.ylabel('OffsetPiezo offset (um)')
+        #plt.title('OffsetPiezo offsets from PiezoOffsetUpdate events')
+
+
+def Plug(visFr):
+    """Plugs this module into the gui"""
+    EventProcessing(visFr)
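The PiezoOffset and CorrelationAmplitude events above are turned into piecewise mappings that are then evaluated at slightly shifted frame times. Below is a self-contained sketch of that kind of piecewise-constant lookup, with invented event times and values; it illustrates the idea rather than the actual GeneratePMFromEventList implementation.

import numpy as np

event_t = np.array([0.0, 100.0, 250.0])  # frame numbers of offset updates (made up)
event_v = np.array([0.0, 0.35, 0.8])     # offset values after each update (made up)

def step_lookup(t):
    # value in force at time t: the value set by the last event at or before t
    idx = np.clip(np.searchsorted(event_t, t, side='right') - 1, 0, len(event_v) - 1)
    return event_v[idx]

frames = np.arange(0, 400)
offsets_vs_t = step_lookup(frames - 0.01)  # same small backwards shift as in the plot code
print(offsets_vs_t[[0, 150, 300]])         # offsets in force at frames 0, 150 and 300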
PYMEcs/experimental/fiducials.py
@@ -0,0 +1,323 @@
+# -*- coding: utf-8 -*-
+"""
+Created on Tue Jun 30 10:24:45 2015
+
+@author: Kenny
+"""
+
+# this is Kenny's code with minimal changes to run as standonlone plugin
+# using the new PYME.config system
+
+# mostly used as a comparison to the newer recipe based implementation
+# will be dropped once the recipe based version is complete
+
+import wx
+import numpy as np
+from scipy import ndimage
+from collections import OrderedDict
+
+import logging
+logger = logging.getLogger(__name__)
+
+def foffset(t,ft,navg=100):
+    tu,idx = np.unique(t.astype('int'), return_index=True)
+    fu = ft[idx]
+    offs = fu[0:min(navg,fu.shape[0])].mean()
+    return offs
+
+def makeFilter(filtFunc):
+    '''wrapper function for different filters'''
+    def ffcn(t, data, scale):
+        out = {}
+        for k, v in data.items():
+            r_v = v[~np.isnan(v)]
+            r_t = t[~np.isnan(v)]
+            out[k] = filtFunc(np.interp(t, r_t, r_v), scale)
+        return out
+    return ffcn
+
+FILTER_FUNCS = {
+    'Gaussian' : makeFilter(ndimage.gaussian_filter),
+    'Uniform' : makeFilter(ndimage.uniform_filter),
+    'Median' : makeFilter(ndimage.median_filter)
+}
+
+def _extractAverageTrajectory(pipeline, clumpRadiusVar = 'error_x', clumpRadiusMultiplier=5.0,
+                              timeWindow=25, filter='Gaussian', filterScale=10.0):
+
+    #import PYME.Analysis.trackUtils as trackUtils
+    import PYME.Analysis.points.DeClump.deClump as deClump
+    from scipy.optimize import fmin
+    #track beads through frames
+    if clumpRadiusVar == '1.0':
+        delta_x = 0*pipeline['x'] + clumpRadiusMultiplier
+    else:
+        delta_x = clumpRadiusMultiplier*pipeline[clumpRadiusVar]
+
+    t = pipeline['t'].astype('i')
+    x = pipeline['x'].astype('f4')
+    y = pipeline['y'].astype('f4')
+    delta_x = delta_x.astype('f4')
+
+    I = np.argsort(t)
+
+    clumpIndex = np.zeros(len(x), dtype='i')
+    clumpIndex[I] = deClump.findClumpsN(t[I], x[I], y[I], delta_x[I], timeWindow)
+    #trackUtils.findTracks(pipeline, clumpRadiusVar,clumpRadiusMultiplier, timeWindow)
+
+    #longTracks = pipeline['clumpSize'] > 50
+
+    #x = x[longTracks].copy()
+    #y = pipeline['y_raw'][longTracks].copy()
+    #t = pipeline['t'][longTracks].copy() #.astype('i')
+    #clumpIndex = pipeline['clumpIndex'][longTracks].copy()
+
+    tMax = t.max()
+
+    clumpIndices = list(set(clumpIndex))
+
+    x_f = []
+    y_f = []
+    clump_sizes = []
+
+    t_f = np.arange(0, tMax + 1, dtype='i')
+
+    #loop over all our clumps and extract trajectories
+    for ci in clumpIndices:
+        if ci > 0:
+            clump_mask = (clumpIndex == ci)
+            x_i = x[clump_mask]
+            clump_size = len(x_i)
+
+            if clump_size > 50:
+                y_i = y[clump_mask]
+                t_i = t[clump_mask].astype('i')
+
+                x_i_f = np.NaN*np.ones_like(t_f)
+                x_i_f[t_i]= x_i - x_i.mean()
+
+                y_i_f = np.NaN*np.ones_like(t_f)
+                y_i_f[t_i]= y_i - y_i.mean()
+
+                #clumps.append((x_i_f, y_i_f))
+                x_f.append(x_i_f)
+                y_f.append(y_i_f)
+                clump_sizes.append(len(x_i))
+
+    #re-order to start with the largest clump
+    clumpOrder = np.argsort(clump_sizes)[::-1]
+    x_f = np.array(x_f)[clumpOrder,:]
+    y_f = np.array(y_f)[clumpOrder,:]
+
+    def _mf(p, meas):
+        '''calculate the offset between trajectories'''
+        m_adj = meas + np.hstack([[0], p])[:,None]
+
+        return np.nansum(np.nanvar(m_adj, axis=0))
+
+    #print x_f.shape, np.hstack([[0], np.random.randn(x_f.shape[0]-1)]).shape
+
+    def _align(meas, tol=.1):
+        n_iters = 0
+
+        dm_old = 5e12
+        dm = 4e12
+
+        mm = np.nanmean(meas, 0)
+
+        while ((dm_old - dm) > tol) and (n_iters < 50):
+            dm_old = dm
+            mm = np.nanmean(meas, 0)
+            d = np.nanmean(meas - mm, 1)
+            dm = sum(d**2)
+            meas = meas - d[:,None]
+            n_iters +=1
+            print(n_iters, dm)
+
+        mm = np.nanmean(meas, 0)
+        print('Finished:', n_iters, dm)
+        return mm
+
+    x_corr = _align(x_f)
+    y_corr = _align(y_f)
+
+    filtered_corr = FILTER_FUNCS[filter](t_f, {'x' : x_corr, 'y':y_corr}, filterScale)
+
+    return t_f, filtered_corr
+
+class FiducialAnalyser:
+    def __init__(self, visFr):
+        self.visFr = visFr
+
+        visFr.AddMenuItem('Experimental>Deprecated>FiducialsOld', "Estimate drift from Fiducials", self.FindBeadsAndTrack)
+        visFr.AddMenuItem('Experimental>Deprecated>FiducialsOld', 'Apply fiducial correction', self.OnApplyFiducial,
+                          helpText='Apply fiducial to x, y, z')
+        visFr.AddMenuItem('Experimental>Deprecated>FiducialsOld', 'Revert fiducial correction', self.OnRevertFiducial,
+                          helpText='Revert fiducial correction to x, y, z')
+
+    def FindBeadsAndTrack(self, event):
+        dlg = ExtractTrajectoriesDialog(self.visFr)
+        succ = dlg.ShowModal()
+        if succ == wx.ID_OK:
+            pipeline = self.visFr.pipeline
+
+            beadTraj = _extractAverageTrajectory(pipeline, dlg.GetClumpRadiusVariable(),
+                                                 dlg.GetClumpRadiusMultiplier(), dlg.GetClumpTimeWindow(),
+                                                 filter=dlg.GetFilterMethod(),filterScale=dlg.GetFilterScale())
+
+            self.ApplyCorrections([beadTraj,])
+
+    def ApplyCorrections(self, interpInfos):
+        """Averages drift from multiple files.
+        Adds the drift as fiducial_? to pipeline.inputMapping.
+        Overwrites the drift panel with ? + fiducial_?
+        """
+        pipeline = self.visFr.pipeline
+        count = len(interpInfos)
+        #master dim key list assuming first file is correct
+        dims = interpInfos[0][1].keys()
+
+        #loop over data sources and add drift info to each
+        logger.debug('about to update datasources...')
+        curds = pipeline.selectedDataSourceKey
+        for dsname in pipeline.dataSources:
+            logger.debug('updating datasource %s' % dsname)
+            pipeline.selectDataSource(dsname)
+            tCache = pipeline['t']
+            fudical_multi = np.zeros((count, len(dims), len(tCache))) #num of files, num of dims, num of time points
+            for i, j in enumerate(interpInfos):
+                realTime, filtered = j
+                for k, l in enumerate(dims):
+                    print(l)
+                    fudical_multi[i, k, :] = np.interp(tCache, realTime, filtered[l])
+
+            fuducial_mean = fudical_multi.mean(0)
+            for i, dim in enumerate(dims):
+                fiducial = 'fiducial_%s' % dim
+                pipeline.addColumn(fiducial, fuducial_mean[i,:]-foffset(tCache,fuducial_mean[i,:]))
+                logger.debug('setting attribute %s' % fiducial)
+        pipeline.Rebuild()
+        pipeline.selectDataSource(curds)
+
+    def OnApplyFiducial(self, event=None):
+        pipeline = self.visFr.pipeline
+        for dim in ('x','y','z'):
+            fiducial = 'fiducial_%s' % dim
+            if fiducial in pipeline.keys():
+                pipeline.mapping.setMapping(dim,'%s - %s' % (dim, fiducial))
+        pipeline.Rebuild()
+
+    def OnRevertFiducial(self, event=None):
+        pipeline = self.visFr.pipeline
+        changed = False
+        for dim in ('x','y','z'):
+            if dim in pipeline.mapping.mappings:
+                pipeline.mapping.mappings.pop(dim)
+                changed = True
+        if changed:
+            pipeline.Rebuild()
+
+    def Calculate(self, func, arg):
+        """Returns tuple of 3 objects.
+        realTime: arange from 0 to max time
+        interp: interpolated x, y, z
+        filtered: 'interp' filtered using the given filter function
+        """
+        pipeline = self.visFr.pipeline
+
+        realTime = np.arange(0, pipeline['t'].max())
+
+        interp = OrderedDict()
+        filtered = OrderedDict()
+        for dim in ['x', 'y', 'z']:
+            # print(dim)
+            if dim in pipeline.keys():
+                interp[dim] = np.interp(realTime, pipeline['t'], pipeline[dim])
+                filtered[dim] = func(interp[dim], arg)
+
+
+        return (realTime, interp, filtered)
+
+
+class ExtractTrajectoriesDialog(wx.Dialog):
+    def __init__(self, *args, **kwargs):
+        wx.Dialog.__init__(self, *args, **kwargs)
+
+        vsizer = wx.BoxSizer(wx.VERTICAL)
+
+        hsizer = wx.BoxSizer(wx.HORIZONTAL)
+
+        hsizer.Add(wx.StaticText(self, -1, 'Clump Radius: '), 0, wx.ALL|wx.ALIGN_CENTER_VERTICAL, 5)
+
+        self.tClumpRadMult = wx.TextCtrl(self, -1, '5.0', size=[30,-1])
+        hsizer.Add(self.tClumpRadMult, 0, wx.ALL|wx.ALIGN_CENTER_VERTICAL, 5)
+
+        hsizer.Add(wx.StaticText(self, -1, 'X'), 0, wx.ALL|wx.ALIGN_CENTER_VERTICAL, 5)
+
+        self.cClumpRadVar = wx.Choice(self, -1, choices=['1.0', 'error_x'])
+        self.cClumpRadVar.SetSelection(1)
+        hsizer.Add(self.cClumpRadVar,1, wx.ALL|wx.ALIGN_CENTER_VERTICAL, 5)
+
+        vsizer.Add(hsizer, 0, wx.ALL, 5)
+
+        hsizer = wx.BoxSizer(wx.HORIZONTAL)
+        hsizer.Add(wx.StaticText(self, -1, 'Time Window: '), 0, wx.ALL|wx.ALIGN_CENTER_VERTICAL, 5)
+
+        self.tClumpTime = wx.TextCtrl(self, -1, '25')
+        hsizer.Add(self.tClumpTime,1, wx.ALL|wx.ALIGN_CENTER_VERTICAL, 5)
+
+        vsizer.Add(hsizer, 0, wx.ALL, 5)
+
+        hsizer = wx.BoxSizer(wx.HORIZONTAL)
+        hsizer.Add(wx.StaticText(self, wx.ID_ANY, 'filter:'), 1, wx.ALL|wx.ALIGN_CENTER_VERTICAL, 5)
+        self.filter = wx.Choice(self, wx.ID_ANY, choices=['Gaussian', 'Uniform', 'Median'])
+        self.filter.SetSelection(0)
+        #self.filter.Bind(wx.EVT_CHOICE, self.OnFilterSelected)
+        hsizer.Add(self.filter, 1, wx.ALL|wx.ALIGN_CENTER_VERTICAL, 5)
+
+
+        vsizer.Add(hsizer)
+
+        hsizer = wx.BoxSizer(wx.HORIZONTAL)
+        self.argText = wx.StaticText(self, wx.ID_ANY, 'Filter scale:')
+        hsizer.Add(self.argText, 1, wx.ALL|wx.ALIGN_CENTER_VERTICAL, 5)
+        self.filtScale = wx.TextCtrl(self, wx.ID_ANY, '10')
+        hsizer.Add(self.filtScale, 1, wx.ALL|wx.ALIGN_CENTER_VERTICAL, 5)
+        vsizer.Add(hsizer)
+
+        btSizer = wx.StdDialogButtonSizer()
+
+        btn = wx.Button(self, wx.ID_OK)
+        btn.SetDefault()
+
+        btSizer.AddButton(btn)
+
+        btn = wx.Button(self, wx.ID_CANCEL)
+
+        btSizer.AddButton(btn)
+
+        btSizer.Realize()
+
+        vsizer.Add(btSizer, 0, wx.ALL, 5)
+
+        self.SetSizerAndFit(vsizer)
+
+    def GetClumpRadiusMultiplier(self):
+        return float(self.tClumpRadMult.GetValue())
+
+    def GetClumpRadiusVariable(self):
+        return self.cClumpRadVar.GetStringSelection()
+
+    def GetClumpTimeWindow(self):
+        return int(self.tClumpTime.GetValue())
+
+    def GetFilterMethod(self):
+        return self.filter.GetStringSelection()
+
+    def GetFilterScale(self):
+        return float(self.filtScale.GetValue())
+
+
+def Plug(visFr):
+    '''Plugs this module into the gui'''
+    visFr.experimentalFiducialAnalyzer = FiducialAnalyser(visFr)
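The _align routine above iteratively removes a per-trajectory offset (the mean deviation from the running average trajectory) until the summed squared offsets stop changing. The following standalone sketch reproduces the same update loop on made-up, NaN-padded tracks so the behaviour can be tried without a PYME pipeline.

import numpy as np

rng = np.random.default_rng(0)
t = np.arange(200)
truth = 0.02 * t  # common drift, arbitrary units
meas = np.vstack([truth + off + rng.normal(scale=0.3, size=t.size)
                  for off in (5.0, -2.0, 0.5)])
meas[0, 150:] = np.nan  # one bead disappears part-way through

def align(meas, tol=0.1, max_iters=50):
    dm_old, dm = 5e12, 4e12
    n_iters = 0
    while (dm_old - dm) > tol and n_iters < max_iters:
        dm_old = dm
        mm = np.nanmean(meas, axis=0)      # current average trajectory
        d = np.nanmean(meas - mm, axis=1)  # per-trajectory offset
        dm = np.sum(d ** 2)
        meas = meas - d[:, None]
        n_iters += 1
    return np.nanmean(meas, axis=0)

drift_estimate = align(meas)
print(np.nanstd(drift_estimate - truth))  # small residual scatter (a constant offset may remain)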