PYME-extra 1.0.4.post0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- PYMEcs/Acquire/Actions/__init__.py +0 -0
- PYMEcs/Acquire/Actions/custom.py +167 -0
- PYMEcs/Acquire/Hardware/LPthreadedSimple.py +248 -0
- PYMEcs/Acquire/Hardware/LPthreadedSimpleSim.py +246 -0
- PYMEcs/Acquire/Hardware/NikonTiFlaskServer.py +45 -0
- PYMEcs/Acquire/Hardware/NikonTiFlaskServerT.py +59 -0
- PYMEcs/Acquire/Hardware/NikonTiRESTClient.py +73 -0
- PYMEcs/Acquire/Hardware/NikonTiSim.py +35 -0
- PYMEcs/Acquire/Hardware/__init__.py +0 -0
- PYMEcs/Acquire/Hardware/driftTrackGUI.py +329 -0
- PYMEcs/Acquire/Hardware/driftTrackGUI_n.py +472 -0
- PYMEcs/Acquire/Hardware/driftTracking.py +424 -0
- PYMEcs/Acquire/Hardware/driftTracking_n.py +433 -0
- PYMEcs/Acquire/Hardware/fakeCamX.py +15 -0
- PYMEcs/Acquire/Hardware/offsetPiezoRESTCorrelLog.py +38 -0
- PYMEcs/Acquire/__init__.py +0 -0
- PYMEcs/Analysis/MBMcollection.py +552 -0
- PYMEcs/Analysis/MINFLUX.py +280 -0
- PYMEcs/Analysis/MapUtils.py +77 -0
- PYMEcs/Analysis/NPC.py +1176 -0
- PYMEcs/Analysis/Paraflux.py +218 -0
- PYMEcs/Analysis/Simpler.py +81 -0
- PYMEcs/Analysis/Sofi.py +140 -0
- PYMEcs/Analysis/__init__.py +0 -0
- PYMEcs/Analysis/decSofi.py +211 -0
- PYMEcs/Analysis/eventProperties.py +50 -0
- PYMEcs/Analysis/fitDarkTimes.py +569 -0
- PYMEcs/Analysis/objectVolumes.py +20 -0
- PYMEcs/Analysis/offlineTracker.py +130 -0
- PYMEcs/Analysis/stackTracker.py +180 -0
- PYMEcs/Analysis/timeSeries.py +63 -0
- PYMEcs/Analysis/trackFiducials.py +186 -0
- PYMEcs/Analysis/zerocross.py +91 -0
- PYMEcs/IO/MINFLUX.py +851 -0
- PYMEcs/IO/NPC.py +117 -0
- PYMEcs/IO/__init__.py +0 -0
- PYMEcs/IO/darkTimes.py +19 -0
- PYMEcs/IO/picasso.py +219 -0
- PYMEcs/IO/tabular.py +11 -0
- PYMEcs/__init__.py +0 -0
- PYMEcs/experimental/CalcZfactor.py +51 -0
- PYMEcs/experimental/FRC.py +338 -0
- PYMEcs/experimental/ImageJROItools.py +49 -0
- PYMEcs/experimental/MINFLUX.py +1537 -0
- PYMEcs/experimental/NPCcalcLM.py +560 -0
- PYMEcs/experimental/Simpler.py +369 -0
- PYMEcs/experimental/Sofi.py +78 -0
- PYMEcs/experimental/__init__.py +0 -0
- PYMEcs/experimental/binEventProperty.py +187 -0
- PYMEcs/experimental/chaining.py +23 -0
- PYMEcs/experimental/clusterTrack.py +179 -0
- PYMEcs/experimental/combine_maps.py +104 -0
- PYMEcs/experimental/eventProcessing.py +93 -0
- PYMEcs/experimental/fiducials.py +323 -0
- PYMEcs/experimental/fiducialsNew.py +402 -0
- PYMEcs/experimental/mapTools.py +271 -0
- PYMEcs/experimental/meas2DplotDh5view.py +107 -0
- PYMEcs/experimental/mortensen.py +131 -0
- PYMEcs/experimental/ncsDenoise.py +158 -0
- PYMEcs/experimental/onTimes.py +295 -0
- PYMEcs/experimental/procPoints.py +77 -0
- PYMEcs/experimental/pyme2caml.py +73 -0
- PYMEcs/experimental/qPAINT.py +965 -0
- PYMEcs/experimental/randMap.py +188 -0
- PYMEcs/experimental/regExtraCmaps.py +11 -0
- PYMEcs/experimental/selectROIfilterTable.py +72 -0
- PYMEcs/experimental/showErrs.py +51 -0
- PYMEcs/experimental/showErrsDh5view.py +58 -0
- PYMEcs/experimental/showShiftMap.py +56 -0
- PYMEcs/experimental/snrEvents.py +188 -0
- PYMEcs/experimental/specLabeling.py +51 -0
- PYMEcs/experimental/splitRender.py +246 -0
- PYMEcs/experimental/testChannelByName.py +36 -0
- PYMEcs/experimental/timedSpecies.py +28 -0
- PYMEcs/experimental/utils.py +31 -0
- PYMEcs/misc/ExtraCmaps.py +177 -0
- PYMEcs/misc/__init__.py +0 -0
- PYMEcs/misc/configUtils.py +169 -0
- PYMEcs/misc/guiMsgBoxes.py +27 -0
- PYMEcs/misc/mapUtils.py +230 -0
- PYMEcs/misc/matplotlib.py +136 -0
- PYMEcs/misc/rectsFromSVG.py +182 -0
- PYMEcs/misc/shellutils.py +1110 -0
- PYMEcs/misc/utils.py +205 -0
- PYMEcs/misc/versionCheck.py +20 -0
- PYMEcs/misc/zcInfo.py +90 -0
- PYMEcs/pyme_warnings.py +4 -0
- PYMEcs/recipes/__init__.py +0 -0
- PYMEcs/recipes/base.py +75 -0
- PYMEcs/recipes/localisations.py +2380 -0
- PYMEcs/recipes/manipulate_yaml.py +83 -0
- PYMEcs/recipes/output.py +177 -0
- PYMEcs/recipes/processing.py +247 -0
- PYMEcs/recipes/simpler.py +290 -0
- PYMEcs/version.py +2 -0
- pyme_extra-1.0.4.post0.dist-info/METADATA +114 -0
- pyme_extra-1.0.4.post0.dist-info/RECORD +101 -0
- pyme_extra-1.0.4.post0.dist-info/WHEEL +5 -0
- pyme_extra-1.0.4.post0.dist-info/entry_points.txt +3 -0
- pyme_extra-1.0.4.post0.dist-info/licenses/LICENSE +674 -0
- pyme_extra-1.0.4.post0.dist-info/top_level.txt +1 -0
PYMEcs/misc/utils.py
ADDED
|
@@ -0,0 +1,205 @@
|
|
|
1
|
+
import logging
|
|
2
|
+
logger = logging.getLogger(__file__)
|
|
3
|
+
|
|
4
|
+
# Filtering for a few sources of messages that we can generally blend out in notebooks

def pyme_logging_filter(loglevel=logging.WARN,
                        filterTextFileSource=True,
                        filterDictMDHandlerWarning=True,
                        filterTrackUtilsWarning=True):
    """Configure root logging and mute a few known-noisy PYME message sources.

    Parameters
    ----------
    loglevel : int
        Level applied to the root logger (default ``logging.WARN``).
    filterTextFileSource : bool
        Suppress the 'TextFileSource-use_pandas' log record from PYME.IO.tabular.
    filterDictMDHandlerWarning : bool
        Ignore warnings whose message starts with 'DictMDHandler'.
    filterTrackUtilsWarning : bool
        Ignore the 'Could not import mpld3' warning from trackutils.
    """
    if filterTextFileSource:
        from PYME.IO.tabular import logger as tabular_logger
        # drop the known chatty record; let everything else through
        tabular_logger.addFilter(
            lambda record: 'TextFileSource-use_pandas' not in record.msg)

    if filterDictMDHandlerWarning:
        import warnings
        # suppress warnings from the DictMDHandler about inability to handle localisations
        warnings.filterwarnings("ignore", message=r'DictMDHandler')
    if filterTrackUtilsWarning:
        import warnings
        # suppress warnings from trackutils about lacking mpld3 (which we do not really need)
        warnings.filterwarnings("ignore", message=r'Could not import mpld3')

    logging.basicConfig()
    logging.getLogger().setLevel(loglevel)
|
|
30
|
+
|
|
31
|
+
# get unique name for recipe output
def unique_name(stem, names):
    """Return *stem*, or the first of ``stem_1`` … ``stem_10`` not in *names*.

    If all ten numbered candidates are already taken we give up and return
    the (duplicate) name ``stem_10``.
    """
    if stem not in names:
        return stem
    candidate = stem
    for suffix in range(1, 11):
        candidate = f"{stem}_{suffix}"
        if candidate not in names:
            break
    return candidate  # may be a duplicate if every candidate was taken
|
|
41
|
+
|
|
42
|
+
import pandas as pd
|
|
43
|
+
|
|
44
|
+
# makes the reading a little more flexible
# contributed by Alex B
def read_temperature_csv(filename, timeformat=None):
    """Read a temperature-logger CSV file and normalise it.

    Columns whose names contain the words Rack/Box/Stativ/Time (case
    insensitive) are renamed to 'Rack'/'Box'/'Stand'/'Time', and a parsed
    'datetime' column is added from the 'Time' column.

    Parameters
    ----------
    filename : str or path-like
        CSV file to read (ISO-8859-1 encoded, as written by the logger).
    timeformat : str or sequence of str, optional
        strptime format(s) tried in order against the 'Time' column;
        defaults to the newest logger format followed by the original one.

    Returns
    -------
    pandas.DataFrame

    Raises
    ------
    ValueError
        If none of the provided time formats matches the 'Time' column.
    """
    import re

    # FIX: build the default per call instead of using a shared
    # mutable default argument
    if timeformat is None:
        timeformat = ['%d.%m.%Y %H:%M:%S',  # Newest format
                      '%d/%m/%Y %H:%M:%S',  # Original format
                      ]
    # Ensure timeformat is a list (even if only one format is provided)
    elif isinstance(timeformat, str):
        timeformat = [timeformat]

    def remap_names(name):
        # map logger-specific column headers onto canonical short names
        if re.search(r'\bRack\b', name, re.IGNORECASE):
            return 'Rack'
        elif re.search(r'\bBox\b', name, re.IGNORECASE):
            return 'Box'
        elif re.search(r'\bStativ\b', name, re.IGNORECASE):
            return 'Stand'
        elif re.search(r'\bTime\b', name, re.IGNORECASE):
            return 'Time'
        else:
            return name

    trec = pd.read_csv(filename, encoding="ISO-8859-1")
    trec.columns = [remap_names(col) for col in trec.columns]

    # Try all provided time formats
    for fmt in timeformat:
        try:
            trec['datetime'] = pd.to_datetime(trec['Time'], format=fmt)
            break
        except ValueError:
            continue
    else:  # the loop terminated without breaking, i.e. no format matched
        raise ValueError("None of the provided time formats matched the 'Time' column.")

    return trec
|
|
80
|
+
|
|
81
|
+
def set_diff(trec, t0):
    """Add elapsed-time columns to *trec* in place.

    'tdiff' holds the timedelta of each row's 'datetime' from *t0*;
    'tdiff_s' is the same interval as float32 seconds.
    """
    delta = trec['datetime'] - t0
    trec['tdiff'] = delta
    trec['tdiff_s'] = delta.dt.total_seconds().astype('f')
|
|
84
|
+
|
|
85
|
+
from PYMEcs.pyme_warnings import warn
|
|
86
|
+
def get_timestamp_from_filename(fname):
    """Extract a 'YYMMDD-HHMMSS' timestamp (years 23-25) from a file name.

    Returns the timestamp string, or None (after a user warning) when the
    base name contains no match.
    """
    from pathlib import Path
    import re

    basename = Path(fname).name
    match = re.search(r'2[3-5]\d{4}-\d{6}', basename)
    if match is None:
        warn("no timestamp match found in %s" % basename)
        return None
    return match.group()
|
|
98
|
+
|
|
99
|
+
def get_timestamp_from_mdh_acqdate(mdh):
    """Derive a 'YYMMDD-HHMMSS' timestamp from the metadata entry
    'MINFLUX.AcquisitionDate' (ISO format with UTC offset).

    Returns None when the entry is absent.
    """
    from datetime import datetime
    acqdate = mdh.get('MINFLUX.AcquisitionDate')
    if acqdate is None:
        return None
    parsed = datetime.strptime(acqdate, '%Y-%m-%dT%H:%M:%S%z')
    return parsed.strftime('%y%m%d-%H%M%S')
|
|
107
|
+
|
|
108
|
+
def compare_timestamps_s(ts1, ts2):
    """Return the absolute difference between two 'YYMMDD-HHMMSS'
    timestamp strings, in whole seconds.

    FIX: the previous implementation returned ``delta.seconds``, which is
    only the seconds-within-a-day component and silently discarded whole
    days; we now use the full ``total_seconds()``.
    """
    t1 = timestamp_to_datetime(ts1)
    t2 = timestamp_to_datetime(ts2)
    delta = abs(t1 - t2)
    return int(delta.total_seconds())

def timestamp_to_datetime(ts):
    """Parse a 'YYMMDD-HHMMSS' timestamp string into a pandas datetime."""
    return pd.to_datetime(ts, format="%y%m%d-%H%M%S")
|
|
120
|
+
|
|
121
|
+
def parse_timestamp_from_filename(fname):
    """Return the 'YYMMDD-HHMMSS' timestamp embedded in *fname* parsed as a
    datetime, or None when the file name carries no timestamp."""
    ts = get_timestamp_from_filename(fname)
    return None if ts is None else timestamp_to_datetime(ts)
|
|
127
|
+
|
|
128
|
+
def recipe_from_mdh(mdh):
    """Reconstruct the recipe YAML text stored (flattened onto one line) in
    the 'Pipeline.Recipe' metadata entry.

    The stored recipe has its newlines collapsed to spaces; we split it back
    into lines at every space that cannot be the separator of a 'key: value'
    pair. Returns None (after a user warning) when the entry is missing.
    """
    import re
    # we "or"-combine the following regexs
    separator = '|'.join([
        '(?<=:) (?= )',  # a space preceded by a colon AND also followed by another space; this is therefore not a "key: value" type YAML line
        '(?<![ :-]) '    # a space NOT preceded by a colon, dash or another space
    ])
    recipe = mdh.get('Pipeline.Recipe')
    if recipe is None:
        warn("could not retrieve Pipeline.Recipe")
        return None
    return '\n'.join(re.split(separator, recipe))
|
|
140
|
+
|
|
141
|
+
def load_sessionfile(filename, substitute=True):
    """Load a PYMEVisualize session description from a YAML file.

    Parameters
    ----------
    filename : str or path-like
        Session file to read.
    substitute : bool
        When True (default), replace any SESSIONDIR_TOKEN occurrences with
        the session file's directory before parsing.

    Returns
    -------
    The parsed session structure (as returned by ``yaml.safe_load``).
    """
    import yaml
    from PYME.LMVis.sessionpaths import substitute_sessiondir

    with open(filename, 'r') as fh:
        raw = fh.read()

    if substitute:
        raw = substitute_sessiondir(raw, filename)  # replace any possibly present SESSIONDIR_TOKEN

    return yaml.safe_load(raw)
|
|
154
|
+
|
|
155
|
+
from pathlib import Path
|
|
156
|
+
def zarrtozipstore(zarr_root, archive_name, verbose=False):
    """Zip up a zarr directory store into a zip-store style archive.

    Parameters
    ----------
    zarr_root : str or Path
        Directory holding the zarr store (expected to contain '.zgroup').
    archive_name : str or Path
        Base name of the archive to create; '.zip' is appended by
        shutil.make_archive.
    verbose : bool
        When True, report what is being zipped and where it goes.

    Returns
    -------
    str : name of the archive that was created.

    Raises
    ------
    RuntimeError
        If zip archiving is unavailable or *zarr_root* is not a directory.
    """
    zarr_root = Path(zarr_root)
    archive_name = Path(archive_name)

    from shutil import get_archive_formats
    if 'zip' not in dict(get_archive_formats()):
        raise RuntimeError('shutil.make_archive does not support zip format, aborting')

    if not (zarr_root.exists() and zarr_root.is_dir()):
        raise RuntimeError('path "%s" does not exist or is not a directory' % (zarr_root))
    if not (zarr_root / '.zgroup').exists():
        warn("did not find .zgroup file in directory, this may not be a zarr directory")

    if verbose:
        warn("zarr file archive at\n'%s'\n, zipping to dir\n'%s'\n with name '%s'" % (zarr_root,archive_name.parent,archive_name.name))

    from shutil import make_archive
    # FIX: pass base_name as str - older shutil versions build the archive
    # name via base_name + '.zip' and fail when handed a Path object
    created = make_archive(str(archive_name),
                           'zip',
                           root_dir=zarr_root)
    return created
|
|
177
|
+
|
|
178
|
+
def get_ds_path(pipeline, ds='FitResults'):
    """Return the file path backing datasource *ds* of *pipeline*.

    Prefers the pipeline's ``filename()`` accessor when available; otherwise
    looks the datasource up in the saved session. Returns None when neither
    route is available.
    """
    if 'filename' in dir(pipeline):
        return pipeline.filename()
    try:
        return pipeline.get_session()['datasources'][ds]
    except AttributeError:
        return None
|
|
187
|
+
|
|
188
|
+
def fname_from_timestamp(datapath, mdh, stemsuffix, ext='.csv'):
    """Build '<dir of datapath>/<MINFLUX timestamp><stemsuffix><ext>'.

    Falls back to the stem 'timestamp_unknown' when the metadata carries no
    'MINFLUX.TimeStamp' entry. Returns a pathlib.Path.
    """
    from pathlib import Path
    tstamp = mdh.get('MINFLUX.TimeStamp', 'timestamp_unknown')
    target = Path(datapath).parent / (tstamp + stemsuffix)
    return target.with_suffix(ext)
|
|
195
|
+
|
|
196
|
+
def autosave_csv(df, datapath, mdh, suffix):
    """Save *df* as CSV next to *datapath*, named from the MINFLUX
    timestamp in *mdh* plus *suffix* (no index, with header)."""
    target = fname_from_timestamp(datapath, mdh, suffix, ext='.csv')
    logger.debug(f"autosaving file {target}...")
    df.to_csv(target, index=False, header=True)
|
|
201
|
+
|
|
202
|
+
def autosave_check():
    """Return the 'MINFLUX-autosave' PYME config setting (False when unset)."""
    from PYME import config as pyme_config
    return pyme_config.get('MINFLUX-autosave', False)
|
|
205
|
+
|
|
@@ -0,0 +1,20 @@
|
|
|
1
|
+
import PYME.version as PYMEver
|
|
2
|
+
from packaging import version as pv
|
|
3
|
+
|
|
4
|
+
# registry of PYME features we may depend on, with the first PYME version
# that provides each one
Features = {
    'PluginClass': {'doc': 'Plugin base class that implements weak refs for plugins',
                    'minVersion': '20.07.08'}
}


def PYMEversionCheck(feature=None):
    """Check that the installed PYME provides *feature*.

    Returns True when no feature is requested or the installed PYME version
    is recent enough; raises RuntimeError when PYME is too old and
    ValueError for an unknown feature name.
    """
    if feature is None:
        return True
    if feature not in Features:
        raise ValueError("checking for unknown feature '%s'" % feature)
    required = Features[feature]['minVersion']
    if pv.parse(PYMEver.version) < pv.parse(required):
        raise RuntimeError("PYME upgrade required! We need the PYME '%s' feature which is available since PYME version %s, you have version %s" %
                           (feature, required, PYMEver.version))
    return True
|
PYMEcs/misc/zcInfo.py
ADDED
|
@@ -0,0 +1,90 @@
|
|
|
1
|
+
from zeroconf import *
|
|
2
|
+
import PYME.misc.pyme_zeroconf as pzc
|
|
3
|
+
import time
|
|
4
|
+
|
|
5
|
+
import logging
|
|
6
|
+
logging.basicConfig(level=logging.INFO)
|
|
7
|
+
|
|
8
|
+
# logger = logging.getLogger(__name__)
|
|
9
|
+
# logger.setLevel(logging.DEBUG)
|
|
10
|
+
|
|
11
|
+
# # create console handler and set level to debug
|
|
12
|
+
# ch = logging.StreamHandler()
|
|
13
|
+
# ch.setLevel(logging.INFO)
|
|
14
|
+
|
|
15
|
+
# # add ch to logger
|
|
16
|
+
# logger.addHandler(ch)
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
TIMEOUTDEFAULT = 10


class ZeroconfServiceTypes(object):
    """
    Return all of the advertised services on any local networks
    """

    def __init__(self):
        # service names collected by the ServiceBrowser callbacks
        self.found_services = set()

    def add_service(self, zc, type_, name):
        # ServiceBrowser callback: record every service name we see
        self.found_services.add(name)

    def remove_service(self, zc, type_, name):
        # we only collect names, so disappearing services are ignored
        pass

    @classmethod
    def find(cls, zc=None, timeout=TIMEOUTDEFAULT):
        """
        Return all of the advertised services on any local networks.

        :param zc: Zeroconf() instance. Pass in if already have an
            instance running or if non-default interfaces are needed
        :param timeout: seconds to wait for any responses
        :return: tuple of service type strings
        """
        local_zc = zc or Zeroconf()
        collector = cls()
        browser = ServiceBrowser(
            local_zc, '_services._dns-sd._udp.local.', listener=collector)

        # wait for responses
        time.sleep(timeout)

        # close down anything we opened; only cancel the browser when the
        # caller's Zeroconf instance must stay alive
        if zc is None:
            local_zc.close()
        else:
            browser.cancel()

        return tuple(sorted(collector.found_services))
|
|
58
|
+
|
|
59
|
+
# Prefer the ZeroconfServiceTypes implementation shipped with the zeroconf
# package; fall back to our local backport above when the installed zeroconf
# is too old to provide it.
# FIX: 'from zeroconf import *' never binds the module name 'zeroconf', so
# the previous 'zeroconf.ZeroconfServiceTypes()' always raised NameError and
# the bare 'except:' silently forced the fallback (and would also have
# swallowed KeyboardInterrupt).
try:
    import zeroconf as _zeroconf_mod
    zt = _zeroconf_mod.ZeroconfServiceTypes()
except (ImportError, AttributeError):
    zt = ZeroconfServiceTypes()
|
|
63
|
+
|
|
64
|
+
def servicesPresent(timeOut=TIMEOUTDEFAULT, showServices=False):
    """Browse for zeroconf services and return True if any were found.

    :param timeOut: seconds to wait while browsing
    :param showServices: when True, also print the discovered service names
    """
    found = zt.find(timeout=timeOut)
    if showServices:
        print("Available Services: %s" % repr(found))
    return len(found) > 0
|
|
70
|
+
|
|
71
|
+
def checkServer(timeOut=TIMEOUTDEFAULT, showServices=False):
    """Log a quick diagnosis: are any zeroconf services visible at all, and
    does a PYME nameserver advertise any pyro services on this network?"""
    if not servicesPresent(timeOut=timeOut, showServices=showServices):
        logging.error('no zeroconf services detected - this should not happen')
    else:
        logging.info('zeroconf services detected')

    ns = pzc.getNS()
    adserv = get_advertised_services(ns)
    if not adserv:
        logging.error('no advertised pyro services - apparently there is no server running on this network')
    else:
        logging.info(repr(adserv))
|
|
83
|
+
|
|
84
|
+
def get_advertised_services(ns):
    """Return the advertised services of nameserver *ns*.

    Newer PYME nameservers expose a ``get_advertised_services()`` method;
    older ones only have an ``advertised_services`` attribute - support both.
    """
    try:
        return ns.get_advertised_services()
    except AttributeError:
        # FIX: was a bare 'except:' which also masked unrelated errors
        # raised inside get_advertised_services itself
        return ns.advertised_services
|
PYMEcs/pyme_warnings.py
ADDED
|
File without changes
|
PYMEcs/recipes/base.py
ADDED
|
@@ -0,0 +1,75 @@
|
|
|
1
|
+
from PYME.recipes.base import register_module, ModuleBase, Filter
|
|
2
|
+
from PYME.recipes.traits import HasTraits, Float, List, Bool, Int, CStr, Enum, on_trait_change, Input, Output, FileOrURI
|
|
3
|
+
|
|
4
|
+
from PYME.IO.image import ImageStack
|
|
5
|
+
import numpy as np
|
|
6
|
+
from PYMEcs.pyme_warnings import warn
|
|
7
|
+
from pathlib import Path
|
|
8
|
+
|
|
9
|
+
import logging
|
|
10
|
+
logger = logging.getLogger(__name__)
|
|
11
|
+
|
|
12
|
+
@register_module('ExtractChannelByName')
class ExtractChannelByName(ModuleBase):
    """Extract one channel from an image using regular expression matching to image channel names - by default this is case insensitive"""
    inputName = Input('input')
    outputName = Output('filtered_image')

    channelNamePattern = CStr('channel0')
    caseInsensitive = Bool(True)

    def _matchChannels(self, channelNames):
        # kept as a separate method so the matching logic can be exercised externally for testing
        import re
        reflags = re.I if self.caseInsensitive else 0
        matched = []
        for pos, cname in enumerate(channelNames):
            if re.search(self.channelNamePattern, cname, reflags):
                matched.append(pos)
        return matched

    def _pickChannel(self, image):
        # resolve the pattern to exactly one channel index; anything else is an error
        channelNames = image.mdh['ChannelNames']
        matched = self._matchChannels(channelNames)
        if not matched:
            raise RuntimeError("Expression '%s' did not match any channel names" % self.channelNamePattern)
        if len(matched) > 1:
            raise RuntimeError(("Expression '%s' did match more than one channel name: " % self.channelNamePattern) +
                               ', '.join([channelNames[i] for i in matched]))
        idx = matched[0]

        chan = image.data[:, :, :, idx]

        im = ImageStack(chan, titleStub='Filtered Image')
        im.mdh.copyEntriesFrom(image.mdh)
        im.mdh['ChannelNames'] = [channelNames[idx]]
        im.mdh['Parent'] = image.filename  # record provenance of the extracted channel

        return im

    def execute(self, namespace):
        namespace[self.outputName] = self._pickChannel(namespace[self.inputName])
|
|
51
|
+
|
|
52
|
+
from PYME.IO.image import ImageStack
|
|
53
|
+
|
|
54
|
+
@register_module('LoadMask')
class LoadMask(ModuleBase):
    """Load a mask image from a file chosen via the maskfile trait."""

    inputName = Input('FitResults') # we use this as a dummy input since we need at least one input
    outputImageStack = Output('image_mask')

    maskfile = FileOrURI('')

    def run(self, inputName):
        # guard clauses: no file configured, or configured file missing
        if self.maskfile == '':
            return None # this will trigger an error as it tries to attach the mdh; may be better to have an empty container type
        if not Path(self.maskfile).exists():
            warn("trying to load file '%s' that does not exist" % self.maskfile)
            return None
        return ImageStack(filename=self.maskfile)
|
|
72
|
+
|
|
73
|
+
|
|
74
|
+
|
|
75
|
+
|