dcnum 0.11.0__py3-none-any.whl → 0.11.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of dcnum might be problematic.
- dcnum/_version.py +2 -2
- dcnum/feat/event_extractor_manager_thread.py +7 -1
- dcnum/feat/feat_moments/mt_legacy.py +16 -13
- dcnum/feat/feat_texture/tex_all.py +7 -4
- dcnum/meta/ppid.py +1 -1
- dcnum/read/hdf5_data.py +22 -13
- dcnum/segm/segmenter.py +5 -3
- dcnum/segm/segmenter_cpu.py +7 -1
- dcnum/segm/segmenter_gpu.py +16 -3
- dcnum/segm/segmenter_manager_thread.py +8 -0
- {dcnum-0.11.0.dist-info → dcnum-0.11.2.dist-info}/METADATA +1 -1
- {dcnum-0.11.0.dist-info → dcnum-0.11.2.dist-info}/RECORD +22 -21
- tests/conftest.py +3 -0
- tests/test_feat_brightness.py +23 -0
- tests/test_feat_haralick.py +87 -0
- tests/test_feat_moments_based.py +56 -0
- tests/test_read_hdf5.py +37 -10
- tests/test_segm_thresh.py +0 -138
- tests/test_segmenter.py +225 -0
- {dcnum-0.11.0.dist-info → dcnum-0.11.2.dist-info}/LICENSE +0 -0
- {dcnum-0.11.0.dist-info → dcnum-0.11.2.dist-info}/WHEEL +0 -0
- {dcnum-0.11.0.dist-info → dcnum-0.11.2.dist-info}/top_level.txt +0 -0
dcnum/_version.py
CHANGED

dcnum/feat/event_extractor_manager_thread.py
CHANGED

@@ -65,7 +65,8 @@ class EventExtractorManagerThread(threading.Thread):
         self.label_array = np.ctypeslib.as_array(
             self.fe_kwargs["label_array"]).reshape(
             self.data.image.chunk_shape)
-
+        #: Time counter for feature extraction
+        self.t_count = 0
         #: Whether debugging is enabled
         self.debug = debug

@@ -100,6 +101,8 @@ class EventExtractorManagerThread(threading.Thread):
                 unavailable_slots = 0
                 time.sleep(.1)

+            t1 = time.monotonic()
+
             # We have a chunk, process it!
             chunk = self.slot_chunks[cur_slot]
             # Populate the labeling array for the workers

@@ -123,8 +126,10 @@ class EventExtractorManagerThread(threading.Thread):
             self.slot_states[cur_slot] = "w"

             self.logger.debug(f"Extracted one chunk: {chunk}")
+            self.t_count += time.monotonic() - t1

             chunks_processed += 1
+
             if chunks_processed == self.data.image.num_chunks:
                 break

@@ -132,3 +137,4 @@ class EventExtractorManagerThread(threading.Thread):
         self.fe_kwargs["finalize_extraction"].value = True
         [w.join() for w in workers]
         self.logger.debug("Finished extraction.")
+        self.logger.info(f"Extraction time: {self.t_count:.1f}s")
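
Note: the change above accumulates wall-clock time around the chunk-processing block and logs the total once at the end. A minimal sketch of that pattern in generic Python (not the dcnum API):

import logging
import time

logger = logging.getLogger(__name__)
t_count = 0.0

for chunk in range(3):          # stand-in for the chunk loop
    t1 = time.monotonic()       # monotonic clock is robust against clock adjustments
    time.sleep(0.01)            # stand-in for the actual feature extraction work
    t_count += time.monotonic() - t1

logger.info(f"Extraction time: {t_count:.1f}s")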
dcnum/feat/feat_moments/mt_legacy.py
CHANGED

@@ -10,22 +10,25 @@ def moments_based_features(mask, pixel_size):

     size = mask.shape[0]

-    [13 removed lines not shown in this rendering]
+    empty = np.full(size, np.nan, dtype=np.float64)
+    deform = np.copy(empty)
+    size_x = np.copy(empty)
+    size_y = np.copy(empty)
+    pos_x = np.copy(empty)
+    pos_y = np.copy(empty)
+    area_msd = np.copy(empty)
+    area_ratio = np.copy(empty)
+    area_um = np.copy(empty)
+    aspect = np.copy(empty)
+    tilt = np.copy(empty)
+    inert_ratio_cvx = np.copy(empty)
+    inert_ratio_raw = np.copy(empty)
+    inert_ratio_prnc = np.copy(empty)

     for ii in range(size):
         cont_raw = contour_single_opencv(mask[ii])
+        if len(cont_raw.shape) < 2:
+            continue
         mu_raw = cv2.moments(cont_raw)

         # convex hull

dcnum/feat/feat_texture/tex_all.py
CHANGED

@@ -4,16 +4,16 @@ import numpy as np
 from .common import haralick_names


-def haralick_texture_features(image, mask, image_bg=None, image_corr=None):
+def haralick_texture_features(
+        mask, image=None, image_bg=None, image_corr=None):
     # make sure we have a boolean array
     mask = np.array(mask, dtype=bool)
     size = mask.shape[0]

     # compute features if necessary
-    if image_bg is not None:
+    if image_bg is not None and image is not None and image_corr is None:
         # Background-corrected brightness values
-        [removed line not shown in this rendering]
-        image_corr = np.array(image, dtype=np.int16) - image_bg
+        image_corr = np.array(image, dtype=np.int16) - image_bg

     tex_dict = {}
     empty = np.full(size, np.nan, dtype=np.float64)

@@ -29,6 +29,9 @@ def haralick_texture_features(image, mask, image_bg=None, image_corr=None):
         # -> maximum value should be as small as possible
         # - set pixels outside contour to zero (ignored areas, see mahotas)
         maski = mask[ii]
+        if not np.any(maski):
+            # The mask is empty (nan values)
+            continue
         if image_corr.shape[0] == 1:
             # We have several masks for one image.
             imcoi = image_corr[0]
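
Note: both feature extractors now pre-fill their output arrays with NaN and skip events whose mask or contour is degenerate, so those entries simply stay NaN. A rough stand-alone sketch of the idea in plain NumPy (hypothetical names, not the dcnum implementation):

import numpy as np

n_events = 3
empty = np.full(n_events, np.nan, dtype=np.float64)
area = np.copy(empty)

# one degenerate "contour" (1D array) and two regular (N, 2) contours
contours = [np.zeros(2), np.ones((3, 2)), np.ones((5, 2))]
for ii, cont in enumerate(contours):
    if len(cont.shape) < 2:   # degenerate contour: leave the NaN in place
        continue
    area[ii] = len(cont)      # stand-in for the real moment computation

print(area)  # [nan  3.  5.]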
dcnum/meta/ppid.py
CHANGED
dcnum/read/hdf5_data.py
CHANGED

@@ -31,9 +31,6 @@ class HDF5Data:
         if isinstance(path, h5py.File):
             self.h5 = path
             path = path.filename
-        else:
-            self.h5 = None  # is set in __setstate__
-        self._cache_scalar = {}
         self.__setstate__({"path": path,
                            "pixel_size": pixel_size,
                            "md5_5m": md5_5m,

@@ -69,7 +66,25 @@ class HDF5Data:
             warnings.warn(f"Feature {feat} not cached (possibly slow)")
         return self.h5["events"][feat]

+    def __getstate__(self):
+        return {"path": self.path,
+                "pixel_size": self.pixel_size,
+                "md5_5m": self.md5_5m,
+                "meta": self.meta,
+                "logs": self.logs,
+                "tables": self.tables,
+                "image_cache_size": self.image.cache_size
+                }
+
     def __setstate__(self, state):
+        # Make sure these properties exist (we rely on __init__, because
+        # we want this class to be pickable and __init__ is not called by
+        # `pickle.load`.
+        if not hasattr(self, "_cache_scalar"):
+            self._cache_scalar = {}
+        if not hasattr(self, "h5"):
+            self.h5 = None
+
         self.path = state["path"]

         self.md5_5m = state["md5_5m"]

@@ -100,7 +115,10 @@ class HDF5Data:
                 alog = [ll.decode("utf") for ll in alog]
                 self.logs[key] = alog
             for tab in h5.get("tables", []):
-                [removed line not shown in this rendering]
+                tabdict = {}
+                for tkey in h5["tables"][tab].dtype.fields.keys():
+                    tabdict[tkey] = h5["tables"][tab][tkey]
+                self.tables[tab] = tabdict

         if state["pixel_size"] is not None:
             self.pixel_size = state["pixel_size"]

@@ -137,15 +155,6 @@ class HDF5Data:

         self.image_corr = ImageCorrCache(self.image, self.image_bg)

-    def __getstate__(self):
-        return {"path": self.path,
-                "pixel_size": self.pixel_size,
-                "md5_5m": self.md5_5m,
-                "meta": self.meta,
-                "logs": self.logs,
-                "tables": self.tables,
-                }
-
     @functools.cache
     def __len__(self):
         return self.h5.attrs["experiment:event count"]
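
Note: pairing __getstate__ with a __setstate__ that recreates its own attributes is what allows an HDF5Data instance to be pickled directly, because pickle does not call __init__ when loading. A minimal self-contained sketch of that pattern (not the dcnum class itself):

import pickle


class Data:
    def __init__(self, path):
        self.path = path
        self._cache = {}          # derived state, not worth pickling

    def __getstate__(self):
        return {"path": self.path}

    def __setstate__(self, state):
        # pickle.load does not call __init__, so recreate derived state here
        if not hasattr(self, "_cache"):
            self._cache = {}
        self.path = state["path"]


d2 = pickle.loads(pickle.dumps(Data("measurement.rtdc")))
print(d2.path, d2._cache)  # measurement.rtdc {}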
dcnum/segm/segmenter.py
CHANGED

@@ -173,11 +173,13 @@ class Segmenter(abc.ABC):
             labels_uint8 = np.array(labels, dtype=np.uint8)
             labels_dilated = cv2.dilate(labels_uint8, element)
             labels_eroded = cv2.erode(labels_dilated, element)
-            labels, _ = ndi.label(
+            labels, _ = ndi.label(
+                input=labels_eroded > 0,
+                structure=ndi.generate_binary_structure(2, 2))

         if fill_holes:
             # Floodfill only works with uint8 (too small) or int32
-            if
+            if labels.dtype != np.int32:
                 labels = np.array(labels, dtype=np.int32)
             #
             # from scipy import ndimage

@@ -206,7 +208,7 @@ class Segmenter(abc.ABC):
             mol = segm_wrap(image)
             if mol.dtype == bool:
                 # convert mask to label
-                labels,
+                labels, _ = ndi.label(
                     input=mol,
                     structure=ndi.generate_binary_structure(2, 2))
             else:
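
Note: both hunks relabel a boolean mask with an 8-connected structuring element. A small self-contained check of what ndi.generate_binary_structure(2, 2) changes compared with the default 4-connectivity:

import numpy as np
import scipy.ndimage as ndi

# two blobs that touch only diagonally
mask = np.array([[1, 0, 0],
                 [0, 1, 1],
                 [0, 0, 0]], dtype=bool)

labels4, n4 = ndi.label(mask)  # default structure: 4-connectivity
labels8, n8 = ndi.label(mask, structure=ndi.generate_binary_structure(2, 2))
print(n4, n8)  # 2 1  (diagonal neighbours merge under 8-connectivity)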
dcnum/segm/segmenter_cpu.py
CHANGED

@@ -31,6 +31,12 @@ class CPUSegmenter(Segmenter, abc.ABC):
         # Tells the workers to stop
         self.mp_shutdown = mp.Value("i", 0)

+    def __enter__(self):
+        return self
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        self.join_workers()
+
     def __getstate__(self):
         # Copy the object's state from self.__dict__ which contains
         # all our instance attributes. Always use the dict.copy()

@@ -47,7 +53,7 @@ class CPUSegmenter(Segmenter, abc.ABC):
         return state

     def __setstate__(self, state):
-        # Restore instance attributes
+        # Restore instance attributes
         self.__dict__.update(state)

     @staticmethod
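
Note: __enter__/__exit__ turn the CPU segmenter into a context manager, so worker processes are joined even when an exception escapes the block. The general shape of that pattern (a generic sketch, not the dcnum class):

class WorkerPool:
    def __init__(self):
        self.workers = []          # would hold multiprocessing workers

    def join_workers(self):
        print("joining workers")   # stand-in for shutdown and join

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.join_workers()        # runs on normal exit and on exceptions


with WorkerPool() as pool:
    pass  # segment chunks here; cleanup happens automatically afterwards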
dcnum/segm/segmenter_gpu.py
CHANGED

@@ -2,6 +2,7 @@ import abc
 import pathlib

 import numpy as np
+import scipy.ndimage as ndi


 from .segmenter import Segmenter

@@ -10,7 +11,7 @@ from .segmenter import Segmenter
 class GPUSegmenter(Segmenter, abc.ABC):
     mask_postprocessing = False

-    def __init__(self, model_file, *args, **kwargs):
+    def __init__(self, model_file=None, *args, **kwargs):
         super(GPUSegmenter, self).__init__(*args, **kwargs)
         self.model_path = self._get_model_path(model_file)

@@ -28,6 +29,18 @@ class GPUSegmenter(Segmenter, abc.ABC):
             stop = len(image_data)

         image_slice = image_data[start:stop]
-        segm = self.segment_frame_wrapper(
+        segm = self.segment_frame_wrapper()

-        [removed line not shown in this rendering]
+        labels = segm(image_slice)
+
+        # Make sure we have integer labels
+        if labels.dtype == bool:
+            new_labels = np.zeros_like(labels, dtype=np.uint16)
+            for ii in range(len(labels)):
+                ndi.label(
+                    input=labels[ii],
+                    output=new_labels[ii],
+                    structure=ndi.generate_binary_structure(2, 2))
+            labels = new_labels
+
+        return labels

dcnum/segm/segmenter_manager_thread.py
CHANGED

@@ -71,6 +71,8 @@ class SegmenterManagerThread(threading.Thread):
         self.slot_chunks = slot_chunks
         #: List containing the segmented labels of each slot
         self.labels_list = [None] * len(self.slot_states)
+        #: Time counter for segmentation
+        self.t_count = 0
         #: Whether running in debugging mode
         self.debug = debug

@@ -96,6 +98,8 @@ class SegmenterManagerThread(threading.Thread):
                 empty_slots = 0
                 time.sleep(.01)

+            t1 = time.monotonic()
+
             # We have a free slot to compute the segmentation
             labels = self.segmenter.segment_chunk(
                 image_data=self.image_data,

@@ -111,7 +115,11 @@ class SegmenterManagerThread(threading.Thread):
             self.slot_states[cur_slot] = "e"
             self.logger.debug(f"Segmented one chunk: {chunk}")

+            self.t_count += time.monotonic() - t1
+
         # Cleanup
         if isinstance(self.segmenter, CPUSegmenter):
             # Join the segmentation workers.
             self.segmenter.join_workers()
+
+        self.logger.info(f"Segmentation time: {self.t_count:.1f}s")

{dcnum-0.11.0.dist-info → dcnum-0.11.2.dist-info}/RECORD
CHANGED

@@ -1,7 +1,7 @@
 dcnum/__init__.py,sha256=hcawIKS7utYiOyVhOAX9t7K3xYzP1b9862VV0b6qSrQ,74
-dcnum/_version.py,sha256=
+dcnum/_version.py,sha256=jNTsW7w_cV00kI4XQuVNdxy3zty3vBGoedJmCKxUwlo,162
 dcnum/feat/__init__.py,sha256=0oX765AyfL1BcVt-FI6R_i6x5LgYYLeyO5lkxSauI0Y,299
-dcnum/feat/event_extractor_manager_thread.py,sha256=
+dcnum/feat/event_extractor_manager_thread.py,sha256=54eCKbLBkv3U7RLGfqtiMB1CnqHNKcTm9vJE84qu9X8,5681
 dcnum/feat/gate.py,sha256=UEHbj3hkMWNm4tlY8Tz8sOsruhByjJxgO1s-ztQ7WTw,6235
 dcnum/feat/queue_event_extractor.py,sha256=_CwtEcJ-zzChkiPbRHC-WdVNy9sNWOYTzZPd2oEvtrU,11493
 dcnum/feat/feat_background/__init__.py,sha256=mL8QJYK6m3hxTqF6Cuosu__Fm5tZUMa-hTgSGcNw9AE,458

@@ -13,22 +13,22 @@ dcnum/feat/feat_brightness/bright_all.py,sha256=Z5b-xkw7g7ejMpbGmdUqrxGRymqFhAQs
 dcnum/feat/feat_brightness/common.py,sha256=JX49EszYDmnvoOKXFVV1CalEIWRmOuY5EryNbqGbdac,156
 dcnum/feat/feat_moments/__init__.py,sha256=RxDTbl-XVVk8HIgihTuqWdmD0ciNGdfg715ShHEGUHs,68
 dcnum/feat/feat_moments/ct_opencv.py,sha256=_qyHCGvylVxruMWafvVbVOzhWGXLoFi10LReNxGcWhY,463
-dcnum/feat/feat_moments/mt_legacy.py,sha256=
+dcnum/feat/feat_moments/mt_legacy.py,sha256=Z6h2ag3SmOVWT9nRtKg7EqqvS4RBOnKrLm2P1Sm51aY,3409
 dcnum/feat/feat_texture/__init__.py,sha256=SjYRb917PrFN231M2nVxq12DDH2y72WocsS9yY7xqaI,84
 dcnum/feat/feat_texture/common.py,sha256=COXHpXS-7DMouGu3WF83I76L02Sr7P9re4lxajh6g0E,439
-dcnum/feat/feat_texture/tex_all.py,sha256=
+dcnum/feat/feat_texture/tex_all.py,sha256=eGjjNfPpfZw7FA_VNFCIMiU38KD0qcGbxLciYy-tCiA,4097
 dcnum/meta/__init__.py,sha256=cQT_HN5yDKzMnZM8CUyNmeA68OhE3ENO_rvFmgDj95c,40
-dcnum/meta/ppid.py,sha256=
+dcnum/meta/ppid.py,sha256=YZD2ErfnShLsJDKZOWQg298Nmk2okuwzqOMUrlJ5nSo,5951
 dcnum/read/__init__.py,sha256=iV2wrBMdwJgpXaphNiiAVybndDzTTv0CAGRNXyvxcLY,157
 dcnum/read/cache.py,sha256=mr2DBJZYgNIAiz64TQ4cgkPmRt8nJWBvgkOpaz-p6Yg,5467
 dcnum/read/const.py,sha256=SVlvEJiRIHyTyUlWG24_ogcnT5nTxCi0CRslNuNP56I,282
-dcnum/read/hdf5_data.py,sha256=
+dcnum/read/hdf5_data.py,sha256=qrkT7577zA86nnsaXiL_KqT0elVeDo8Uqj20jTyAnzo,12135
 dcnum/segm/__init__.py,sha256=BNFn7VvWsRmOzxCtvr4AXi3mmWsJSsQlEmGlq7umCac,510
 dcnum/segm/segm_thresh.py,sha256=fim5HRNWq0DUhVRwLf6nmu4gOHAFtydGDRS1Ww_XzFo,1134
-dcnum/segm/segmenter.py,sha256=
-dcnum/segm/segmenter_cpu.py,sha256=
-dcnum/segm/segmenter_gpu.py,sha256=
-dcnum/segm/segmenter_manager_thread.py,sha256
+dcnum/segm/segmenter.py,sha256=o5G9JnHG-dHnNwG8IdPK8MideF0IlSDbuYwCdrSr31w,9002
+dcnum/segm/segmenter_cpu.py,sha256=dJC6GCHVpTC5uDy0tieRfpO87weDdao-I9fIFub3_jU,9723
+dcnum/segm/segmenter_gpu.py,sha256=F-6H425eQc9B2-k5PURJziU5uQubdF96GTkoysD2JDM,1312
+dcnum/segm/segmenter_manager_thread.py,sha256=xtuk7gnk7xhoRoV_J97rrv7IR3JgeRvVewCDT-chqpk,5172
 dcnum/write/__init__.py,sha256=Oy-ORTyzUUswsaJvd0C6LyXtOgAY0iTIRqFNU9d7M8Y,160
 dcnum/write/deque_writer_thread.py,sha256=UUn5OYxDvckvhLw3llLYu7y8MI7RfsOhdJhMonKKB3k,1625
 dcnum/write/queue_collector_thread.py,sha256=Iw83KAcZu13elorHUOfJT6cVQntAKBLm6OVgkldjtL0,11088

@@ -37,25 +37,26 @@ docs/conf.py,sha256=VqB1WtClmmAVdfQ45SLa3aG7t6g2AcDG_BI6MO7j4wI,3022
 docs/index.rst,sha256=eWjHCDrw_VEC885Z3ON_wX5p2FRn_DI12WjSt0WcU5g,431
 docs/requirements.txt,sha256=KA1AT05zfznj2eE8ixs9cikcl_MtWplqbiXhp4h0YsI,87
 docs/extensions/github_changelog.py,sha256=SEJkjEvVtJ-42daYkNbcr9mHmmO6ZjDs6phvnySlNAE,2375
-tests/conftest.py,sha256=
+tests/conftest.py,sha256=bYh2DmQRP2TbkmLBdeiKF-nSv5IX1oUNWcwT6w_bEUw,577
 tests/helper_methods.py,sha256=MsCppTICPqv4vq-DhUBIKFyOhvRO_oS2O0Fb3OYhNxs,1855
 tests/requirements.txt,sha256=Mfj5F30ZlweJpp9LC64TVta_lq6G5AINIfJxVxTK-2c,20
 tests/test_feat_background_bg_roll_median.py,sha256=FtrVcKumIctUrCWtNimGicExsouFxoZpbCwPnmeY1BU,4809
-tests/test_feat_brightness.py,sha256=
-tests/test_feat_haralick.py,sha256=
-tests/test_feat_moments_based.py,sha256=
+tests/test_feat_brightness.py,sha256=V8F_zhxYvS0rrZl_SKZwVwpPLI6jq0J7NR-eOdNS6qg,2082
+tests/test_feat_haralick.py,sha256=_00p24WOzIOAtfKiXWcd8XoYBQKt3DbYdSijHZYzi64,3499
+tests/test_feat_moments_based.py,sha256=wRlqM8sGSPIzUB95gUuixNH1hTLgD7otj15-53CRxcE,3037
 tests/test_init.py,sha256=umUGuhCJ4iCsI5qjoNtrIAW_3xFfI3rDEGk8BKgzekc,73
 tests/test_ppid.py,sha256=gyrFLROG9IZIcHb0YQCHujTw-LdzGYTKZhJRAFgza6c,2785
 tests/test_ppid_segm.py,sha256=4NzGXns3lMetH970SKMnzizbnThx8ku4A1MiTozXMlA,238
 tests/test_read_concat_hdf5.py,sha256=jeOxG6T_Z8wtPVK_UbEa-X-VQtYdFCLE2oxFbWjN2iU,1951
-tests/test_read_hdf5.py,sha256=
-tests/test_segm_thresh.py,sha256=
+tests/test_read_hdf5.py,sha256=v8o-jMp8ycDwF--7Qf1mQsRRU2V8RDlOwsiP4zUg6pg,6309
+tests/test_segm_thresh.py,sha256=ecHbhwy8_qVJTdpDJlo_sIiQPSIi_xKXM2EqUJXRU20,5101
+tests/test_segmenter.py,sha256=qjHTTUmaR08X4ATjndxJr9eZWFvpPQwJ_fSCw8-tr9Y,9307
 tests/test_write_deque_writer_thread.py,sha256=EAnqKayr4_jskv_599QYD3gdBZhtyVM7-MuqvtLHYAI,1140
 tests/test_write_writer.py,sha256=SzNTLsHz4RZceRwqflc4Wfn02vYc4Hb4WQVk1X8dmiw,1107
 tests/data/fmt-hdf5_cytoshot_full-features_2023.zip,sha256=LfkFxAXTIkcqxrJYYNMC364Q1x5HT5X9cTHuNz5eeuk,650653
 tests/data/fmt-hdf5_cytoshot_full-features_legacy_allev_2023.zip,sha256=z2Bk6u3wjr-bJa7sOxBcNKOQ0Zoi3Xmf_cMi6d-3CMk,154010
-dcnum-0.11.
-dcnum-0.11.
-dcnum-0.11.
-dcnum-0.11.
-dcnum-0.11.
+dcnum-0.11.2.dist-info/LICENSE,sha256=YRChA1C8A2E-amJbudwMcbTCZy_HzmeY0hMIvduh1MM,1089
+dcnum-0.11.2.dist-info/METADATA,sha256=IY1M8PxGxukdhWm-_5bxwLw-rNqkBHduJLTGzt4bytA,2180
+dcnum-0.11.2.dist-info/WHEEL,sha256=pkctZYzUS4AYVn6dJ-7367OJZivF2e8RA9b_ZBjif18,92
+dcnum-0.11.2.dist-info/top_level.txt,sha256=Utc_P-_-7hbtniTp00IsHTry4h3rY5KFmwHfYM9g44k,22
+dcnum-0.11.2.dist-info/RECORD,,
tests/conftest.py
CHANGED

@@ -1,4 +1,5 @@
 import atexit
+import os
 import shutil
 import tempfile
 import time

@@ -15,3 +16,5 @@ def pytest_configure(config):
     """
     tempfile.tempdir = TMPDIR
     atexit.register(shutil.rmtree, TMPDIR, ignore_errors=True)
+    # Disable JIT compiler during testing for coverage
+    os.environ.setdefault("NUMBA_DISABLE_JIT", "1")
tests/test_feat_brightness.py
CHANGED

@@ -31,3 +31,26 @@ def test_basic_brightness():
     # control test
     assert not np.allclose(h5["events"]["bright_perc_10"][:],
                            data["bright_perc_90"])
+
+
+def test_basic_brightness_single_image():
+    # This original file was generated with dcevent for reference.
+    path = retrieve_data(data_path /
+                         "fmt-hdf5_cytoshot_full-features_2023.zip")
+    # Make data available
+    with h5py.File(path) as h5:
+        data = feat_brightness.brightness_features(
+            image=h5["events/image"][1][np.newaxis],
+            image_bg=h5["events/image_bg"][1][np.newaxis],
+            mask=h5["events/mask"][1][np.newaxis],
+        )
+
+        assert np.allclose(data["bright_bc_avg"][0],
+                           -43.75497215592681,
+                           atol=0, rtol=1e-10)
+        for feat in feat_brightness.brightness_names:
+            assert np.allclose(h5["events"][feat][1],
+                               data[feat][0]), f"Feature {feat} mismatch!"
+        # control test
+        assert not np.allclose(h5["events"]["bright_perc_10"][1],
+                               data["bright_perc_90"][0])
tests/test_feat_haralick.py
CHANGED

@@ -31,3 +31,90 @@ def test_basic_haralick():
     # control test
     assert not np.allclose(h5["events"]["tex_asm_avg"],
                            ret_arr["tex_asm_ptp"])
+
+
+def test_empty_image():
+    masks = np.array([
+        [0, 0, 0, 0, 0, 0],
+        [0, 0, 0, 0, 0, 0],
+        [0, 0, 1, 1, 0, 0],
+        [0, 0, 1, 1, 0, 0],
+        [0, 0, 0, 0, 0, 0],
+        [0, 0, 0, 0, 0, 0],
+    ], dtype=bool)[np.newaxis]
+    image_corr = np.zeros(6*6, dtype=np.int16).reshape(1, 6, 6)
+    tex = feat_texture.haralick_texture_features(
+        image_corr=image_corr,
+        mask=masks,
+    )
+    assert np.allclose(tex["tex_con_avg"][0], 0)
+
+
+def test_empty_mask():
+    masks = np.array([
+        [0, 0, 0, 0, 0, 0],
+        [0, 0, 0, 0, 0, 0],
+        [0, 0, 0, 0, 0, 0],
+        [0, 0, 0, 0, 0, 0],
+        [0, 0, 0, 0, 0, 0],
+        [0, 0, 0, 0, 0, 0],
+    ], dtype=bool)[np.newaxis]
+    image_corr = np.arange(6*6, dtype=np.int16).reshape(1, 6, 6)
+    tex = feat_texture.haralick_texture_features(
+        image_corr=image_corr,
+        mask=masks,
+    )
+    assert np.isnan(tex["tex_con_avg"][0])
+
+
+def test_1d_mask_image():
+    masks = np.array([
+        [0, 0, 0, 0, 0, 0],
+        [0, 0, 0, 0, 0, 0],
+        [0, 0, 1, 0, 0, 0],
+        [0, 0, 1, 0, 0, 0],
+        [0, 0, 0, 0, 0, 0],
+        [0, 0, 0, 0, 0, 0],
+    ], dtype=bool)[np.newaxis]
+    image_corr = np.arange(6*6, dtype=np.int16).reshape(1, 6, 6)
+    tex = feat_texture.haralick_texture_features(
+        image_corr=image_corr,
+        mask=masks,
+    )
+    assert np.isnan(tex["tex_con_avg"][0])
+
+
+def test_nd_mask_with_1d_image():
+    mask = np.array([
+        [0, 0, 0, 0, 0, 0],
+        [0, 0, 0, 0, 0, 0],
+        [0, 0, 1, 1, 0, 0],
+        [0, 0, 1, 1, 0, 0],
+        [0, 0, 0, 0, 0, 0],
+        [0, 0, 0, 0, 0, 0],
+    ], dtype=bool)
+    masks = np.stack([mask, mask, mask, mask])
+    image_corr = np.arange(6*6, dtype=np.int16).reshape(1, 6, 6)
+    tex = feat_texture.haralick_texture_features(
+        image_corr=image_corr,
+        mask=masks,
+    )
+    assert len(tex["tex_con_avg"]) == 4
+    assert np.allclose(tex["tex_con_avg"][0], 27.75)
+
+
+def test_simple_mask_image():
+    masks = np.array([
+        [0, 0, 0, 0, 0, 0],
+        [0, 0, 0, 0, 0, 0],
+        [0, 0, 1, 1, 0, 0],
+        [0, 0, 1, 1, 0, 0],
+        [0, 0, 0, 0, 0, 0],
+        [0, 0, 0, 0, 0, 0],
+    ], dtype=bool)[np.newaxis]
+    image_corr = np.arange(6*6, dtype=np.int16).reshape(1, 6, 6)
+    tex = feat_texture.haralick_texture_features(
+        image_corr=image_corr,
+        mask=masks,
+    )
+    assert np.allclose(tex["tex_con_avg"][0], 27.75)
tests/test_feat_moments_based.py
CHANGED

@@ -50,3 +50,59 @@ def test_moments_based_features():
     # control test
     assert not np.allclose(h5["events"]["inert_ratio_cvx"][:],
                            data["tilt"])
+
+
+def test_mask_0d():
+    masks = np.array([
+        [0, 0, 0, 0, 0, 0],
+        [0, 0, 0, 0, 0, 0],
+        [0, 0, 1, 0, 0, 0],
+        [0, 0, 0, 0, 0, 0],
+        [0, 0, 0, 0, 0, 0],
+        [0, 0, 0, 0, 0, 0],
+    ], dtype=bool)[np.newaxis]
+    data = feat_moments.moments_based_features(
+        mask=masks,
+        pixel_size=0.2645
+    )
+    assert data["deform"].shape == (1,)
+    assert np.isnan(data["deform"][0])
+    assert np.isnan(data["area_um"][0])
+
+
+def test_mask_1d():
+    masks = np.array([
+        [0, 0, 0, 0, 0, 0],
+        [0, 0, 0, 0, 0, 0],
+        [0, 0, 1, 0, 0, 0],
+        [0, 0, 1, 0, 0, 0],
+        [0, 0, 0, 0, 0, 0],
+        [0, 0, 0, 0, 0, 0],
+    ], dtype=bool)[np.newaxis]
+    data = feat_moments.moments_based_features(
+        mask=masks,
+        pixel_size=0.2645
+    )
+    assert data["deform"].shape == (1,)
+    assert np.isnan(data["deform"][0])
+    assert np.isnan(data["area_um"][0])
+
+
+def test_mask_2d():
+    masks = np.array([
+        [0, 0, 0, 0, 0, 0],
+        [0, 0, 0, 0, 0, 0],
+        [0, 0, 1, 1, 0, 0],
+        [0, 0, 1, 1, 0, 0],
+        [0, 0, 0, 0, 0, 0],
+        [0, 0, 0, 0, 0, 0],
+    ], dtype=bool)[np.newaxis]
+    data = feat_moments.moments_based_features(
+        mask=masks,
+        pixel_size=0.2645
+    )
+    assert data["deform"].shape == (1,)
+    # This is the deformation of a square (compared to circle)
+    assert np.allclose(data["deform"][0], 0.11377307454724206)
+    # Without moments-based computation, this would be 4*pxsize=0.066125
+    assert np.allclose(data["area_um"][0], 0.06996025)
+
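
Note: the asserted value 0.11377307454724206 is the analytic deformation of an ideal square. With deformation defined as 1 minus circularity and circularity = 2*sqrt(pi*A)/P, a square of side a gives 1 - sqrt(pi)/2 regardless of a. Quick check:

import numpy as np

a = 1.0                      # side length cancels out
area = a ** 2
perimeter = 4 * a
deform = 1 - 2 * np.sqrt(np.pi * area) / perimeter
print(deform)                # ~0.113773074547..., the value asserted above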
tests/test_read_hdf5.py
CHANGED

@@ -134,11 +134,8 @@ def test_pickling_state():

     h5d1 = read.HDF5Data(path)
     h5d1.pixel_size = 0.124
-    [3 removed lines not shown in this rendering]
-    state2 = pickle.loads(pstate)
-    h5d2 = read.HDF5Data(**state2)
+    pstate = pickle.dumps(h5d1)
+    h5d2 = pickle.loads(pstate)
     assert h5d1.md5_5m == h5d2.md5_5m
     assert h5d1.md5_5m == h5d2.md5_5m
     assert h5d1.pixel_size == h5d2.pixel_size

@@ -150,10 +147,40 @@ def test_pickling_state_logs():
         data_path / "fmt-hdf5_cytoshot_full-features_legacy_allev_2023.zip")
     h5d1 = read.HDF5Data(path)
     h5d1.pixel_size = 0.124
-    [3 removed lines not shown in this rendering]
-    state2 = pickle.loads(pstate)
-    h5d2 = read.HDF5Data(**state2)
+    pstate = pickle.dumps(h5d1)
+    h5d2 = pickle.loads(pstate)
+    assert h5d1.logs
     for lk in h5d1.logs:
         assert h5d1.logs[lk] == h5d2.logs[lk]
+
+
+def test_pickling_state_tables():
+    path = retrieve_data(
+        data_path / "fmt-hdf5_cytoshot_full-features_legacy_allev_2023.zip")
+    # The original file does not contain any tables, so we write
+    # generate a table
+    columns = ["alot", "of", "tables"]
+    ds_dt = np.dtype({'names': columns,
+                      'formats': [float] * len(columns)})
+    tab_data = np.zeros((11, len(columns)))
+    tab_data[:, 0] = np.arange(11)
+    tab_data[:, 1] = 1000
+    tab_data[:, 2] = np.linspace(1, np.sqrt(2), 11)
+    rec_arr = np.rec.array(tab_data, dtype=ds_dt)
+
+    # add table to source file
+    with h5py.File(path, "a") as h5:
+        h5tab = h5.require_group("tables")
+        h5tab.create_dataset(name="sample_table",
+                             data=rec_arr)
+
+    h5d1 = read.HDF5Data(path)
+    h5d1.pixel_size = 0.124
+    pstate = pickle.dumps(h5d1)
+    h5d2 = pickle.loads(pstate)
+    assert h5d1.tables
+    table = h5d1.tables["sample_table"]
+    assert len(table) == 3
+    for lk in table:
+        assert np.allclose(h5d1.tables["sample_table"][lk],
+                           h5d2.tables["sample_table"][lk])
tests/test_segm_thresh.py
CHANGED

@@ -137,141 +137,3 @@ def test_segm_thresh_segment_batch_large(worker_type):
     for jj in range(101, 121):
         mask_seg = np.array(labels_seg_2[jj - 101], dtype=bool)
         assert np.all(mask_seg == mask[jj]), f"masks not matching at {jj}"
-
-
-def test_segm_thresh_labeled_mask():
-    mask = np.array([
-        [0, 0, 0, 0, 0, 0, 0, 0],
-        [0, 0, 1, 1, 1, 0, 0, 0],
-        [0, 0, 1, 0, 1, 0, 0, 0],  # filled, 1
-        [0, 0, 1, 1, 1, 0, 0, 0],
-        [0, 0, 0, 0, 0, 0, 0, 0],
-        [0, 0, 0, 0, 0, 0, 0, 0],
-        [0, 0, 0, 0, 1, 1, 1, 1],
-        [0, 0, 0, 0, 0, 1, 1, 1],  # border, 2
-        [0, 0, 0, 0, 0, 1, 1, 1],
-        [0, 1, 1, 1, 0, 0, 0, 0],
-        [0, 0, 1, 1, 1, 0, 0, 0],  # other, 3
-        [0, 0, 1, 1, 1, 0, 0, 0],
-        [0, 0, 1, 1, 1, 0, 0, 0],
-        [0, 0, 0, 0, 0, 0, 0, 0],
-    ], dtype=bool)
-
-    sm1 = segm.segm_thresh.SegmentThresh(thresh=-6,
-                                         kwargs_mask={"clear_border": True,
-                                                      "fill_holes": True,
-                                                      "closing_disk": 0,
-                                                      })
-    labels1 = sm1.segment_frame(-10 * mask)
-    assert np.sum(labels1 != 0) == 21
-    assert len(np.unique(labels1)) == 3  # (bg, filled, other)
-    assert np.sum(labels1 == 1) == 9
-    # due to the relabeling done in `fill_holes`, the index of "other" is "3"
-    assert np.sum(labels1 == 2) == 12
-
-    sm2 = segm.segm_thresh.SegmentThresh(thresh=-6,
-                                         kwargs_mask={"clear_border": True,
-                                                      "fill_holes": False,
-                                                      "closing_disk": 0,
-                                                      })
-    labels2 = sm2.segment_frame(-10 * mask)
-    _, l2a, l2b = np.unique(labels2)
-    assert np.sum(labels2 != 0) == 20
-    assert len(np.unique(labels2)) == 3  # (bg, filled, other)
-    assert np.sum(labels2 == l2a) == 8
-    assert np.sum(labels2 == l2b) == 12
-
-    sm3 = segm.segm_thresh.SegmentThresh(thresh=-6,
-                                         kwargs_mask={"clear_border": False,
-                                                      "fill_holes": False,
-                                                      "closing_disk": 0,
-                                                      })
-    labels3 = sm3.segment_frame(-10 * mask)
-    assert np.sum(labels3 != 0) == 30
-    assert len(np.unique(labels3)) == 4  # (bg, filled, border, other)
-    assert np.sum(labels3 == 1) == 8
-    assert np.sum(labels3 == 2) == 10
-    assert np.sum(labels3 == 3) == 12
-
-    sm4 = segm.segm_thresh.SegmentThresh(thresh=-6,
-                                         kwargs_mask={"clear_border": False,
-                                                      "fill_holes": True,
-                                                      "closing_disk": 0,
-                                                      })
-    labels4 = sm4.segment_frame(-10 * mask)
-    assert np.sum(labels4 != 0) == 31
-    assert len(np.unique(labels4)) == 4  # (bg, filled, border, other)
-    assert np.sum(labels4 == 1) == 9
-    assert np.sum(labels4 == 2) == 10
-    assert np.sum(labels4 == 3) == 12
-
-
-def test_segm_thresh_labeled_mask_closing_disk():
-    mask = np.array([
-        [0, 0, 0, 0, 0, 0, 0, 0, 0],
-        [0, 0, 0, 0, 0, 0, 0, 0, 0],
-        [0, 0, 1, 1, 1, 0, 0, 0, 0],
-        [0, 0, 1, 0, 1, 0, 0, 0, 0],  # filled, 1
-        [0, 0, 1, 1, 1, 0, 0, 0, 0],
-        [0, 0, 0, 0, 0, 0, 0, 0, 0],
-        [0, 0, 0, 0, 0, 0, 0, 0, 0],
-        [0, 0, 0, 0, 0, 0, 0, 0, 0],
-        [0, 0, 0, 0, 0, 1, 1, 1, 1],
-        [0, 0, 0, 0, 0, 0, 1, 1, 1],  # border, 2
-        [0, 0, 0, 0, 0, 0, 0, 0, 1],
-        [0, 0, 1, 1, 1, 0, 0, 0, 0],
-        [0, 0, 1, 0, 0, 1, 1, 0, 0],  # other, 3
-        [0, 0, 1, 0, 0, 0, 1, 0, 0],
-        [0, 0, 1, 0, 0, 0, 1, 0, 0],
-        [0, 0, 1, 1, 1, 1, 1, 0, 0],
-        [0, 0, 0, 0, 0, 0, 0, 0, 0],
-        [0, 0, 0, 0, 0, 0, 0, 0, 0],
-    ], dtype=bool)
-
-    sm1 = segm.segm_thresh.SegmentThresh(thresh=-6,
-                                         kwargs_mask={"clear_border": True,
-                                                      "fill_holes": True,
-                                                      "closing_disk": 1,
-                                                      })
-    labels1 = sm1.segment_frame(-10 * mask)
-    assert np.sum(labels1 != 0) == 32
-    assert len(np.unique(labels1)) == 3  # (bg, filled, other)
-    assert np.sum(labels1 == 1) == 9
-    # due to the relabeling done in `fill_holes`, the index of "other" is "3"
-    assert np.sum(labels1 == 2) == 23
-
-    sm2 = segm.segm_thresh.SegmentThresh(thresh=-6,
-                                         kwargs_mask={"clear_border": True,
-                                                      "fill_holes": False,
-                                                      "closing_disk": 1,
-                                                      })
-    labels2 = sm2.segment_frame(-10 * mask)
-    _, l2a, l2b = np.unique(labels2)
-    assert np.sum(labels2 != 0) == 27
-    assert len(np.unique(labels2)) == 3  # (bg, filled, other)
-    assert np.sum(labels2 == l2a) == 9
-    assert np.sum(labels2 == l2b) == 18
-
-    sm3 = segm.segm_thresh.SegmentThresh(thresh=-6,
-                                         kwargs_mask={"clear_border": False,
-                                                      "fill_holes": False,
-                                                      "closing_disk": 1,
-                                                      })
-    labels3 = sm3.segment_frame(-10 * mask)
-    assert np.sum(labels3 != 0) == 35
-    assert len(np.unique(labels3)) == 4  # (bg, filled, border, other)
-    assert np.sum(labels3 == 1) == 9
-    assert np.sum(labels3 == 2) == 8
-    assert np.sum(labels3 == 3) == 18
-
-    sm4 = segm.segm_thresh.SegmentThresh(thresh=-6,
-                                         kwargs_mask={"clear_border": False,
-                                                      "fill_holes": True,
-                                                      "closing_disk": 1,
-                                                      })
-    labels4 = sm4.segment_frame(-10 * mask)
-    assert np.sum(labels4 != 0) == 40
-    assert len(np.unique(labels4)) == 4  # (bg, filled, border, other)
-    assert np.sum(labels4 == 1) == 9
-    assert np.sum(labels4 == 2) == 8
-    assert np.sum(labels4 == 3) == 23
tests/test_segmenter.py
ADDED

@@ -0,0 +1,225 @@
+import pathlib
+
+from dcnum import segm
+import numpy as np
+
+data_path = pathlib.Path(__file__).parent / "data"
+
+
+class MockImageData:
+    mask = np.array([
+        [0, 0, 0, 0, 0, 0, 0, 0],
+        [0, 0, 1, 1, 1, 0, 0, 0],
+        [0, 0, 1, 0, 1, 0, 0, 0],  # filled, 1
+        [0, 0, 1, 1, 1, 0, 0, 0],
+        [0, 0, 0, 0, 0, 0, 0, 0],
+        [0, 0, 0, 0, 0, 0, 0, 0],
+        [0, 0, 0, 0, 1, 1, 1, 1],
+        [0, 0, 0, 0, 0, 1, 1, 1],  # border, 2
+        [0, 0, 0, 0, 0, 1, 1, 1],
+        [0, 1, 1, 1, 0, 0, 0, 0],
+        [0, 0, 1, 1, 1, 0, 0, 0],  # other, 3
+        [0, 0, 1, 1, 1, 0, 0, 0],
+        [0, 0, 1, 1, 1, 0, 0, 0],
+        [0, 0, 0, 0, 0, 0, 0, 0],
+    ], dtype=bool)
+
+    def get_chunk(self, chunk_index):
+        image = np.array(-(10 + chunk_index) * self.mask, dtype=np.int16)
+        chunk = np.stack([image] * 100, dtype=np.int16)
+        return chunk
+
+
+def test_segmenter_labeled_mask():
+    mask = np.array([
+        [0, 0, 0, 0, 0, 0, 0, 0],
+        [0, 0, 1, 1, 1, 0, 0, 0],
+        [0, 0, 1, 0, 1, 0, 0, 0],  # filled, 1
+        [0, 0, 1, 1, 1, 0, 0, 0],
+        [0, 0, 0, 0, 0, 0, 0, 0],
+        [0, 0, 0, 0, 0, 0, 0, 0],
+        [0, 0, 0, 0, 1, 1, 1, 1],
+        [0, 0, 0, 0, 0, 1, 1, 1],  # border, 2
+        [0, 0, 0, 0, 0, 1, 1, 1],
+        [0, 1, 1, 1, 0, 0, 0, 0],
+        [0, 0, 1, 1, 1, 0, 0, 0],  # other, 3
+        [0, 0, 1, 1, 1, 0, 0, 0],
+        [0, 0, 1, 1, 1, 0, 0, 0],
+        [0, 0, 0, 0, 0, 0, 0, 0],
+    ], dtype=bool)
+
+    sm1 = segm.segm_thresh.SegmentThresh(thresh=-6,
+                                         kwargs_mask={"clear_border": True,
+                                                      "fill_holes": True,
+                                                      "closing_disk": 0,
+                                                      })
+    labels1 = sm1.segment_frame(-10 * mask)
+    assert np.sum(labels1 != 0) == 21
+    assert len(np.unique(labels1)) == 3  # (bg, filled, other)
+    assert np.sum(labels1 == 1) == 9
+    # due to the relabeling done in `fill_holes`, the index of "other" is "3"
+    assert np.sum(labels1 == 2) == 12
+
+    sm2 = segm.segm_thresh.SegmentThresh(thresh=-6,
+                                         kwargs_mask={"clear_border": True,
+                                                      "fill_holes": False,
+                                                      "closing_disk": 0,
+                                                      })
+    labels2 = sm2.segment_frame(-10 * mask)
+    _, l2a, l2b = np.unique(labels2)
+    assert np.sum(labels2 != 0) == 20
+    assert len(np.unique(labels2)) == 3  # (bg, filled, other)
+    assert np.sum(labels2 == l2a) == 8
+    assert np.sum(labels2 == l2b) == 12
+
+    sm3 = segm.segm_thresh.SegmentThresh(thresh=-6,
+                                         kwargs_mask={"clear_border": False,
+                                                      "fill_holes": False,
+                                                      "closing_disk": 0,
+                                                      })
+    labels3 = sm3.segment_frame(-10 * mask)
+    assert np.sum(labels3 != 0) == 30
+    assert len(np.unique(labels3)) == 4  # (bg, filled, border, other)
+    assert np.sum(labels3 == 1) == 8
+    assert np.sum(labels3 == 2) == 10
+    assert np.sum(labels3 == 3) == 12
+
+    sm4 = segm.segm_thresh.SegmentThresh(thresh=-6,
+                                         kwargs_mask={"clear_border": False,
+                                                      "fill_holes": True,
+                                                      "closing_disk": 0,
+                                                      })
+    labels4 = sm4.segment_frame(-10 * mask)
+    assert np.sum(labels4 != 0) == 31
+    assert len(np.unique(labels4)) == 4  # (bg, filled, border, other)
+    assert np.sum(labels4 == 1) == 9
+    assert np.sum(labels4 == 2) == 10
+    assert np.sum(labels4 == 3) == 12
+
+
+def test_segmenter_labeled_mask_closing_disk():
+    mask = np.array([
+        [0, 0, 0, 0, 0, 0, 0, 0, 0],
+        [0, 0, 0, 0, 0, 0, 0, 0, 0],
+        [0, 0, 1, 1, 1, 0, 0, 0, 0],
+        [0, 0, 1, 0, 1, 0, 0, 0, 0],  # filled, 1
+        [0, 0, 1, 1, 1, 0, 0, 0, 0],
+        [0, 0, 0, 0, 0, 0, 0, 0, 0],
+        [0, 0, 0, 0, 0, 0, 0, 0, 0],
+        [0, 0, 0, 0, 0, 0, 0, 0, 0],
+        [0, 0, 0, 0, 0, 1, 1, 1, 1],
+        [0, 0, 0, 0, 0, 0, 1, 1, 1],  # border, 2
+        [0, 0, 0, 0, 0, 0, 0, 0, 1],
+        [0, 0, 1, 1, 1, 0, 0, 0, 0],
+        [0, 0, 1, 0, 0, 1, 1, 0, 0],  # other, 3
+        [0, 0, 1, 0, 0, 0, 1, 0, 0],
+        [0, 0, 1, 0, 0, 0, 1, 0, 0],
+        [0, 0, 1, 1, 1, 1, 1, 0, 0],
+        [0, 0, 0, 0, 0, 0, 0, 0, 0],
+        [0, 0, 0, 0, 0, 0, 0, 0, 0],
+    ], dtype=bool)
+
+    sm1 = segm.segm_thresh.SegmentThresh(thresh=-6,
+                                         kwargs_mask={"clear_border": True,
+                                                      "fill_holes": True,
+                                                      "closing_disk": 1,
+                                                      })
+    labels1 = sm1.segment_frame(-10 * mask)
+    assert np.sum(labels1 != 0) == 32
+    assert len(np.unique(labels1)) == 3  # (bg, filled, other)
+    assert np.sum(labels1 == 1) == 9
+    # due to the relabeling done in `fill_holes`, the index of "other" is "3"
+    assert np.sum(labels1 == 2) == 23
+
+    sm2 = segm.segm_thresh.SegmentThresh(thresh=-6,
+                                         kwargs_mask={"clear_border": True,
+                                                      "fill_holes": False,
+                                                      "closing_disk": 1,
+                                                      })
+    labels2 = sm2.segment_frame(-10 * mask)
+    _, l2a, l2b = np.unique(labels2)
+    assert np.sum(labels2 != 0) == 27
+    assert len(np.unique(labels2)) == 3  # (bg, filled, other)
+    assert np.sum(labels2 == l2a) == 9
+    assert np.sum(labels2 == l2b) == 18
+
+    sm3 = segm.segm_thresh.SegmentThresh(thresh=-6,
+                                         kwargs_mask={"clear_border": False,
+                                                      "fill_holes": False,
+                                                      "closing_disk": 1,
+                                                      })
+    labels3 = sm3.segment_frame(-10 * mask)
+    assert np.sum(labels3 != 0) == 35
+    assert len(np.unique(labels3)) == 4  # (bg, filled, border, other)
+    assert np.sum(labels3 == 1) == 9
+    assert np.sum(labels3 == 2) == 8
+    assert np.sum(labels3 == 3) == 18
+
+    sm4 = segm.segm_thresh.SegmentThresh(thresh=-6,
+                                         kwargs_mask={"clear_border": False,
+                                                      "fill_holes": True,
+                                                      "closing_disk": 1,
+                                                      })
+    labels4 = sm4.segment_frame(-10 * mask)
+    assert np.sum(labels4 != 0) == 40
+    assert len(np.unique(labels4)) == 4  # (bg, filled, border, other)
+    assert np.sum(labels4 == 1) == 9
+    assert np.sum(labels4 == 2) == 8
+    assert np.sum(labels4 == 3) == 23
+
+
+def test_segmenter_labeled_mask_fill_holes_int32():
+    mask = np.array([
+        [0, 0, 0, 0, 0, 0, 0, 0],
+        [0, 0, 1, 1, 1, 0, 0, 0],
+        [0, 0, 1, 0, 1, 0, 0, 0],  # filled, 1
+        [0, 0, 1, 1, 1, 0, 0, 0],
+        [0, 0, 0, 0, 0, 0, 0, 0],
+        [0, 0, 0, 0, 0, 0, 0, 0],
+        [0, 0, 0, 0, 1, 1, 1, 1],
+        [0, 0, 0, 0, 0, 1, 1, 1],  # border, 2
+        [0, 0, 0, 0, 0, 1, 1, 1],
+        [0, 1, 1, 1, 0, 0, 0, 0],
+        [0, 0, 1, 1, 1, 0, 0, 0],  # other, 3
+        [0, 0, 1, 1, 1, 0, 0, 0],
+        [0, 0, 1, 1, 1, 0, 0, 0],
+        [0, 0, 0, 0, 0, 0, 0, 0],
+    ], dtype=bool)
+
+    sm1 = segm.segm_thresh.SegmentThresh(thresh=-6)
+    labels = np.array(sm1.segment_frame(-10 * mask), dtype=np.int64)
+    # sanity checks
+    assert labels.dtype == np.int64
+    assert labels.dtype != np.int32
+    labels_2 = sm1.process_mask(labels,
+                                clear_border=False,
+                                fill_holes=True,
+                                closing_disk=False)
+    assert np.allclose(labels, labels_2)
+    assert labels_2.dtype == np.int32
+
+
+def test_segmenter_segment_chunk():
+    with segm.segm_thresh.SegmentThresh(thresh=-12, debug=True) as sm:
+        image_data = MockImageData()
+        labels_1 = np.copy(sm.segment_chunk(image_data, 0))  # below threshold
+        assert sm.image_array.min() == -10
+        labels_2 = np.copy(sm.segment_chunk(image_data, 10))  # above threshold
+        assert sm.image_array.min() == -20
+        assert np.all(labels_1 == 0)
+        assert not np.all(labels_2 == 0)
+
+
+def test_cpu_segmenter_getsetstate():
+    sm1 = segm.segm_thresh.SegmentThresh(thresh=-12, debug=True)
+    with segm.segm_thresh.SegmentThresh(thresh=-12, debug=True) as sm2:
+        image_data = MockImageData()
+        # Do some processing so that we have workers
+        sm2.segment_chunk(image_data, 0)
+        # get the state
+        state = sm2.__getstate__()
+        # set the state
+        sm1.__setstate__(state)
+        # and here we test for the raw data that was transferred
+        assert not np.all(sm1.image_array == sm2.image_array)
+        assert np.all(sm1.mp_image_raw == sm2.mp_image_raw)
{dcnum-0.11.0.dist-info → dcnum-0.11.2.dist-info}/LICENSE
File without changes
{dcnum-0.11.0.dist-info → dcnum-0.11.2.dist-info}/WHEEL
File without changes
{dcnum-0.11.0.dist-info → dcnum-0.11.2.dist-info}/top_level.txt
File without changes