senoquant-1.0.0b1-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (148)
  1. senoquant/__init__.py +6 -0
  2. senoquant/_reader.py +7 -0
  3. senoquant/_widget.py +33 -0
  4. senoquant/napari.yaml +83 -0
  5. senoquant/reader/__init__.py +5 -0
  6. senoquant/reader/core.py +369 -0
  7. senoquant/tabs/__init__.py +15 -0
  8. senoquant/tabs/batch/__init__.py +10 -0
  9. senoquant/tabs/batch/backend.py +641 -0
  10. senoquant/tabs/batch/config.py +270 -0
  11. senoquant/tabs/batch/frontend.py +1283 -0
  12. senoquant/tabs/batch/io.py +326 -0
  13. senoquant/tabs/batch/layers.py +86 -0
  14. senoquant/tabs/quantification/__init__.py +1 -0
  15. senoquant/tabs/quantification/backend.py +228 -0
  16. senoquant/tabs/quantification/features/__init__.py +80 -0
  17. senoquant/tabs/quantification/features/base.py +142 -0
  18. senoquant/tabs/quantification/features/marker/__init__.py +5 -0
  19. senoquant/tabs/quantification/features/marker/config.py +69 -0
  20. senoquant/tabs/quantification/features/marker/dialog.py +437 -0
  21. senoquant/tabs/quantification/features/marker/export.py +879 -0
  22. senoquant/tabs/quantification/features/marker/feature.py +119 -0
  23. senoquant/tabs/quantification/features/marker/morphology.py +285 -0
  24. senoquant/tabs/quantification/features/marker/rows.py +654 -0
  25. senoquant/tabs/quantification/features/marker/thresholding.py +46 -0
  26. senoquant/tabs/quantification/features/roi.py +346 -0
  27. senoquant/tabs/quantification/features/spots/__init__.py +5 -0
  28. senoquant/tabs/quantification/features/spots/config.py +62 -0
  29. senoquant/tabs/quantification/features/spots/dialog.py +477 -0
  30. senoquant/tabs/quantification/features/spots/export.py +1292 -0
  31. senoquant/tabs/quantification/features/spots/feature.py +112 -0
  32. senoquant/tabs/quantification/features/spots/morphology.py +279 -0
  33. senoquant/tabs/quantification/features/spots/rows.py +241 -0
  34. senoquant/tabs/quantification/frontend.py +815 -0
  35. senoquant/tabs/segmentation/__init__.py +1 -0
  36. senoquant/tabs/segmentation/backend.py +131 -0
  37. senoquant/tabs/segmentation/frontend.py +1009 -0
  38. senoquant/tabs/segmentation/models/__init__.py +5 -0
  39. senoquant/tabs/segmentation/models/base.py +146 -0
  40. senoquant/tabs/segmentation/models/cpsam/details.json +65 -0
  41. senoquant/tabs/segmentation/models/cpsam/model.py +150 -0
  42. senoquant/tabs/segmentation/models/default_2d/details.json +69 -0
  43. senoquant/tabs/segmentation/models/default_2d/model.py +664 -0
  44. senoquant/tabs/segmentation/models/default_3d/details.json +69 -0
  45. senoquant/tabs/segmentation/models/default_3d/model.py +682 -0
  46. senoquant/tabs/segmentation/models/hf.py +71 -0
  47. senoquant/tabs/segmentation/models/nuclear_dilation/__init__.py +1 -0
  48. senoquant/tabs/segmentation/models/nuclear_dilation/details.json +26 -0
  49. senoquant/tabs/segmentation/models/nuclear_dilation/model.py +96 -0
  50. senoquant/tabs/segmentation/models/perinuclear_rings/__init__.py +1 -0
  51. senoquant/tabs/segmentation/models/perinuclear_rings/details.json +34 -0
  52. senoquant/tabs/segmentation/models/perinuclear_rings/model.py +132 -0
  53. senoquant/tabs/segmentation/stardist_onnx_utils/__init__.py +2 -0
  54. senoquant/tabs/segmentation/stardist_onnx_utils/_csbdeep/csbdeep/__init__.py +3 -0
  55. senoquant/tabs/segmentation/stardist_onnx_utils/_csbdeep/csbdeep/data/__init__.py +6 -0
  56. senoquant/tabs/segmentation/stardist_onnx_utils/_csbdeep/csbdeep/data/generate.py +470 -0
  57. senoquant/tabs/segmentation/stardist_onnx_utils/_csbdeep/csbdeep/data/prepare.py +273 -0
  58. senoquant/tabs/segmentation/stardist_onnx_utils/_csbdeep/csbdeep/data/rawdata.py +112 -0
  59. senoquant/tabs/segmentation/stardist_onnx_utils/_csbdeep/csbdeep/data/transform.py +384 -0
  60. senoquant/tabs/segmentation/stardist_onnx_utils/_csbdeep/csbdeep/internals/__init__.py +0 -0
  61. senoquant/tabs/segmentation/stardist_onnx_utils/_csbdeep/csbdeep/internals/blocks.py +184 -0
  62. senoquant/tabs/segmentation/stardist_onnx_utils/_csbdeep/csbdeep/internals/losses.py +79 -0
  63. senoquant/tabs/segmentation/stardist_onnx_utils/_csbdeep/csbdeep/internals/nets.py +165 -0
  64. senoquant/tabs/segmentation/stardist_onnx_utils/_csbdeep/csbdeep/internals/predict.py +467 -0
  65. senoquant/tabs/segmentation/stardist_onnx_utils/_csbdeep/csbdeep/internals/probability.py +67 -0
  66. senoquant/tabs/segmentation/stardist_onnx_utils/_csbdeep/csbdeep/internals/train.py +148 -0
  67. senoquant/tabs/segmentation/stardist_onnx_utils/_csbdeep/csbdeep/io/__init__.py +163 -0
  68. senoquant/tabs/segmentation/stardist_onnx_utils/_csbdeep/csbdeep/models/__init__.py +52 -0
  69. senoquant/tabs/segmentation/stardist_onnx_utils/_csbdeep/csbdeep/models/base_model.py +329 -0
  70. senoquant/tabs/segmentation/stardist_onnx_utils/_csbdeep/csbdeep/models/care_isotropic.py +160 -0
  71. senoquant/tabs/segmentation/stardist_onnx_utils/_csbdeep/csbdeep/models/care_projection.py +178 -0
  72. senoquant/tabs/segmentation/stardist_onnx_utils/_csbdeep/csbdeep/models/care_standard.py +446 -0
  73. senoquant/tabs/segmentation/stardist_onnx_utils/_csbdeep/csbdeep/models/care_upsampling.py +54 -0
  74. senoquant/tabs/segmentation/stardist_onnx_utils/_csbdeep/csbdeep/models/config.py +254 -0
  75. senoquant/tabs/segmentation/stardist_onnx_utils/_csbdeep/csbdeep/models/pretrained.py +119 -0
  76. senoquant/tabs/segmentation/stardist_onnx_utils/_csbdeep/csbdeep/scripts/__init__.py +0 -0
  77. senoquant/tabs/segmentation/stardist_onnx_utils/_csbdeep/csbdeep/scripts/care_predict.py +180 -0
  78. senoquant/tabs/segmentation/stardist_onnx_utils/_csbdeep/csbdeep/utils/__init__.py +5 -0
  79. senoquant/tabs/segmentation/stardist_onnx_utils/_csbdeep/csbdeep/utils/plot_utils.py +159 -0
  80. senoquant/tabs/segmentation/stardist_onnx_utils/_csbdeep/csbdeep/utils/six.py +18 -0
  81. senoquant/tabs/segmentation/stardist_onnx_utils/_csbdeep/csbdeep/utils/tf.py +644 -0
  82. senoquant/tabs/segmentation/stardist_onnx_utils/_csbdeep/csbdeep/utils/utils.py +272 -0
  83. senoquant/tabs/segmentation/stardist_onnx_utils/_csbdeep/csbdeep/version.py +1 -0
  84. senoquant/tabs/segmentation/stardist_onnx_utils/_csbdeep/docs/source/conf.py +368 -0
  85. senoquant/tabs/segmentation/stardist_onnx_utils/_csbdeep/setup.py +68 -0
  86. senoquant/tabs/segmentation/stardist_onnx_utils/_csbdeep/tests/test_datagen.py +169 -0
  87. senoquant/tabs/segmentation/stardist_onnx_utils/_csbdeep/tests/test_models.py +462 -0
  88. senoquant/tabs/segmentation/stardist_onnx_utils/_csbdeep/tests/test_utils.py +166 -0
  89. senoquant/tabs/segmentation/stardist_onnx_utils/_csbdeep/tools/create_zip_contents.py +34 -0
  90. senoquant/tabs/segmentation/stardist_onnx_utils/_stardist/__init__.py +30 -0
  91. senoquant/tabs/segmentation/stardist_onnx_utils/_stardist/big.py +624 -0
  92. senoquant/tabs/segmentation/stardist_onnx_utils/_stardist/bioimageio_utils.py +494 -0
  93. senoquant/tabs/segmentation/stardist_onnx_utils/_stardist/data/__init__.py +39 -0
  94. senoquant/tabs/segmentation/stardist_onnx_utils/_stardist/geometry/__init__.py +10 -0
  95. senoquant/tabs/segmentation/stardist_onnx_utils/_stardist/geometry/geom2d.py +215 -0
  96. senoquant/tabs/segmentation/stardist_onnx_utils/_stardist/geometry/geom3d.py +349 -0
  97. senoquant/tabs/segmentation/stardist_onnx_utils/_stardist/matching.py +483 -0
  98. senoquant/tabs/segmentation/stardist_onnx_utils/_stardist/models/__init__.py +28 -0
  99. senoquant/tabs/segmentation/stardist_onnx_utils/_stardist/models/base.py +1217 -0
  100. senoquant/tabs/segmentation/stardist_onnx_utils/_stardist/models/model2d.py +594 -0
  101. senoquant/tabs/segmentation/stardist_onnx_utils/_stardist/models/model3d.py +696 -0
  102. senoquant/tabs/segmentation/stardist_onnx_utils/_stardist/nms.py +384 -0
  103. senoquant/tabs/segmentation/stardist_onnx_utils/_stardist/plot/__init__.py +2 -0
  104. senoquant/tabs/segmentation/stardist_onnx_utils/_stardist/plot/plot.py +74 -0
  105. senoquant/tabs/segmentation/stardist_onnx_utils/_stardist/plot/render.py +298 -0
  106. senoquant/tabs/segmentation/stardist_onnx_utils/_stardist/rays3d.py +373 -0
  107. senoquant/tabs/segmentation/stardist_onnx_utils/_stardist/sample_patches.py +65 -0
  108. senoquant/tabs/segmentation/stardist_onnx_utils/_stardist/scripts/__init__.py +0 -0
  109. senoquant/tabs/segmentation/stardist_onnx_utils/_stardist/scripts/predict2d.py +90 -0
  110. senoquant/tabs/segmentation/stardist_onnx_utils/_stardist/scripts/predict3d.py +93 -0
  111. senoquant/tabs/segmentation/stardist_onnx_utils/_stardist/utils.py +408 -0
  112. senoquant/tabs/segmentation/stardist_onnx_utils/_stardist/version.py +1 -0
  113. senoquant/tabs/segmentation/stardist_onnx_utils/onnx_framework/__init__.py +45 -0
  114. senoquant/tabs/segmentation/stardist_onnx_utils/onnx_framework/convert/__init__.py +17 -0
  115. senoquant/tabs/segmentation/stardist_onnx_utils/onnx_framework/convert/cli.py +55 -0
  116. senoquant/tabs/segmentation/stardist_onnx_utils/onnx_framework/convert/core.py +285 -0
  117. senoquant/tabs/segmentation/stardist_onnx_utils/onnx_framework/inspect/__init__.py +15 -0
  118. senoquant/tabs/segmentation/stardist_onnx_utils/onnx_framework/inspect/cli.py +36 -0
  119. senoquant/tabs/segmentation/stardist_onnx_utils/onnx_framework/inspect/divisibility.py +193 -0
  120. senoquant/tabs/segmentation/stardist_onnx_utils/onnx_framework/inspect/probe.py +100 -0
  121. senoquant/tabs/segmentation/stardist_onnx_utils/onnx_framework/inspect/receptive_field.py +182 -0
  122. senoquant/tabs/segmentation/stardist_onnx_utils/onnx_framework/inspect/rf_cli.py +48 -0
  123. senoquant/tabs/segmentation/stardist_onnx_utils/onnx_framework/inspect/valid_sizes.py +278 -0
  124. senoquant/tabs/segmentation/stardist_onnx_utils/onnx_framework/post/__init__.py +8 -0
  125. senoquant/tabs/segmentation/stardist_onnx_utils/onnx_framework/post/core.py +157 -0
  126. senoquant/tabs/segmentation/stardist_onnx_utils/onnx_framework/pre/__init__.py +17 -0
  127. senoquant/tabs/segmentation/stardist_onnx_utils/onnx_framework/pre/core.py +226 -0
  128. senoquant/tabs/segmentation/stardist_onnx_utils/onnx_framework/predict/__init__.py +5 -0
  129. senoquant/tabs/segmentation/stardist_onnx_utils/onnx_framework/predict/core.py +401 -0
  130. senoquant/tabs/settings/__init__.py +1 -0
  131. senoquant/tabs/settings/backend.py +29 -0
  132. senoquant/tabs/settings/frontend.py +19 -0
  133. senoquant/tabs/spots/__init__.py +1 -0
  134. senoquant/tabs/spots/backend.py +139 -0
  135. senoquant/tabs/spots/frontend.py +800 -0
  136. senoquant/tabs/spots/models/__init__.py +5 -0
  137. senoquant/tabs/spots/models/base.py +94 -0
  138. senoquant/tabs/spots/models/rmp/details.json +61 -0
  139. senoquant/tabs/spots/models/rmp/model.py +499 -0
  140. senoquant/tabs/spots/models/udwt/details.json +103 -0
  141. senoquant/tabs/spots/models/udwt/model.py +482 -0
  142. senoquant/utils.py +25 -0
  143. senoquant-1.0.0b1.dist-info/METADATA +193 -0
  144. senoquant-1.0.0b1.dist-info/RECORD +148 -0
  145. senoquant-1.0.0b1.dist-info/WHEEL +5 -0
  146. senoquant-1.0.0b1.dist-info/entry_points.txt +2 -0
  147. senoquant-1.0.0b1.dist-info/licenses/LICENSE +28 -0
  148. senoquant-1.0.0b1.dist-info/top_level.txt +1 -0
senoquant/tabs/segmentation/stardist_onnx_utils/_stardist/big.py
@@ -0,0 +1,624 @@
+ import numpy as np
+ import warnings
+ import math
+ from tqdm import tqdm
+ from skimage.measure import regionprops
+ from skimage.draw import polygon
+ from csbdeep.utils import _raise, axes_check_and_normalize, axes_dict
+ from itertools import product
+
+ from .geometry import polygons_to_label_coord, polyhedron_to_label
+
+
+
+ OBJECT_KEYS = set(('prob', 'points', 'coord', 'dist', 'class_prob', 'class_id'))
+ COORD_KEYS = set(('points', 'coord'))
+
+
+
+ class Block:
+     """One-dimensional block as part of a chain.
+
+     There are no explicit start and end positions. Instead, each block is
+     aware of its predecessor and successor and derives such things (recursively)
+     based on its neighbors.
+
+     Blocks overlap with one another (at least min_overlap + 2*context) and
+     have a read region (the entire block) and a write region (ignoring context).
+     Given a query interval, Block.is_responsible will return true for only one
+     block of a chain (or raise an exception if the interval is larger than
+     min_overlap or even the entire block without context).
+
+     """
+     def __init__(self, size, min_overlap, context, pred):
+         self.size = int(size)
+         self.min_overlap = int(min_overlap)
+         self.context = int(context)
+         self.pred = pred
+         self.succ = None
+         assert 0 <= self.min_overlap + 2*self.context < self.size
+         self.stride = self.size - (self.min_overlap + 2*self.context)
+         self._start = 0
+         self._frozen = False
+         self._extra_context_start = 0
+         self._extra_context_end = 0
+
+     @property
+     def start(self):
+         return self._start if (self.frozen or self.at_begin) else self.pred.succ_start
+
+     @property
+     def end(self):
+         return self.start + self.size
+
+     @property
+     def succ_start(self):
+         return self.start + self.stride
+
+     def add_succ(self):
+         assert self.succ is None and not self.frozen
+         self.succ = Block(self.size, self.min_overlap, self.context, self)
+         return self.succ
+
+     def decrease_stride(self, amount):
+         amount = int(amount)
+         assert 0 <= amount < self.stride and not self.frozen
+         self.stride -= amount
+
+     def freeze(self):
+         """Call on first block to freeze entire chain (after construction is done)"""
+         assert not self.frozen and (self.at_begin or self.pred.frozen)
+         self._start = self.start
+         self._frozen = True
+         if not self.at_end:
+             self.succ.freeze()
+
+     @property
+     def slice_read(self):
+         return slice(self.start, self.end)
+
+     @property
+     def slice_crop_context(self):
+         """Crop context relative to read region"""
+         return slice(self.context_start, self.size - self.context_end)
+
+     @property
+     def slice_write(self):
+         return slice(self.start + self.context_start, self.end - self.context_end)
+
+     def is_responsible(self, bbox):
+         """Responsibility for query interval bbox, which is assumed to be smaller than min_overlap.
+
+         If the assumption is met, only one block of a chain will return true.
+         If violated, one or more blocks of a chain may raise a NotFullyVisible exception.
+         The exception will have an argument that is
+         False if bbox is larger than min_overlap, and
+         True if bbox is even larger than the entire block without context.
+
+         bbox: (int,int)
+             1D bounding box interval with coordinates relative to size without context
+
+         """
+         bmin, bmax = bbox
+
+         r_start = 0 if self.at_begin else (self.pred.overlap - self.pred.context_end - self.context_start)
+         r_end = self.size - self.context_start - self.context_end
+         assert 0 <= bmin < bmax <= r_end
+
+         # assert not (bmin == 0 and bmax >= r_start and not self.at_begin), [(r_start,r_end), bbox, self]
+
+         if bmin == 0 and bmax >= r_start:
+             if bmax == r_end:
+                 # object spans the entire block, i.e. is probably larger than size (minus the context)
+                 raise NotFullyVisible(True)
+             if not self.at_begin:
+                 # object spans the entire overlap region, i.e. is only partially visible here and also by the predecessor block
+                 raise NotFullyVisible(False)
+
+         # object ends before responsible region start
+         if bmax < r_start: return False
+         # object touches the end of the responsible region (only take if at end)
+         if bmax == r_end and not self.at_end: return False
+         return True
+
+     # ------------------------
+
+     @property
+     def frozen(self):
+         return self._frozen
+
+     @property
+     def at_begin(self):
+         return self.pred is None
+
+     @property
+     def at_end(self):
+         return self.succ is None
+
+     @property
+     def overlap(self):
+         return self.size - self.stride
+
+     @property
+     def context_start(self):
+         return 0 if self.at_begin else self.context + self._extra_context_start
+
+     @property
+     def context_end(self):
+         return 0 if self.at_end else self.context + self._extra_context_end
+
+     def __repr__(self):
+         text = f'{self.start:03}:{self.end:03}'
+         text += f', write={self.slice_write.start:03}:{self.slice_write.stop:03}'
+         text += f', size={self.context_start}+{self.size-self.context_start-self.context_end}+{self.context_end}'
+         if not self.at_end:
+             text += f', overlap={self.overlap}R/{self.overlap-self.context_end-self.succ.context_start}W'
+         return f'{self.__class__.__name__}({text})'
+
+     @property
+     def chain(self):
+         blocks = [self]
+         while not blocks[-1].at_end:
+             blocks.append(blocks[-1].succ)
+         return blocks
+
+     def __iter__(self):
+         return iter(self.chain)
+
+     # ------------------------
+
+     @staticmethod
+     def cover(size, block_size, min_overlap, context, grid=1, verbose=True):
+         """Return chain of grid-aligned blocks to cover the interval [0,size].
+
+         Parameters block_size, min_overlap, and context will be used
+         for all blocks of the chain. Only the size of the last block
+         may differ.
+
+         Except for the last block, start and end positions of all blocks will
+         be multiples of grid. To that end, the provided block parameters may
+         be increased to achieve that.
+
+         Note that parameters must be chosen such that the write regions of only
+         neighboring blocks are overlapping.
+
+         """
+         assert 0 <= min_overlap+2*context < block_size <= size
+         assert 0 < grid <= block_size
+         block_size = _grid_divisible(grid, block_size, name='block_size', verbose=verbose)
+         min_overlap = _grid_divisible(grid, min_overlap, name='min_overlap', verbose=verbose)
+         context = _grid_divisible(grid, context, name='context', verbose=verbose)
+
+         # allow size not to be divisible by grid
+         size_orig = size
+         size = _grid_divisible(grid, size, name='size', verbose=False)
+
+         # divide all sizes by grid
+         assert all(v % grid == 0 for v in (size, block_size, min_overlap, context))
+         size //= grid
+         block_size //= grid
+         min_overlap //= grid
+         context //= grid
+
+         # compute cover in grid-multiples
+         t = first = Block(block_size, min_overlap, context, None)
+         while t.end < size:
+             t = t.add_succ()
+         last = t
+
+         # print(); [print(t) for t in first]
+
+         # move blocks around to make it fit
+         excess = last.end - size
+         t = first
+         while excess > 0:
+             t.decrease_stride(1)
+             excess -= 1
+             t = t.succ
+             if (t == last): t = first
+         # print(); [print(t) for t in first]
+
+         # add extra context to avoid overlapping write regions of non-neighboring blocks
+         t = first
+         while not t.at_end:
+             if (t.succ is not None and t.succ.succ is not None):
+                 overlap_write = t.slice_write.stop - t.succ.succ.slice_write.start
+                 if overlap_write > 0:
+                     overlap_split1, overlap_split2 = overlap_write // 2, overlap_write - overlap_write // 2
+                     t._extra_context_end += overlap_split1
+                     t.succ.succ._extra_context_start += overlap_split2
+             t = t.succ
+         # print(); [print(t) for t in first]
+
+
+         # make a copy of the cover and multiply sizes by grid
+         if grid > 1:
+             size *= grid
+             block_size *= grid
+             min_overlap *= grid
+             context *= grid
+             #
+             _t = _first = first
+             t = first = Block(block_size, min_overlap, context, None)
+             t.stride = _t.stride*grid
+             t._extra_context_start = _t._extra_context_start*grid
+             t._extra_context_end = _t._extra_context_end*grid
+             while not _t.at_end:
+                 _t = _t.succ
+                 t = t.add_succ()
+                 t.stride = _t.stride*grid
+                 t._extra_context_start = _t._extra_context_start*grid
+                 t._extra_context_end = _t._extra_context_end*grid
+             last = t
+
+         # change size of last block
+         # will be padded internally to the same size
+         # as the others by model.predict_instances
+         size_delta = size - size_orig
+         last.size -= size_delta
+         assert 0 <= size_delta < grid
+
+         # for efficiency (to not determine starts recursively from now on)
+         first.freeze()
+
+         blocks = first.chain
+         # print(); [print(t) for t in first]
+
+         # sanity checks
+         assert first.start == 0 and last.end == size_orig
+         assert all(t.overlap-2*context >= min_overlap for t in blocks if t != last)
+         assert all(t.slice_write.stop-t.succ.slice_write.start >= min_overlap for t in blocks if t != last)
+         assert all(t.start % grid == 0 and t.end % grid == 0 for t in blocks if t != last)
+         # print(); [print(t) for t in first]
+
+         # only neighboring blocks should be overlapping
+         if len(blocks) >= 3:
+             for t in blocks[:-2]:
+                 assert t.slice_write.stop <= t.succ.succ.slice_write.start
+
+         return blocks
+
+
+
+ class BlockND:
+     """N-dimensional block.
+
+     Each BlockND simply consists of a 1-dimensional Block per axis and also
+     has an id (which should be unique). The n-dimensional region represented
+     by each BlockND is the intersection of all 1D Blocks per axis.
+
+     Also see `Block`.
+
+     """
+     def __init__(self, id, blocks, axes):
+         self.id = id
+         self.blocks = tuple(blocks)
+         self.axes = axes_check_and_normalize(axes, length=len(self.blocks))
+         self.axis_to_block = dict(zip(self.axes,self.blocks))
+
+     def blocks_for_axes(self, axes=None):
+         axes = self.axes if axes is None else axes_check_and_normalize(axes)
+         return tuple(self.axis_to_block[a] for a in axes)
+
+     def slice_read(self, axes=None):
+         return tuple(t.slice_read for t in self.blocks_for_axes(axes))
+
+     def slice_crop_context(self, axes=None):
+         return tuple(t.slice_crop_context for t in self.blocks_for_axes(axes))
+
+     def slice_write(self, axes=None):
+         return tuple(t.slice_write for t in self.blocks_for_axes(axes))
+
+     def read(self, x, axes=None):
+         """Read block "read region" from x (numpy.ndarray or similar)"""
+         return x[self.slice_read(axes)]
+
+     def crop_context(self, labels, axes=None):
+         return labels[self.slice_crop_context(axes)]
+
+     def write(self, x, labels, axes=None):
+         """Write (only entries > 0 of) labels to block "write region" of x (numpy.ndarray or similar)"""
+         s = self.slice_write(axes)
+         mask = labels > 0
+         # x[s][mask] = labels[mask] # doesn't work with zarr
+         region = x[s]
+         region[mask] = labels[mask]
+         x[s] = region
+
+     def is_responsible(self, slices, axes=None):
+         return all(t.is_responsible((s.start,s.stop)) for t,s in zip(self.blocks_for_axes(axes),slices))
+
+     def __repr__(self):
+         slices = ','.join(f'{a}={t.start:03}:{t.end:03}' for t,a in zip(self.blocks,self.axes))
+         return f'{self.__class__.__name__}({self.id}|{slices})'
+
+     def __iter__(self):
+         return iter(self.blocks)
+
+     # ------------------------
+
+     def filter_objects(self, labels, polys, axes=None):
+         """Filter out objects that block is not responsible for.
+
+         Given label image 'labels' and dictionary 'polys' of polygon/polyhedron objects,
+         only retain those objects that this block is responsible for.
+
+         This function will return a pair (labels, polys) of the modified label image and dictionary.
+         It will raise a RuntimeError if an object is found in the overlap area
+         of neighboring blocks that violates the assumption to be smaller than 'min_overlap'.
+
+         If parameter 'polys' is None, only the filtered label image will be returned.
+
+         Notes
+         -----
+         - Important: It is assumed that the object label ids in 'labels' and
+           the entries in 'polys' are sorted in the same way.
+         - Does not modify 'labels' and 'polys', but returns modified copies.
+
+         Example
+         -------
+         >>> labels, polys = model.predict_instances(block.read(img))
+         >>> labels = block.crop_context(labels)
+         >>> labels, polys = block.filter_objects(labels, polys)
+
+         """
+         # TODO: option to update labels in-place
+         assert np.issubdtype(labels.dtype, np.integer)
+         ndim = len(self.blocks_for_axes(axes))
+         assert ndim in (2,3)
+         assert labels.ndim == ndim and labels.shape == tuple(s.stop-s.start for s in self.slice_crop_context(axes))
+
+         labels_filtered = np.zeros_like(labels)
+         # problem_ids = []
+         for r in regionprops(labels):
+             slices = tuple(slice(r.bbox[i],r.bbox[i+labels.ndim]) for i in range(labels.ndim))
+             try:
+                 if self.is_responsible(slices, axes):
+                     labels_filtered[slices][r.image] = r.label
+             except NotFullyVisible as e:
+                 # shape_block_write = tuple(s.stop-s.start for s in self.slice_write(axes))
+                 shape_object = tuple(s.stop-s.start for s in slices)
+                 shape_min_overlap = tuple(t.min_overlap for t in self.blocks_for_axes(axes))
+                 raise RuntimeError(f"Found object of shape {shape_object}, which violates the assumption of being smaller than 'min_overlap' {shape_min_overlap}. Increase 'min_overlap' to avoid this problem.")
+
+                 # if e.args[0]: # object larger than block write region
+                 #     assert any(o >= b for o,b in zip(shape_object,shape_block_write))
+                 #     # problem, since this object will probably be saved by another block too
+                 #     raise RuntimeError(f"Found object of shape {shape_object}, larger than an entire block's write region of shape {shape_block_write}. Increase 'block_size' to avoid this problem.")
+                 #     # print("found object larger than 'block_size'")
+                 # else:
+                 #     assert any(o >= b for o,b in zip(shape_object,shape_min_overlap))
+                 #     # print("found object larger than 'min_overlap'")
+
+                 #     # keep object, because will be dealt with later, i.e.
+                 #     # render the poly again into the label image, but this is not
+                 #     # ideal since the assumption is that the object outside that
+                 #     # region is not reliable because it's in the context
+                 #     labels_filtered[slices][r.image] = r.label
+                 #     problem_ids.append(r.label)
+
+         if polys is None:
+             # assert len(problem_ids) == 0
+             return labels_filtered
+         else:
+             # it is assumed that ids in 'labels' map to entries in 'polys'
+             assert isinstance(polys,dict) and any(k in polys for k in COORD_KEYS)
+             filtered_labels = np.unique(labels_filtered)
+             filtered_ind = [i-1 for i in filtered_labels if i > 0]
+             polys_out = {k: (v[filtered_ind] if k in OBJECT_KEYS else v) for k,v in polys.items()}
+             for k in COORD_KEYS:
+                 if k in polys_out.keys():
+                     polys_out[k] = self.translate_coordinates(polys_out[k], axes=axes)
+
+             return labels_filtered, polys_out#, tuple(problem_ids)
+
+     def translate_coordinates(self, coordinates, axes=None):
+         """Translate local block coordinates (of read region) to global ones based on block position"""
+         ndim = len(self.blocks_for_axes(axes))
+         assert isinstance(coordinates, np.ndarray) and coordinates.ndim >= 2 and coordinates.shape[1] == ndim
+         start = [s.start for s in self.slice_read(axes)]
+         shape = tuple(1 if d!=1 else ndim for d in range(coordinates.ndim))
+         start = np.array(start).reshape(shape)
+         return coordinates + start
+
+     # ------------------------
+
+     @staticmethod
+     def cover(shape, axes, block_size, min_overlap, context, grid=1):
+         """Return grid-aligned n-dimensional blocks to cover region
+         of the given shape with axes semantics.
+
+         Parameters block_size, min_overlap, and context can be different per
+         dimension/axis (if provided as list) or the same (if provided as
+         scalar value).
+
+         Also see `Block.cover`.
+
+         """
+         shape = tuple(shape)
+         n = len(shape)
+         axes = axes_check_and_normalize(axes, length=n)
+         if np.isscalar(block_size): block_size = n*[block_size]
+         if np.isscalar(min_overlap): min_overlap = n*[min_overlap]
+         if np.isscalar(context): context = n*[context]
+         if np.isscalar(grid): grid = n*[grid]
+         assert n == len(block_size) == len(min_overlap) == len(context) == len(grid)
+
+         # compute cover for each dimension
+         cover_1d = [Block.cover(*args) for args in zip(shape, block_size, min_overlap, context, grid)]
+         # return cover as Cartesian product of 1-dimensional blocks
+         return tuple(BlockND(i,blocks,axes) for i,blocks in enumerate(product(*cover_1d)))
+
+
+
+ class Polygon:
+
+     def __init__(self, coord, bbox=None, shape_max=None):
+         self.bbox = self.coords_bbox(coord, shape_max=shape_max) if bbox is None else bbox
+         self.coord = coord - np.array([r[0] for r in self.bbox]).reshape(2,1)
+         self.slice = tuple(slice(*r) for r in self.bbox)
+         self.shape = tuple(r[1]-r[0] for r in self.bbox)
+         rr,cc = polygon(*self.coord, self.shape)
+         self.mask = np.zeros(self.shape, bool)
+         self.mask[rr,cc] = True
+
+     @staticmethod
+     def coords_bbox(*coords, shape_max=None):
+         assert all(isinstance(c, np.ndarray) and c.ndim==2 and c.shape[0]==2 for c in coords)
+         if shape_max is None:
+             shape_max = (np.inf, np.inf)
+         coord = np.concatenate(coords, axis=1)
+         mins = np.maximum(0, np.floor(np.min(coord,axis=1))).astype(int)
+         maxs = np.minimum(shape_max, np.ceil (np.max(coord,axis=1))).astype(int)
+         return tuple(zip(tuple(mins),tuple(maxs)))
+
+
+
+ class Polyhedron:
+
+     def __init__(self, dist, origin, rays, bbox=None, shape_max=None):
+         self.bbox = self.coords_bbox((dist, origin), rays=rays, shape_max=shape_max) if bbox is None else bbox
+         self.slice = tuple(slice(*r) for r in self.bbox)
+         self.shape = tuple(r[1]-r[0] for r in self.bbox)
+         _origin = origin.reshape(1,3) - np.array([r[0] for r in self.bbox]).reshape(1,3)
+         self.mask = polyhedron_to_label(dist[np.newaxis], _origin, rays, shape=self.shape, verbose=False).astype(bool)
+
+     @staticmethod
+     def coords_bbox(*dist_origin, rays, shape_max=None):
+         dists, points = zip(*dist_origin)
+         assert all(isinstance(d, np.ndarray) and d.ndim==1 and len(d)==len(rays) for d in dists)
+         assert all(isinstance(p, np.ndarray) and p.ndim==1 and len(p)==3 for p in points)
+         dists, points, verts = np.stack(dists)[...,np.newaxis], np.stack(points)[:,np.newaxis], rays.vertices[np.newaxis]
+         coord = dists * verts + points
+         coord = np.concatenate(coord, axis=0)
+         if shape_max is None:
+             shape_max = (np.inf, np.inf, np.inf)
+         mins = np.maximum(0, np.floor(np.min(coord,axis=0))).astype(int)
+         maxs = np.minimum(shape_max, np.ceil (np.max(coord,axis=0))).astype(int)
+         return tuple(zip(tuple(mins),tuple(maxs)))
+
+
+
+ # def repaint_labels(output, labels, polys, show_progress=True):
+ #     """Repaint object instances in correct order based on probability scores.
+
+ #     Does modify 'output' and 'polys' in-place, but will only write sparsely to 'output' where needed.
+
+ #     output: numpy.ndarray or similar
+ #         Label image (integer-valued)
+ #     labels: iterable of int
+ #         List of integer label ids that occur in output
+ #     polys: dict
+ #         Dictionary of polygon/polyhedra properties.
+ #         Assumption is that the label id (-1) corresponds to the index in the polys dict
+
+ #     """
+ #     assert output.ndim in (2,3)
+
+ #     if show_progress:
+ #         labels = tqdm(labels, leave=True)
+
+ #     labels_eliminated = set()
+
+ #     # TODO: inelegant to have so much duplicated code here
+ #     if output.ndim == 2:
+ #         coord = lambda i: polys['coord'][i-1]
+ #         prob = lambda i: polys['prob'][i-1]
+
+ #         for i in labels:
+ #             if i in labels_eliminated: continue
+ #             poly_i = Polygon(coord(i), shape_max=output.shape)
+
+ #             # find all labels that overlap with i (including i)
+ #             overlapping = set(np.unique(output[poly_i.slice][poly_i.mask])) - {0}
+ #             assert i in overlapping
+ #             # compute bbox union to find area to crop/replace in large output label image
+ #             bbox_union = Polygon.coords_bbox(*[coord(j) for j in overlapping], shape_max=output.shape)
+
+ #             # crop out label i, including the region that include all overlapping labels
+ #             poly_i = Polygon(coord(i), bbox=bbox_union)
+ #             mask = poly_i.mask.copy()
+
+ #             # remove pixels from mask that belong to labels with higher probability
+ #             for j in [j for j in overlapping if prob(j) > prob(i)]:
+ #                 mask[ Polygon(coord(j), bbox=bbox_union).mask ] = False
+
+ #             crop = output[poly_i.slice]
+ #             crop[crop==i] = 0 # delete all remnants of i in crop
+ #             crop[mask] = i # paint i where mask still active
+
+ #             labels_remaining = set(np.unique(output[poly_i.slice][poly_i.mask])) - {0}
+ #             labels_eliminated.update(overlapping - labels_remaining)
+ #     else:
+
+ #         dist = lambda i: polys['dist'][i-1]
+ #         origin = lambda i: polys['points'][i-1]
+ #         prob = lambda i: polys['prob'][i-1]
+ #         rays = polys['rays']
+
+ #         for i in labels:
+ #             if i in labels_eliminated: continue
+ #             poly_i = Polyhedron(dist(i), origin(i), rays, shape_max=output.shape)
+
+ #             # find all labels that overlap with i (including i)
+ #             overlapping = set(np.unique(output[poly_i.slice][poly_i.mask])) - {0}
+ #             assert i in overlapping
+ #             # compute bbox union to find area to crop/replace in large output label image
+ #             bbox_union = Polyhedron.coords_bbox(*[(dist(j),origin(j)) for j in overlapping], rays=rays, shape_max=output.shape)
+
+ #             # crop out label i, including the region that include all overlapping labels
+ #             poly_i = Polyhedron(dist(i), origin(i), rays, bbox=bbox_union)
+ #             mask = poly_i.mask.copy()
+
+ #             # remove pixels from mask that belong to labels with higher probability
+ #             for j in [j for j in overlapping if prob(j) > prob(i)]:
+ #                 mask[ Polyhedron(dist(j), origin(j), rays, bbox=bbox_union).mask ] = False
+
+ #             crop = output[poly_i.slice]
+ #             crop[crop==i] = 0 # delete all remnants of i in crop
+ #             crop[mask] = i # paint i where mask still active
+
+ #             labels_remaining = set(np.unique(output[poly_i.slice][poly_i.mask])) - {0}
+ #             labels_eliminated.update(overlapping - labels_remaining)
+
+ #     if len(labels_eliminated) > 0:
+ #         ind = [i-1 for i in labels_eliminated]
+ #         for k,v in polys.items():
+ #             if k in OBJECT_KEYS:
+ #                 polys[k] = np.delete(v, ind, axis=0)
+
+
+
+ ############
+
+
+
+ def predict_big(model, *args, **kwargs):
+     from .models import StarDist2D, StarDist3D
+     if isinstance(model,(StarDist2D,StarDist3D)):
+         dst = model.__class__.__name__
+     else:
+         dst = '{StarDist2D, StarDist3D}'
+     raise RuntimeError(f"This function has moved to {dst}.predict_instances_big.")
+
+
+
+ class NotFullyVisible(Exception):
+     pass
+
+
+
+ def _grid_divisible(grid, size, name=None, verbose=True):
+     if size % grid == 0:
+         return size
+     _size = size
+     size = math.ceil(size / grid) * grid
+     if bool(verbose):
+         print(f"{verbose if isinstance(verbose,str) else ''}increasing '{'value' if name is None else name}' from {_size} to {size} to be evenly divisible by {grid} (grid)", flush=True)
+     assert size % grid == 0
+     return size
+
+
+
+ # def render_polygons(polys, shape):
+ #     return polygons_to_label_coord(polys['coord'], shape=shape)
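
Note on usage (not part of the packaged file above): the docstrings in this vendored StarDist module (`Block.cover`, `BlockND.cover`, `BlockND.filter_objects`) describe a blockwise workflow for images too large to segment in one pass: cover the image with overlapping blocks, predict per block, crop away the context, keep only the objects each block is responsible for, and write those back. The sketch below illustrates that loop under stated assumptions; `img`, `model`, and the parameter values are placeholders, and `model.predict_instances` stands in for any StarDist-style model that returns a label image plus a polygon dictionary.

import numpy as np
# Vendored module path taken from the file list above.
from senoquant.tabs.segmentation.stardist_onnx_utils._stardist.big import BlockND

# Assumptions: `img` is a 2D (Y, X) numpy array and `model` is a StarDist-style
# model whose predict_instances(tile) returns (labels, polys).
blocks = BlockND.cover(
    shape=img.shape,
    axes="YX",
    block_size=512,   # placeholder read-region size per axis
    min_overlap=64,   # must exceed the largest expected object
    context=32,       # extra margin cropped away before writing
    grid=1,
)

output = np.zeros(img.shape, dtype=np.int32)  # a zarr array also works with BlockND.write
label_offset = 0

for block in blocks:
    tile = block.read(img, axes="YX")                                # read region incl. context
    labels, polys = model.predict_instances(tile)                    # per-block prediction
    labels = block.crop_context(labels, axes="YX")                   # drop context margins
    labels, polys = block.filter_objects(labels, polys, axes="YX")   # keep responsible objects
    # Re-number labels so ids stay unique across blocks, then write back.
    ids = np.unique(labels)
    ids = ids[ids > 0]
    relabeled = np.zeros_like(labels)
    for new_id, old_id in enumerate(ids, start=label_offset + 1):
        relabeled[labels == old_id] = new_id
    label_offset += len(ids)
    block.write(output, relabeled, axes="YX")

The `predict_big` stub at the end of the file points to `StarDist2D.predict_instances_big` / `StarDist3D.predict_instances_big` as the upstream entry point that wraps this kind of loop.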