Rhapso 0.1.92__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (101)
  1. Rhapso/__init__.py +1 -0
  2. Rhapso/data_prep/__init__.py +2 -0
  3. Rhapso/data_prep/n5_reader.py +188 -0
  4. Rhapso/data_prep/s3_big_stitcher_reader.py +55 -0
  5. Rhapso/data_prep/xml_to_dataframe.py +215 -0
  6. Rhapso/detection/__init__.py +5 -0
  7. Rhapso/detection/advanced_refinement.py +203 -0
  8. Rhapso/detection/difference_of_gaussian.py +324 -0
  9. Rhapso/detection/image_reader.py +117 -0
  10. Rhapso/detection/metadata_builder.py +130 -0
  11. Rhapso/detection/overlap_detection.py +327 -0
  12. Rhapso/detection/points_validation.py +49 -0
  13. Rhapso/detection/save_interest_points.py +265 -0
  14. Rhapso/detection/view_transform_models.py +67 -0
  15. Rhapso/fusion/__init__.py +0 -0
  16. Rhapso/fusion/affine_fusion/__init__.py +2 -0
  17. Rhapso/fusion/affine_fusion/blend.py +289 -0
  18. Rhapso/fusion/affine_fusion/fusion.py +601 -0
  19. Rhapso/fusion/affine_fusion/geometry.py +159 -0
  20. Rhapso/fusion/affine_fusion/io.py +546 -0
  21. Rhapso/fusion/affine_fusion/script_utils.py +111 -0
  22. Rhapso/fusion/affine_fusion/setup.py +4 -0
  23. Rhapso/fusion/affine_fusion_worker.py +234 -0
  24. Rhapso/fusion/multiscale/__init__.py +0 -0
  25. Rhapso/fusion/multiscale/aind_hcr_data_transformation/__init__.py +19 -0
  26. Rhapso/fusion/multiscale/aind_hcr_data_transformation/compress/__init__.py +3 -0
  27. Rhapso/fusion/multiscale/aind_hcr_data_transformation/compress/czi_to_zarr.py +698 -0
  28. Rhapso/fusion/multiscale/aind_hcr_data_transformation/compress/zarr_writer.py +265 -0
  29. Rhapso/fusion/multiscale/aind_hcr_data_transformation/models.py +81 -0
  30. Rhapso/fusion/multiscale/aind_hcr_data_transformation/utils/__init__.py +3 -0
  31. Rhapso/fusion/multiscale/aind_hcr_data_transformation/utils/utils.py +526 -0
  32. Rhapso/fusion/multiscale/aind_hcr_data_transformation/zeiss_job.py +249 -0
  33. Rhapso/fusion/multiscale/aind_z1_radial_correction/__init__.py +21 -0
  34. Rhapso/fusion/multiscale/aind_z1_radial_correction/array_to_zarr.py +257 -0
  35. Rhapso/fusion/multiscale/aind_z1_radial_correction/radial_correction.py +557 -0
  36. Rhapso/fusion/multiscale/aind_z1_radial_correction/run_capsule.py +98 -0
  37. Rhapso/fusion/multiscale/aind_z1_radial_correction/utils/__init__.py +3 -0
  38. Rhapso/fusion/multiscale/aind_z1_radial_correction/utils/utils.py +266 -0
  39. Rhapso/fusion/multiscale/aind_z1_radial_correction/worker.py +89 -0
  40. Rhapso/fusion/multiscale_worker.py +113 -0
  41. Rhapso/fusion/neuroglancer_link_gen/__init__.py +8 -0
  42. Rhapso/fusion/neuroglancer_link_gen/dispim_link.py +235 -0
  43. Rhapso/fusion/neuroglancer_link_gen/exaspim_link.py +127 -0
  44. Rhapso/fusion/neuroglancer_link_gen/hcr_link.py +368 -0
  45. Rhapso/fusion/neuroglancer_link_gen/iSPIM_top.py +47 -0
  46. Rhapso/fusion/neuroglancer_link_gen/link_utils.py +239 -0
  47. Rhapso/fusion/neuroglancer_link_gen/main.py +299 -0
  48. Rhapso/fusion/neuroglancer_link_gen/ng_layer.py +1434 -0
  49. Rhapso/fusion/neuroglancer_link_gen/ng_state.py +1123 -0
  50. Rhapso/fusion/neuroglancer_link_gen/parsers.py +336 -0
  51. Rhapso/fusion/neuroglancer_link_gen/raw_link.py +116 -0
  52. Rhapso/fusion/neuroglancer_link_gen/utils/__init__.py +4 -0
  53. Rhapso/fusion/neuroglancer_link_gen/utils/shader_utils.py +85 -0
  54. Rhapso/fusion/neuroglancer_link_gen/utils/transfer.py +43 -0
  55. Rhapso/fusion/neuroglancer_link_gen/utils/utils.py +303 -0
  56. Rhapso/fusion/neuroglancer_link_gen_worker.py +30 -0
  57. Rhapso/matching/__init__.py +0 -0
  58. Rhapso/matching/load_and_transform_points.py +458 -0
  59. Rhapso/matching/ransac_matching.py +544 -0
  60. Rhapso/matching/save_matches.py +120 -0
  61. Rhapso/matching/xml_parser.py +302 -0
  62. Rhapso/pipelines/__init__.py +0 -0
  63. Rhapso/pipelines/ray/__init__.py +0 -0
  64. Rhapso/pipelines/ray/aws/__init__.py +0 -0
  65. Rhapso/pipelines/ray/aws/alignment_pipeline.py +227 -0
  66. Rhapso/pipelines/ray/aws/config/__init__.py +0 -0
  67. Rhapso/pipelines/ray/evaluation.py +71 -0
  68. Rhapso/pipelines/ray/interest_point_detection.py +137 -0
  69. Rhapso/pipelines/ray/interest_point_matching.py +110 -0
  70. Rhapso/pipelines/ray/local/__init__.py +0 -0
  71. Rhapso/pipelines/ray/local/alignment_pipeline.py +167 -0
  72. Rhapso/pipelines/ray/matching_stats.py +104 -0
  73. Rhapso/pipelines/ray/param/__init__.py +0 -0
  74. Rhapso/pipelines/ray/solver.py +120 -0
  75. Rhapso/pipelines/ray/split_dataset.py +78 -0
  76. Rhapso/solver/__init__.py +0 -0
  77. Rhapso/solver/compute_tiles.py +562 -0
  78. Rhapso/solver/concatenate_models.py +116 -0
  79. Rhapso/solver/connected_graphs.py +111 -0
  80. Rhapso/solver/data_prep.py +181 -0
  81. Rhapso/solver/global_optimization.py +410 -0
  82. Rhapso/solver/model_and_tile_setup.py +109 -0
  83. Rhapso/solver/pre_align_tiles.py +323 -0
  84. Rhapso/solver/save_results.py +97 -0
  85. Rhapso/solver/view_transforms.py +75 -0
  86. Rhapso/solver/xml_to_dataframe_solver.py +213 -0
  87. Rhapso/split_dataset/__init__.py +0 -0
  88. Rhapso/split_dataset/compute_grid_rules.py +78 -0
  89. Rhapso/split_dataset/save_points.py +101 -0
  90. Rhapso/split_dataset/save_xml.py +377 -0
  91. Rhapso/split_dataset/split_images.py +537 -0
  92. Rhapso/split_dataset/xml_to_dataframe_split.py +219 -0
  93. rhapso-0.1.92.dist-info/METADATA +39 -0
  94. rhapso-0.1.92.dist-info/RECORD +101 -0
  95. rhapso-0.1.92.dist-info/WHEEL +5 -0
  96. rhapso-0.1.92.dist-info/licenses/LICENSE +21 -0
  97. rhapso-0.1.92.dist-info/top_level.txt +2 -0
  98. tests/__init__.py +1 -0
  99. tests/test_detection.py +17 -0
  100. tests/test_matching.py +21 -0
  101. tests/test_solving.py +21 -0
Rhapso/detection/difference_of_gaussian.py
@@ -0,0 +1,324 @@
+ from scipy.ndimage import gaussian_filter
+ from scipy.ndimage import map_coordinates
+ from scipy.ndimage import median_filter
+ from scipy.ndimage import maximum_filter
+ from scipy.linalg import lu_factor, lu_solve
+ import numpy as np
+
+ """
+ Difference of Gaussian computes the difference of Gaussian on a 3D image chunk, collecting interest points and intensities
+ """
+
+ class DifferenceOfGaussian:
+     def __init__(self, min_intensity, max_intensity, sigma, threshold, median_filter, mip_map_downsample):
+         self.min_intensity = min_intensity
+         self.max_intensity = max_intensity
+         self.sigma = sigma
+         self.threshold = threshold
+         self.median_filter = median_filter  # XY median-filter radius; 0 disables background subtraction
+         self.mip_map_downsample = mip_map_downsample  # 4x4 affine mapping mipmap space back to full resolution
+
+     def apply_offset(self, peaks, offset_z):
+         """
+         Updates points with the sub-regional z offset
+         """
+         if peaks is None or peaks.size == 0:
+             return peaks
+
+         peaks = np.asarray(peaks, dtype=np.float32).copy()
+         peaks[:, 2] += offset_z
+
+         return peaks
+
+     def upsample_coordinates(self, points):
+         """
+         Map 3D points from downsampled (mipmap) space back to full resolution
+         """
+         P = np.asarray(points, dtype=np.float32)
+         T = np.asarray(self.mip_map_downsample, dtype=np.float32)
+
+         R = T[:3, :3]
+         t = T[:3, 3]
+
+         return (P @ R.T) + t
+
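For illustration only, a minimal sketch of the mapping above, assuming a hypothetical mip_map_downsample that scales (x, y, z) by (4, 4, 2) with no translation:

import numpy as np

# hypothetical 4x4 affine: (x, y, z) were downsampled by (4, 4, 2), no translation
T = np.diag([4.0, 4.0, 2.0, 1.0]).astype(np.float32)

points = np.array([[10.0, 20.0, 5.0]], dtype=np.float32)  # peaks in mipmap space
R, t = T[:3, :3], T[:3, 3]
print(points @ R.T + t)  # [[40. 80. 10.]] -- full-resolution coordinates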
+     def apply_lower_bounds(self, peaks, lower_bounds):
+         """
+         Updates points with lower bounds
+         """
+         if peaks is None or peaks.size == 0:
+             return peaks
+
+         peaks = np.asarray(peaks, dtype=np.float32).copy()
+         bounds_xyz = np.array(lower_bounds, dtype=np.float32)
+         peaks += bounds_xyz
+
+         return peaks
+
+     def gaussian_3d(self, xyz, amplitude, zo, yo, xo, sigma_x, sigma_y, sigma_z, offset):
+         """
+         Computes the 3D Gaussian value for given coordinates and Gaussian parameters.
+         """
+         x, y, z = xyz
+         g = offset + amplitude * np.exp(
+             -(((x - xo) ** 2) / (2 * sigma_x ** 2) +
+               ((y - yo) ** 2) / (2 * sigma_y ** 2) +
+               ((z - zo) ** 2) / (2 * sigma_z ** 2)))
+
+         return g.ravel()
+
+     def quadratic_fit(self, image, position):
+         """
+         Compute the gradient vector (g) and Hessian matrix (H) at an integer voxel using second-order central differences
+         """
+         n = len(position)
+         g = np.zeros(n, dtype=np.float64)
+         H = np.zeros((n, n), dtype=np.float64)
+
+         a1 = float(image[tuple(position)])  # center value
+
+         for d in range(n):
+             pos = list(position)
+             pos[d] -= 1
+             a0 = float(image[tuple(pos)])
+             pos[d] += 2
+             a2 = float(image[tuple(pos)])
+
+             # g(d) = (a2 - a0)/2
+             g[d] = 0.5 * (a2 - a0)
+
+             # H(dd) = a2 - 2*a1 + a0
+             H[d, d] = a2 - 2.0 * a1 + a0
+
+             # Off-diagonals: (+1,+1), (-1,+1), (+1,-1), (-1,-1)
+             for e in range(d + 1, n):
+                 vals = []
+                 for off_d, off_e in ((1, 1), (-1, 1), (1, -1), (-1, -1)):
+                     pos = list(position)
+                     pos[d] += off_d
+                     pos[e] += off_e
+                     vals.append(float(image[tuple(pos)]))
+                 v = (vals[0] - vals[1] - vals[2] + vals[3]) * 0.25
+                 H[d, e] = H[e, d] = v
+
+         return g, H, a1
+
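Second-order central differences are exact for polynomials of degree <= 2, so quadratic_fit can be sanity-checked against a sampled quadratic; a sketch (the constructor arguments are placeholders):

import numpy as np

# f(x, y, z) = x^2 + 2*y^2 + 3*z^2 sampled on a 5x5x5 grid
xx, yy, zz = np.meshgrid(np.arange(5), np.arange(5), np.arange(5), indexing='ij')
img = (xx**2 + 2 * yy**2 + 3 * zz**2).astype(np.float64)

det = DifferenceOfGaussian(0.0, 1.0, 1.8, 0.01, 0, np.eye(4))  # placeholder parameters
g, H, center = det.quadratic_fit(img, (2, 2, 2))
print(g)           # [ 4.  8. 12.]  == (2x, 4y, 6z) at (2, 2, 2)
print(np.diag(H))  # [2. 4. 6.]     second derivatives; off-diagonal terms are 0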
+     def refine_peaks(self, peaks, image):
+         """
+         Quadratic peak refinement - iteratively refine integer-voxel peaks to subpixel locations using a
+         quadratic (Newton) update from the local gradient/Hessian, with step capping and boundary checks
+         """
+         max_moves = 10
+         maxima_tolerance = 0.1
+         threshold = 0.0
+         return_invalid_peaks = False
+         allowed_to_move_in_dim = None
+
+         if allowed_to_move_in_dim is None:
+             allowed_to_move_in_dim = [True] * image.ndim
+
+         refined_positions = []
+         shape = np.asarray(image.shape, dtype=np.int64)
+         n = image.ndim
+
+         def solve_Hg(H, g):
+             lu, piv = lu_factor(H)
+             return -lu_solve((lu, piv), g)
+
+         for peak in peaks:
+             base = getattr(peak, "location", peak)
+             position = np.array(base, dtype=np.int64)
+             stable = False
+
+             for move in range(max_moves):
+                 # need interior neighborhood (±1 available)
+                 if np.any(position < 1) or np.any(position >= shape - 1):
+                     break
+
+                 g, H, a1 = self.quadratic_fit(image, position)
+                 offset = solve_Hg(H, g)
+
+                 threshold_move = 0.5 + move * float(maxima_tolerance)
+
+                 stable = True
+                 for d in range(n):
+                     if allowed_to_move_in_dim[d] and abs(offset[d]) > threshold_move:
+                         position[d] += 1 if offset[d] > 0.0 else -1
+                         stable = False
+
+                 if stable:
+                     # value at subpixel = center + 0.5 * g^T * offset
+                     value = float(a1 + 0.5 * np.dot(g, offset))
+                     if abs(value) > float(threshold):
+                         refined_positions.append(position.astype(np.float64) + offset)
+                     # whether kept or filtered by threshold, we're done with this peak
+                     break
+
+             if (not stable) and return_invalid_peaks:
+                 # invalid handling: return original integer location
+                 refined_positions.append(np.asarray(base, dtype=np.float64))
+
+         if not refined_positions:
+             return np.empty((0, n), dtype=np.float32)
+
+         return np.vstack(refined_positions).astype(np.float32)
+
+     def find_peaks(self, dog, min_initial_peak_value):
+         """
+         Find 3D peak candidates as strict local maxima (26-neighborhood)
+         """
+         L = np.asarray(dog, dtype=np.float32)
+
+         # skip outer 1-voxel border
+         interior = np.zeros_like(L, dtype=bool)
+         interior[1:-1, 1:-1, 1:-1] = True
+
+         # strict local maxima vs 26 neighbors (exclude center)
+         fp = np.ones((3, 3, 3), dtype=bool)
+         fp[1, 1, 1] = False
+         neigh_max = maximum_filter(L, footprint=fp, mode="reflect")
+         strict_max = L > neigh_max
+
+         strong = L >= float(min_initial_peak_value)
+         cand = interior & strict_max & strong
+
+         if not cand.any():
+             return np.empty((0, 3), dtype=np.int32)
+
+         peaks = np.column_stack(np.nonzero(cand)).astype(np.int32)
+
+         return peaks
+
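A quick illustration of the strict-maximum test with made-up values: only interior voxels that strictly exceed all 26 neighbors and reach the threshold survive, so a brighter value sitting on the border is ignored:

import numpy as np

vol = np.zeros((5, 5, 5), dtype=np.float32)
vol[2, 2, 2] = 1.0   # interior peak
vol[0, 0, 0] = 5.0   # brighter, but on the border -> excluded

det = DifferenceOfGaussian(0.0, 1.0, 1.8, 0.01, 0, np.eye(4))  # placeholder parameters
print(det.find_peaks(vol, min_initial_peak_value=0.5))          # [[2 2 2]]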
+     def apply_gaussian_blur(self, img, sigma):
+         """
+         Apply an N-D Gaussian blur with per-axis sigmas using reflect padding at the borders
+         """
+         sigma = tuple(float(s) for s in sigma)
+         blurred_image = gaussian_filter(img, sigma=sigma, mode='reflect')
+
+         return blurred_image
+
+     def compute_sigma(self, steps, k, initial_sigma):
+         """
+         Computes a series of sigma values for Gaussian blurring.
+         Each subsequent sigma is derived by multiplying the previous one by the factor k.
+         """
+         sigma = np.zeros(steps + 1)
+         sigma[0] = initial_sigma
+
+         for i in range(1, steps + 1):
+             sigma[i] = sigma[i - 1] * k
+
+         return sigma
+
+     def compute_sigma_difference(self, sigma, image_sigma):
+         """
+         Computes the difference in sigma values required to achieve a desired level of blurring,
+         accounting for the existing blur (image_sigma) in an image.
+         """
+         steps = len(sigma) - 1
+         sigma_diff = np.zeros(steps + 1)
+         sigma_diff[0] = np.sqrt(sigma[0]**2 - image_sigma**2)
+
+         for i in range(1, steps + 1):
+             sigma_diff[i] = np.sqrt(sigma[i]**2 - image_sigma**2)
+
+         return sigma_diff
+
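The subtraction in quadrature reflects the semigroup property of Gaussian blurs: filtering with sigma_a and then sigma_b equals one blur of sqrt(sigma_a^2 + sigma_b^2), so the extra blur needed to reach a target sigma from an image already carrying image_sigma is sqrt(target^2 - image_sigma^2). A quick numeric check:

import numpy as np

target, image_sigma = 1.8, 0.5
sigma_diff = np.sqrt(target**2 - image_sigma**2)   # ~1.729

# blurring by image_sigma and then by sigma_diff lands exactly on target
assert np.isclose(np.sqrt(image_sigma**2 + sigma_diff**2), target)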
+     def compute_sigmas(self, initial_sigma, shape, k):
+         """
+         Generates sigma values for Gaussian blurring across the specified number of dimensions.
+         Calculates the sigma differences required for sequential filtering steps.
+         """
+         steps = 3
+         sigma = np.zeros((2, shape))
+
+         for i in range(shape):
+             sigma_steps_x = self.compute_sigma(steps, k, initial_sigma)
+             sigma_steps_diff_x = self.compute_sigma_difference(sigma_steps_x, 0.5)
+             sigma[0][i] = sigma_steps_diff_x[0]
+             sigma[1][i] = sigma_steps_diff_x[1]
+
+         return sigma
+
+     def normalize_image(self, image):
+         """
+         Normalizes an image to the [0, 1] range using predefined minimum and maximum intensities.
+         """
+         normalized_image = (image - self.min_intensity) / (self.max_intensity - self.min_intensity)
+         return normalized_image
+
+     def compute_difference_of_gaussian(self, image):
+         """
+         Computes feature points in an image using the Difference of Gaussian (DoG) method.
+         """
+         shape = len(image.shape)
+         min_initial_peak_value = np.float32(self.threshold) / np.float32(3.0)
+         k = 2 ** (1 / 4)
+         k_min_1_inv = 1.0 / (k - 1.0)
+
+         # normalize image using min/max intensities
+         input_float = self.normalize_image(image)
+
+         # calculate gaussian blur levels
+         sigma_1, sigma_2 = self.compute_sigmas(self.sigma, shape, k)
+
+         # apply gaussian blur
+         blurred_image_1 = self.apply_gaussian_blur(input_float, sigma_1)
+         blurred_image_2 = self.apply_gaussian_blur(input_float, sigma_2)
+
+         # subtract blurred images
+         dog = (blurred_image_1 - blurred_image_2) * k_min_1_inv
+
+         # get all peaks
+         peaks = self.find_peaks(dog, min_initial_peak_value)
+
+         # localize peaks
+         final_peak_values = self.refine_peaks(peaks, dog)
+
+         return final_peak_values
+
+     def background_subtract_xy(self, image_chunk):
+         """
+         Remove slow-varying background in XY by subtracting a median filter
+         """
+         r = int(self.median_filter or 0)
+         img = image_chunk.astype(np.float32, copy=False)
+         if r <= 0:
+             return img
+
+         k = 2 * r + 1
+
+         # 1) Add XY border (reflect), no padding in Z
+         pad = ((r, r), (r, r), (0, 0))
+         img_pad = np.pad(img, pad, mode='reflect')
+
+         # 2) Median background on padded image (XY-only)
+         bg = median_filter(img_pad, size=(k, k, 1), mode='reflect')
+
+         # 3) Subtract and crop back to original core
+         sub = img_pad - bg
+
+         return sub[r:-r, r:-r, :]
+
+     def run(self, image_chunk, offset, lb):
+         """
+         Executes the entry point of the script.
+         """
+         image_chunk = self.background_subtract_xy(image_chunk)
+         peaks = self.compute_difference_of_gaussian(image_chunk)
+
+         if peaks.size == 0:
+             intensities = np.empty((0,), dtype=image_chunk.dtype)
+             final_peaks = peaks
+
+         else:
+             intensities = map_coordinates(image_chunk, peaks.T, order=1, mode='reflect')
+             final_peaks = self.apply_lower_bounds(peaks, lb)
+             final_peaks = self.apply_offset(final_peaks, offset)
+             final_peaks = self.upsample_coordinates(final_peaks)
+
+         return {
+             'interest_points': final_peaks,
+             'intensities': intensities
+         }
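End to end, a detector built with placeholder parameters can be run directly on an in-memory (x, y, z) chunk; a sketch on a synthetic blob (the identity mip_map_downsample means the chunk is treated as full resolution):

import numpy as np
from scipy.ndimage import gaussian_filter

# synthetic chunk with one bright blob centered at (16, 16, 16)
vol = np.zeros((32, 32, 32), dtype=np.float32)
vol[16, 16, 16] = 100.0
vol = gaussian_filter(vol, sigma=2.0)

detector = DifferenceOfGaussian(
    min_intensity=0.0, max_intensity=float(vol.max()),  # normalization range
    sigma=1.8, threshold=0.008,                         # placeholder detection settings
    median_filter=0,                                    # 0 disables background subtraction
    mip_map_downsample=np.eye(4),                       # identity: no mipmap rescaling
)
result = detector.run(vol, offset=0, lb=(0, 0, 0))
print(result['interest_points'])                        # approximately [[16. 16. 16.]]
print(result['intensities'])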
Rhapso/detection/image_reader.py
@@ -0,0 +1,117 @@
+ import zarr
+ import numpy as np
+ from bioio import BioImage
+ import bioio_tifffile
+ import dask.array as da
+ import s3fs
+
+ """
+ Image Reader loads and downsamples Zarr and OME-TIFF data
+ """
+
+ class CustomBioImage(BioImage):
+     # metadata hooks overridden as no-ops; only pixel data is used here
+     def standard_metadata(self):
+         pass
+
+     def scale(self):
+         pass
+
+     def time_interval(self):
+         pass
+
+ class ImageReader:
+     def __init__(self, file_type):
+         self.file_type = file_type
+
+     def downsample(self, arr, axis):
+         """
+         Reduce size by 2 along `axis` by averaging adjacent elements
+         """
+         s0 = [slice(None)] * arr.ndim
+         s1 = [slice(None)] * arr.ndim
+         s0[axis] = slice(0, None, 2)
+         s1[axis] = slice(1, None, 2)
+
+         a0 = arr[tuple(s0)]
+         a1 = arr[tuple(s1)]
+
+         # trim the even-index slice so both halves align when the axis length is odd
+         len1 = a1.shape[axis]
+         s0c = [slice(None)] * a0.ndim
+         s0c[axis] = slice(0, len1)
+         a0 = a0[tuple(s0c)]
+
+         return (a0 + a1) * 0.5
+
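The pairwise averaging silently drops a trailing element when the axis length is odd, keeping the two interleaved slices aligned; a quick sketch:

import numpy as np

reader = ImageReader(file_type="tiff")   # file_type is not used by downsample itself
print(reader.downsample(np.arange(10, dtype=np.float32), axis=0))  # [0.5 2.5 4.5 6.5 8.5]
print(reader.downsample(np.arange(7, dtype=np.float32), axis=0))   # [0.5 2.5 4.5]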
+     def interface_downsampling(self, data, dsxy, dsz):
+         """
+         Downsample a 3D volume by powers of two by repeatedly halving along each axis
+         """
+         # Process X dimension
+         f = dsxy
+         while f > 1:
+             data = self.downsample(data, axis=0)
+             f //= 2
+
+         # Process Y dimension
+         f = dsxy
+         while f > 1:
+             data = self.downsample(data, axis=1)
+             f //= 2
+
+         # Process Z dimension
+         f = dsz
+         while f > 1:
+             data = self.downsample(data, axis=2)
+             f //= 2
+
+         return data
+
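Each pass halves once and integer-divides the factor by two, so dsxy and dsz effectively behave as powers of two (dsxy=4 halves X and Y twice each); a sketch:

import numpy as np

reader = ImageReader(file_type="tiff")
data = np.ones((64, 64, 32), dtype=np.float32)                   # (x, y, z)
print(reader.interface_downsampling(data, dsxy=4, dsz=2).shape)  # (16, 16, 16)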
+     def fetch_image_data(self, record, dsxy, dsz):
+         """
+         Loads an image chunk, downsamples it, and sub-chunks it based on predefined intervals.
+         """
+         view_id = record['view_id']
+         file_path = record['file_path']
+         interval_key = record['interval_key']
+         offset = record['offset']
+         lower_bound = record['lb']
+
+         # Create image pathways using Dask
+         if self.file_type == "tiff":
+             img = CustomBioImage(file_path, reader=bioio_tifffile.Reader)
+             dask_array = img.get_dask_stack()[0, 0, 0, :, :, :]
+
+         elif self.file_type == "zarr":
+             s3 = s3fs.S3FileSystem(anon=False)
+             store = s3fs.S3Map(root=file_path, s3=s3)
+             zarr_array = zarr.open(store, mode='r')
+             dask_array = da.from_zarr(zarr_array)[0, 0, :, :, :]
+
+         else:
+             raise ValueError(f"Unsupported file_type: {self.file_type!r}")
+
+         dask_array = dask_array.astype(np.float32)
+         dask_array = dask_array.transpose()  # (z, y, x) -> (x, y, z)
+
+         # Downsample Dask array
+         downsampled_stack = self.interface_downsampling(dask_array, dsxy, dsz)
+
+         # Get lower and upper bounds
+         lb = list(interval_key[0])
+         ub = list(interval_key[1])
+
+         # Load image chunk into memory
+         downsampled_image_chunk = downsampled_stack[lb[0]:ub[0]+1, lb[1]:ub[1]+1, lb[2]:ub[2]+1].compute()
+
+         interval_key = (
+             tuple(lb),
+             tuple(ub),
+             tuple((ub[0] - lb[0] + 1, ub[1] - lb[1] + 1, ub[2] - lb[2] + 1))
+         )
+
+         return view_id, interval_key, downsampled_image_chunk, offset, lower_bound
+
+     def run(self, metadata_df, dsxy, dsz):
+         """
+         Executes the entry point of the script.
+         """
+         return self.fetch_image_data(metadata_df, dsxy, dsz)
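fetch_image_data expects a single metadata record of the shape MetadataBuilder (below) emits; a hypothetical call, with a made-up S3 location standing in for a real tile:

record = {
    'view_id': 'timepoint: 0, setup: 0',
    'file_path': 's3://example-bucket/tile_0.zarr/0',             # hypothetical path
    'interval_key': ((0, 0, 0), (127, 127, 63), (128, 128, 64)),  # (lower, upper, span)
    'offset': 0,                                                  # z offset of this sub-chunk
    'lb': (0, 0, 0),                                              # lower bound for re-anchoring peaks
}

reader = ImageReader(file_type="zarr")
view_id, key, chunk, offset, lb = reader.run(record, dsxy=2, dsz=1)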
Rhapso/detection/metadata_builder.py
@@ -0,0 +1,130 @@
+ import numpy as np
+
+ """
+ Metadata Builder constructs lists of pathways to each image chunk needed for interest point detection
+ """
+
+ class MetadataBuilder:
+     def __init__(self, dataframes, overlapping_area, image_file_prefix, file_type,
+                  dsxy, dsz, chunks_per_bound, sigma, run_type, level):
+         self.image_loader_df = dataframes['image_loader']
+         self.overlapping_area = overlapping_area
+         self.image_file_prefix = image_file_prefix
+         self.file_type = file_type
+         self.dsxy = dsxy
+         self.dsz = dsz
+         self.chunks_per_bound = chunks_per_bound
+         self.run_type = run_type
+         self.level = level
+         self.overlap = int(np.ceil(3 * sigma))  # z overlap between sub-chunks, sized to the Gaussian support
+         self.sub_region_chunking = chunks_per_bound != 0
+         self.metadata = []
+
+     def build_image_metadata(self, process_intervals, file_path, view_id):
+         """
+         Builds the list of metadata records, with optional sub-chunking along z
+         """
+         for bound_set in process_intervals:
+             lb = tuple(int(x) for x in bound_set['lower_bound'])
+             ub = tuple(int(x) for x in bound_set['upper_bound'])
+
+             # No chunking needed
+             if not self.sub_region_chunking:
+                 lb_fixed = tuple(int(x) for x in lb)
+                 ub_fixed = tuple(int(x) for x in ub)
+                 span = tuple(int(ub_fixed[i] - lb_fixed[i]) for i in range(3))
+                 interval_key = (lb_fixed, ub_fixed, span)
+
+                 self.metadata.append({
+                     'view_id': view_id,
+                     'file_path': file_path,
+                     'interval_key': interval_key,
+                     'offset': 0,
+                     'lb': lb_fixed
+                 })
+
+             # Apply sub-region chunking
+             else:
+                 if self.file_type == "tiff":
+
+                     num_chunks = self.chunks_per_bound
+
+                     # Compute cropped shape from bounds
+                     x_start, y_start, z_start = lb
+                     x_stop, y_stop, z_stop = [u + 1 for u in ub]
+                     cropped_shape = (z_stop - z_start, y_stop - y_start, x_stop - x_start)
+
+                     # Create num_chunks sets of z indices
+                     z_indices = np.array_split(np.arange(cropped_shape[0]), num_chunks)
+
+                     for chunk in z_indices:
+                         z = max(0, chunk[0] - self.overlap)
+                         z_end = min(chunk[-1] + 1 + self.overlap, cropped_shape[0])
+
+                         actual_lb = (x_start, y_start, z_start + z)
+                         actual_ub = (x_stop, y_stop, z_start + z_end)
+
+                         span = tuple(actual_ub[i] - actual_lb[i] for i in range(3))
+                         interval_key = (actual_lb, actual_ub, span)
+
+                         self.metadata.append({
+                             'view_id': view_id,
+                             'file_path': file_path,
+                             'interval_key': interval_key,
+                             'offset': z,
+                             'lb': lb
+                         })
+
+                 elif self.file_type == "zarr":
+
+                     # Compute cropped shape from bounds
+                     x_start, y_start, z_start = lb
+                     x_stop, y_stop, z_stop = [u + 1 for u in ub]
+
+                     num_chunks = self.chunks_per_bound
+
+                     # Create num_chunks sets of z indices
+                     z_indices = np.array_split(np.arange(z_stop - z_start), num_chunks)
+
+                     for chunk in z_indices:
+                         z = max(0, chunk[0] - self.overlap)
+                         z_end = min(chunk[-1] + 1 + self.overlap, z_stop - z_start)
+
+                         actual_lb = (lb[0], lb[1], z_start + z)
+                         actual_ub = (ub[0], ub[1], z_start + z_end)
+
+                         span = tuple(actual_ub[i] - actual_lb[i] for i in range(3))
+                         interval_key = (actual_lb, actual_ub, span)
+
+                         self.metadata.append({
+                             'view_id': view_id,
+                             'file_path': file_path,
+                             'interval_key': interval_key,
+                             'offset': z,
+                             'lb': lb
+                         })
+
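The z split with overlap can be previewed in isolation; a sketch with made-up numbers (np.array_split yields contiguous index runs, each widened by the overlap on both sides and clamped to the volume):

import numpy as np

depth, num_chunks, overlap = 100, 4, 6   # hypothetical values
for chunk in np.array_split(np.arange(depth), num_chunks):
    z = max(0, chunk[0] - overlap)
    z_end = min(chunk[-1] + 1 + overlap, depth)
    print(z, z_end)   # 0 31, 19 56, 44 81, 69 100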
+     def build_paths(self):
+         """
+         Iterates through views to drive metadata building
+         """
+         for _, row in self.image_loader_df.iterrows():
+             view_id = f"timepoint: {row['timepoint']}, setup: {row['view_setup']}"
+             process_intervals = self.overlapping_area[view_id]
+
+             if self.file_type == 'zarr':
+                 file_path = self.image_file_prefix + row['file_path'] + f'/{self.level}'
+             elif self.file_type == 'tiff':
+                 file_path = self.image_file_prefix + row['file_path']
+             else:
+                 raise ValueError(f"Unsupported file_type: {self.file_type!r}")
+
+             if self.run_type == 'ray':
+                 self.build_image_metadata(process_intervals, file_path, view_id)
+             else:
+                 raise ValueError(f"Unsupported run type: {self.run_type!r}")
+
+     def run(self):
+         self.build_paths()
+         return self.metadata
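Putting the three detection files together, a hypothetical wiring (names, paths, and values are made up; in the real pipeline they come from XML parsing and overlap detection):

import pandas as pd

image_loader = pd.DataFrame([{'timepoint': 0, 'view_setup': 0, 'file_path': 'tile_0.zarr'}])
overlap_area = {'timepoint: 0, setup: 0': [{'lower_bound': (0, 0, 0), 'upper_bound': (127, 127, 63)}]}

builder = MetadataBuilder(
    dataframes={'image_loader': image_loader},
    overlapping_area=overlap_area,
    image_file_prefix='s3://example-bucket/',   # hypothetical prefix
    file_type='zarr', dsxy=2, dsz=1,
    chunks_per_bound=4, sigma=1.8, run_type='ray', level=0,
)
records = builder.run()   # one metadata dict per z sub-chunk, consumed by ImageReader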