Rhapso 0.1.92__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (101) hide show
  1. Rhapso/__init__.py +1 -0
  2. Rhapso/data_prep/__init__.py +2 -0
  3. Rhapso/data_prep/n5_reader.py +188 -0
  4. Rhapso/data_prep/s3_big_stitcher_reader.py +55 -0
  5. Rhapso/data_prep/xml_to_dataframe.py +215 -0
  6. Rhapso/detection/__init__.py +5 -0
  7. Rhapso/detection/advanced_refinement.py +203 -0
  8. Rhapso/detection/difference_of_gaussian.py +324 -0
  9. Rhapso/detection/image_reader.py +117 -0
  10. Rhapso/detection/metadata_builder.py +130 -0
  11. Rhapso/detection/overlap_detection.py +327 -0
  12. Rhapso/detection/points_validation.py +49 -0
  13. Rhapso/detection/save_interest_points.py +265 -0
  14. Rhapso/detection/view_transform_models.py +67 -0
  15. Rhapso/fusion/__init__.py +0 -0
  16. Rhapso/fusion/affine_fusion/__init__.py +2 -0
  17. Rhapso/fusion/affine_fusion/blend.py +289 -0
  18. Rhapso/fusion/affine_fusion/fusion.py +601 -0
  19. Rhapso/fusion/affine_fusion/geometry.py +159 -0
  20. Rhapso/fusion/affine_fusion/io.py +546 -0
  21. Rhapso/fusion/affine_fusion/script_utils.py +111 -0
  22. Rhapso/fusion/affine_fusion/setup.py +4 -0
  23. Rhapso/fusion/affine_fusion_worker.py +234 -0
  24. Rhapso/fusion/multiscale/__init__.py +0 -0
  25. Rhapso/fusion/multiscale/aind_hcr_data_transformation/__init__.py +19 -0
  26. Rhapso/fusion/multiscale/aind_hcr_data_transformation/compress/__init__.py +3 -0
  27. Rhapso/fusion/multiscale/aind_hcr_data_transformation/compress/czi_to_zarr.py +698 -0
  28. Rhapso/fusion/multiscale/aind_hcr_data_transformation/compress/zarr_writer.py +265 -0
  29. Rhapso/fusion/multiscale/aind_hcr_data_transformation/models.py +81 -0
  30. Rhapso/fusion/multiscale/aind_hcr_data_transformation/utils/__init__.py +3 -0
  31. Rhapso/fusion/multiscale/aind_hcr_data_transformation/utils/utils.py +526 -0
  32. Rhapso/fusion/multiscale/aind_hcr_data_transformation/zeiss_job.py +249 -0
  33. Rhapso/fusion/multiscale/aind_z1_radial_correction/__init__.py +21 -0
  34. Rhapso/fusion/multiscale/aind_z1_radial_correction/array_to_zarr.py +257 -0
  35. Rhapso/fusion/multiscale/aind_z1_radial_correction/radial_correction.py +557 -0
  36. Rhapso/fusion/multiscale/aind_z1_radial_correction/run_capsule.py +98 -0
  37. Rhapso/fusion/multiscale/aind_z1_radial_correction/utils/__init__.py +3 -0
  38. Rhapso/fusion/multiscale/aind_z1_radial_correction/utils/utils.py +266 -0
  39. Rhapso/fusion/multiscale/aind_z1_radial_correction/worker.py +89 -0
  40. Rhapso/fusion/multiscale_worker.py +113 -0
  41. Rhapso/fusion/neuroglancer_link_gen/__init__.py +8 -0
  42. Rhapso/fusion/neuroglancer_link_gen/dispim_link.py +235 -0
  43. Rhapso/fusion/neuroglancer_link_gen/exaspim_link.py +127 -0
  44. Rhapso/fusion/neuroglancer_link_gen/hcr_link.py +368 -0
  45. Rhapso/fusion/neuroglancer_link_gen/iSPIM_top.py +47 -0
  46. Rhapso/fusion/neuroglancer_link_gen/link_utils.py +239 -0
  47. Rhapso/fusion/neuroglancer_link_gen/main.py +299 -0
  48. Rhapso/fusion/neuroglancer_link_gen/ng_layer.py +1434 -0
  49. Rhapso/fusion/neuroglancer_link_gen/ng_state.py +1123 -0
  50. Rhapso/fusion/neuroglancer_link_gen/parsers.py +336 -0
  51. Rhapso/fusion/neuroglancer_link_gen/raw_link.py +116 -0
  52. Rhapso/fusion/neuroglancer_link_gen/utils/__init__.py +4 -0
  53. Rhapso/fusion/neuroglancer_link_gen/utils/shader_utils.py +85 -0
  54. Rhapso/fusion/neuroglancer_link_gen/utils/transfer.py +43 -0
  55. Rhapso/fusion/neuroglancer_link_gen/utils/utils.py +303 -0
  56. Rhapso/fusion/neuroglancer_link_gen_worker.py +30 -0
  57. Rhapso/matching/__init__.py +0 -0
  58. Rhapso/matching/load_and_transform_points.py +458 -0
  59. Rhapso/matching/ransac_matching.py +544 -0
  60. Rhapso/matching/save_matches.py +120 -0
  61. Rhapso/matching/xml_parser.py +302 -0
  62. Rhapso/pipelines/__init__.py +0 -0
  63. Rhapso/pipelines/ray/__init__.py +0 -0
  64. Rhapso/pipelines/ray/aws/__init__.py +0 -0
  65. Rhapso/pipelines/ray/aws/alignment_pipeline.py +227 -0
  66. Rhapso/pipelines/ray/aws/config/__init__.py +0 -0
  67. Rhapso/pipelines/ray/evaluation.py +71 -0
  68. Rhapso/pipelines/ray/interest_point_detection.py +137 -0
  69. Rhapso/pipelines/ray/interest_point_matching.py +110 -0
  70. Rhapso/pipelines/ray/local/__init__.py +0 -0
  71. Rhapso/pipelines/ray/local/alignment_pipeline.py +167 -0
  72. Rhapso/pipelines/ray/matching_stats.py +104 -0
  73. Rhapso/pipelines/ray/param/__init__.py +0 -0
  74. Rhapso/pipelines/ray/solver.py +120 -0
  75. Rhapso/pipelines/ray/split_dataset.py +78 -0
  76. Rhapso/solver/__init__.py +0 -0
  77. Rhapso/solver/compute_tiles.py +562 -0
  78. Rhapso/solver/concatenate_models.py +116 -0
  79. Rhapso/solver/connected_graphs.py +111 -0
  80. Rhapso/solver/data_prep.py +181 -0
  81. Rhapso/solver/global_optimization.py +410 -0
  82. Rhapso/solver/model_and_tile_setup.py +109 -0
  83. Rhapso/solver/pre_align_tiles.py +323 -0
  84. Rhapso/solver/save_results.py +97 -0
  85. Rhapso/solver/view_transforms.py +75 -0
  86. Rhapso/solver/xml_to_dataframe_solver.py +213 -0
  87. Rhapso/split_dataset/__init__.py +0 -0
  88. Rhapso/split_dataset/compute_grid_rules.py +78 -0
  89. Rhapso/split_dataset/save_points.py +101 -0
  90. Rhapso/split_dataset/save_xml.py +377 -0
  91. Rhapso/split_dataset/split_images.py +537 -0
  92. Rhapso/split_dataset/xml_to_dataframe_split.py +219 -0
  93. rhapso-0.1.92.dist-info/METADATA +39 -0
  94. rhapso-0.1.92.dist-info/RECORD +101 -0
  95. rhapso-0.1.92.dist-info/WHEEL +5 -0
  96. rhapso-0.1.92.dist-info/licenses/LICENSE +21 -0
  97. rhapso-0.1.92.dist-info/top_level.txt +2 -0
  98. tests/__init__.py +1 -0
  99. tests/test_detection.py +17 -0
  100. tests/test_matching.py +21 -0
  101. tests/test_solving.py +21 -0
@@ -0,0 +1,78 @@
1
+ from Rhapso.split_dataset.xml_to_dataframe_split import XMLToDataFrameSplit
2
+ from Rhapso.split_dataset.compute_grid_rules import ComputeGridRules
3
+ from Rhapso.split_dataset.split_images import SplitImages
4
+ from Rhapso.split_dataset.save_xml import SaveXML
5
+ from Rhapso.split_dataset.save_points import SavePoints
6
+ import boto3
7
+ import ray
8
+
9
class SplitDataset:
    """Split a BigStitcher-style dataset into smaller tiles and persist the results.

    Orchestrates: XML load (local or S3) -> grid-rule computation -> image
    splitting -> XML save -> distributed interest-point saving via Ray.
    """

    def __init__(self, xml_file_path, xml_output_file_path, n5_path, point_density, min_points, max_points, error, exclude_radius,
                 target_image_size, target_overlap):
        # All parameters are stored verbatim; they are interpreted by the
        # pipeline stages invoked from split().
        self.xml_file_path = xml_file_path
        self.xml_output_file_path = xml_output_file_path
        self.n5_path = n5_path
        self.point_density = point_density
        self.min_points = min_points
        self.max_points = max_points
        self.error = error
        self.exclude_radius = exclude_radius
        self.target_image_size = target_image_size
        self.target_overlap = target_overlap

    def _read_xml(self):
        """Return the XML document text, fetching from S3 when the path uses the s3:// scheme."""
        if self.xml_file_path.startswith("s3://"):
            bucket, key = self.xml_file_path.replace("s3://", "", 1).split("/", 1)
            body = boto3.client("s3").get_object(Bucket=bucket, Key=key)["Body"]
            return body.read().decode("utf-8")
        with open(self.xml_file_path, "r", encoding="utf-8") as handle:
            return handle.read()

    def split(self):
        """Run the full split pipeline end to end."""
        xml_file = self._read_xml()

        data_global = XMLToDataFrameSplit(xml_file).run()
        print("XML loaded")

        xyz_size, xyz_overlap, min_step_size = ComputeGridRules(
            data_global, self.target_image_size, self.target_overlap
        ).run()
        print("Split rules computed")

        splitter = SplitImages(xyz_size, xyz_overlap, min_step_size, data_global, self.n5_path,
                               self.point_density, self.min_points, self.max_points,
                               self.error, self.exclude_radius)
        new_split_interest_points, self_definition = splitter.run()
        print("Tiles have been split")

        SaveXML(data_global, new_split_interest_points, self_definition, xml_file,
                self.xml_output_file_path).run()
        print("XML saved")

        # Fan the per-label point saving out across the Ray cluster.
        @ray.remote
        def distribute_points_saving(label_entries, n5_path):
            return SavePoints(label_entries, n5_path).run()

        futures = [distribute_points_saving.remote(entries, self.n5_path)
                   for entries in new_split_interest_points.values()]
        _ = ray.get(futures)
        print("Points saved")

        print("Dataset split complete")

    def run(self):
        """Entry point: perform the split."""
        self.split()
File without changes
@@ -0,0 +1,562 @@
1
+ import numpy as np
2
+
3
+ """
4
+ Compute Tiles prepares a set of tiles and pairwise correspondences for global alignment.
5
+ """
6
+
7
class ComputeTiles:
    """Prepare a set of tiles and pairwise correspondences for global alignment."""

    def __init__(self, pmc, view_id_set, groups, dataframes, run_type):
        # Inputs are stored as-is; downstream methods interpret them.
        self.pmc = pmc                  # point-match collection (list of pairs, or {'models': [...]} in grouped mode)
        self.view_id_set = view_id_set  # iterable of (timepoint, setup) id pairs
        self.groups = groups            # optional list of {'views': [...]} groupings, or None
        self.dataframes = dataframes    # parsed XML tables (view_registrations, view_setups)
        self.run_type = run_type        # run-mode label; not read in this class - TODO confirm use
14
+
15
+ def flip_matches(self, matches):
16
+ """
17
+ Swap endpoints of each match to create the reverse (B→A) correspondences
18
+ """
19
+ flipped = []
20
+ for match in matches:
21
+ p1 = match['p2']
22
+ p2 = match['p1']
23
+ weight = match.get('weight', 1)
24
+ strength = match.get('strength', 1)
25
+
26
+ flipped.append({
27
+ 'p1': p1,
28
+ 'p2': p2,
29
+ 'weight': weight,
30
+ 'strength': strength
31
+ })
32
+ return flipped
33
+
34
+ def get_bounding_boxes(self, M, dims):
35
+ """
36
+ Compute world-space AABB (min/max corners) of a voxel-aligned box of size dims after applying affine M
37
+ """
38
+ M = np.asarray(M, float)
39
+ if M.shape == (3, 4):
40
+ M = np.vstack([M, [0.0, 0.0, 0.0, 1.0]])
41
+
42
+ # interval mins/maxes
43
+ t0 = 0.0; t1 = 0.0; t2 = 0.0
44
+ s0 = float(dims[0]) - 1.0
45
+ s1 = float(dims[1]) - 1.0
46
+ s2 = float(dims[2]) - 1.0
47
+
48
+ A = M[:3, :3]
49
+ tx, ty, tz = M[0, 3], M[1, 3], M[2, 3]
50
+
51
+ # row 0
52
+ tt0 = A[0,0]*t0 + A[0,1]*t1 + A[0,2]*t2 + tx
53
+ rMin0 = rMax0 = tt0
54
+ rMin0 += s0*A[0,0] if A[0,0] < 0 else 0.0; rMax0 += 0.0 if A[0,0] < 0 else s0*A[0,0]
55
+ rMin0 += s1*A[0,1] if A[0,1] < 0 else 0.0; rMax0 += 0.0 if A[0,1] < 0 else s1*A[0,1]
56
+ rMin0 += s2*A[0,2] if A[0,2] < 0 else 0.0; rMax0 += 0.0 if A[0,2] < 0 else s2*A[0,2]
57
+
58
+ # row 1
59
+ tt1 = A[1,0]*t0 + A[1,1]*t1 + A[1,2]*t2 + ty
60
+ rMin1 = rMax1 = tt1
61
+ rMin1 += s0*A[1,0] if A[1,0] < 0 else 0.0; rMax1 += 0.0 if A[1,0] < 0 else s0*A[1,0]
62
+ rMin1 += s1*A[1,1] if A[1,1] < 0 else 0.0; rMax1 += 0.0 if A[1,1] < 0 else s1*A[1,1]
63
+ rMin1 += s2*A[1,2] if A[1,2] < 0 else 0.0; rMax1 += 0.0 if A[1,2] < 0 else s2*A[1,2]
64
+
65
+ # row 2
66
+ tt2 = A[2,0]*t0 + A[2,1]*t1 + A[2,2]*t2 + tz
67
+ rMin2 = rMax2 = tt2
68
+ rMin2 += s0*A[2,0] if A[2,0] < 0 else 0.0; rMax2 += 0.0 if A[2,0] < 0 else s0*A[2,0]
69
+ rMin2 += s1*A[2,1] if A[2,1] < 0 else 0.0; rMax2 += 0.0 if A[2,1] < 0 else s1*A[2,1]
70
+ rMin2 += s2*A[2,2] if A[2,2] < 0 else 0.0; rMax2 += 0.0 if A[2,2] < 0 else s2*A[2,2]
71
+
72
+ rMin = np.array([rMin0, rMin1, rMin2], float)
73
+ rMax = np.array([rMax0, rMax1, rMax2], float)
74
+ return rMin, rMax
75
+
76
+ def bounding_boxes(self, M, dims):
77
+ """
78
+ Compute an integer, padded axis-aligned bounding box from the real-valued bounds given transform M and volume size dims
79
+ """
80
+ rMin, rMax = self.get_bounding_boxes(M, dims)
81
+ min_i = np.rint(rMin).astype(int) - 1
82
+ max_i = np.rint(rMax).astype(int) + 1
83
+ return (min_i.tolist(), max_i.tolist())
84
+
85
+ def transform_matrices(self, view):
86
+ """
87
+ Compose the 4x4 world transform for a view by fetching all its affine models and chaining them in order
88
+ """
89
+ M = np.eye(4, dtype=float)
90
+
91
+ view, setup = [p.strip() for p in view['view'].split(", ", 1)]
92
+ view_key = int(view.split(": ", 1)[1])
93
+ setup_key = int(setup.split(": ", 1)[1])
94
+
95
+ vr_df = self.dataframes["view_registrations"]
96
+ sub = vr_df[
97
+ (vr_df["timepoint"].astype(int) == view_key) &
98
+ (vr_df["setup"].astype(int) == setup_key) &
99
+ (vr_df["type"] == "affine")
100
+ ]
101
+
102
+ for model in sub["affine"]:
103
+ vals = np.fromstring(model.replace(",", " "), sep=" ", dtype=float)
104
+ T = np.eye(4, dtype=float); T[:3, :4] = vals.reshape(3, 4)
105
+ M = M @ T
106
+
107
+ return M
108
+
109
+ def overlaps(self, bba, bbb):
110
+ """
111
+ Boolean check if two axis-aligned boxes *strictly* overlap on every axis
112
+ """
113
+ (minA, maxA) = bba
114
+ (minB, maxB) = bbb
115
+ for d in range(len(minA)):
116
+ if ((minA[d] <= minB[d] and maxA[d] <= minB[d]) or
117
+ (minA[d] >= maxB[d] and maxA[d] >= maxB[d])):
118
+ return False
119
+ return True
120
+
121
+ def overlap(self, view_a, dims_a, view_b, dims_b):
122
+ """
123
+ Builds each view transform, computes its AABB, then checks overlap
124
+ """
125
+ ma = self.transform_matrices(view_a)
126
+ mb = self.transform_matrices(view_b)
127
+
128
+ bba = self.bounding_boxes(ma, dims_a)
129
+ bbb = self.bounding_boxes(mb, dims_b)
130
+
131
+ return self.overlaps(bba, bbb)
132
+
133
+ def bb_overlap(self, real_bb1, real_bb2):
134
+ """
135
+ Axis-aligned box overlap test
136
+ """
137
+ min1, max1 = real_bb1
138
+ min2, max2 = real_bb2
139
+ for d in range(len(min1)):
140
+ if (min1[d] < min2[d] and max1[d] < min2[d]) or (min1[d] > max2[d] and max1[d] > max2[d]):
141
+ return False
142
+
143
+ return True
144
+
145
    def get_overlap_interval(self, view_a, dims_a, view_b, dims_b):
        """
        Compute the continuous overlap box in world space between views A and B.

        Returns
        -------
        (mins, maxs) : tuple of list of float
            Per-axis intersection of the two padded integer bounding boxes,
            or None when the boxes are disjoint or the intersection is
            degenerate (zero extent) on some axis.
        """
        ma = self.transform_matrices(view_a)
        mb = self.transform_matrices(view_b)

        bb1 = self.get_bounding_boxes(ma, dims_a)
        bb2 = self.get_bounding_boxes(mb, dims_b)

        # NOTE(review): get_bounding_boxes always returns a non-empty 2-tuple,
        # so this guard can never fire; kept for safety.
        if not bb1 or not bb2:
            return

        # Padded integer boxes — the same representation used by overlaps().
        real_bb1 = self.bounding_boxes(ma, dims_a)
        real_bb2 = self.bounding_boxes(mb, dims_b)

        if self.bb_overlap(real_bb1, real_bb2):
            rmin1, rmax1 = real_bb1
            rmin2, rmax2 = real_bb2

            mins = [0.0] * len(rmin1)
            maxs = [0.0] * len(rmin1)

            for d in range(len(rmin1)):
                # Intersection along axis d.
                mins[d] = max(rmin1[d], rmin2[d])
                maxs[d] = min(rmax1[d], rmax2[d])

                # Special case: a flat z-axis (both volumes have depth 1) is
                # widened to [0, 1] instead of being rejected as degenerate.
                if d == 2 and mins[d] == maxs[d] == 0.0 and dims_a[2] == 1 and dims_b[2] == 1:
                    mins[d], maxs[d] = 0.0, 1.0
                elif mins[d] == maxs[d] or maxs[d] < mins[d]:
                    # Degenerate or empty intersection on this axis.
                    return None

            return (mins, maxs)
        # Falls through (returns None) when the padded boxes do not overlap.
178
+
179
+ def cube_for(self, overlap):
180
+ """
181
+ Find the 8 corner points of an axis-aligned 3D box
182
+ """
183
+ mins, maxs = overlap
184
+ min0, min1, min2 = map(float, mins)
185
+ max0, max1, max2 = map(float, maxs)
186
+
187
+ return [
188
+ [min0, min1, min2],
189
+ [min0, min1, max2],
190
+ [min0, max1, min2],
191
+ [min0, max1, max2],
192
+ [max0, min1, min2],
193
+ [max0, min1, max2],
194
+ [max0, max1, min2],
195
+ [max0, max1, max2],
196
+ ]
197
+
198
+ def apply(self, model, source, target):
199
+ """
200
+ Apply a 3D affine transform
201
+ """
202
+ x, y, z = float(source[0]), float(source[1]), float(source[2])
203
+
204
+ if isinstance(model, dict):
205
+ t0 = x*model['m00'] + y*model['m01'] + z*model['m02'] + model['m03']
206
+ t1 = x*model['m10'] + y*model['m11'] + z*model['m12'] + model['m13']
207
+ t2 = x*model['m20'] + y*model['m21'] + z*model['m22'] + model['m23']
208
+ else:
209
+ M = np.asarray(model, float)
210
+ if M.shape == (3, 4) or M.shape == (4, 4):
211
+ t0 = x*M[0,0] + y*M[0,1] + z*M[0,2] + M[0,3]
212
+ t1 = x*M[1,0] + y*M[1,1] + z*M[1,2] + M[1,3]
213
+ t2 = x*M[2,0] + y*M[2,1] + z*M[2,2] + M[2,3]
214
+ else:
215
+ raise ValueError("model must be dict m00..m23 or a 3x4/4x4 array")
216
+
217
+ target[0] = t0
218
+ target[1] = t1
219
+ target[2] = t2
220
+
221
+ return target
222
+
223
    def assign_weak_link_point_matches(self, view_map, groups):
        """
        Create "weak-link" synthetic matches between tiles that belong to
        different groups but spatially overlap, then attach those matches to
        all tiles in the two groups and connect the tile graphs (without
        duplicate edges).

        Parameters
        ----------
        view_map : dict
            Maps view-id strings ("timepoint: T, setup: S") to tile dicts.
            Mutated in place: 'matches', 'connected_tiles' and the internal
            '_connected_set' cache are updated.
        groups : list of dict
            Each with a 'views' list of view-id strings.

        Returns
        -------
        dict
            The same view_map, mutated.
        """
        # Map each view id to the group object containing it (first hit wins).
        group_map = {}
        for v in view_map:
            for group in groups:
                if v in group['views']:
                    group_map[v] = group
                    break

        views = list(view_map.values())
        # Deterministic pair enumeration: sort tiles by their setup id.
        views = sorted(views, key=lambda d: int(d['view'].partition('setup:')[2].strip().split()[0]))
        for a in range(len(views) - 1):
            for b in range(a + 1, len(views)):
                view_a = views[a]
                view_b = views[b]

                # Only link tiles that live in different groups.
                if group_map[view_a['view']] == group_map[view_b['view']]:
                    continue

                pm = []  # synthetic point matches for this pair

                # Look up the image dimensions of both setups.
                # NOTE(review): this casts the shared "id" column to int on
                # every pair, mutating self.dataframes["view_setups"] —
                # presumably harmless, but confirm.
                vs_df = self.dataframes["view_setups"]
                vs_df["id"] = vs_df["id"].astype(int)
                vs_df = vs_df[vs_df["name"].isna()]  # keep only unnamed setup rows — TODO confirm intent
                vs_idx = vs_df.assign(id_int=vs_df["id"].astype(int)).set_index("id_int")

                setup_a = int(view_a['view'].split(", setup: ", 1)[1])
                setup_b = int(view_b["view"].split(", setup: ", 1)[1])

                row_a = vs_idx.loc[setup_a]
                row_b = vs_idx.loc[setup_b]

                # "size" is a whitespace-separated triple, e.g. "2048 2048 800".
                dims_a = tuple(map(int, str(row_a["size"]).split()))
                dims_b = tuple(map(int, str(row_b["size"]).split()))

                if self.overlap(view_a, dims_a, view_b, dims_b):

                    overlap = self.get_overlap_interval(view_a, dims_a, view_b, dims_b)

                    if overlap is None:
                        continue

                    # Two copies of the overlap-box corners, one per view
                    # (self.apply below overwrites them in place).
                    pa = self.cube_for(overlap)
                    pb = self.cube_for(overlap)

                    # In this grouped mode self.pmc is expected to be a dict
                    # with a "models" list (assign_point_matches instead
                    # iterates it as a list of pairs) — TODO confirm the
                    # dual shape of pmc.
                    key = (lambda v: v if isinstance(v, str) else v["view"])
                    ta = next(m["model"]["regularized"] for m in self.pmc["models"] if m["view"] == key(view_a))
                    tb = next(m["model"]["regularized"] for m in self.pmc["models"] if m["view"] == key(view_b))

                    for i in range(len(pa)):
                        # Transform each corner into world space; the corner
                        # lists are overwritten in place and aliased as both
                        # local ("l") and world ("w") coordinates below.
                        points_a = self.apply(ta, pa[i], pa[i])
                        points_b = self.apply(tb, pb[i], pb[i])
                        if points_a is None or points_b is None:
                            print()  # NOTE(review): no-op debug placeholder; apply() always returns target
                        match = {
                            "p1": {
                                "l": points_a,
                                "w": points_a,
                                "weight": 1,
                                "strength": 1
                            },
                            "p2": {
                                "l": points_b,
                                "w": points_b,
                                "weight": 1,
                                "strength": 1
                            },
                            "weight": 1,
                            "strength": 1
                        }
                        pm.append(match)

                    # Attach the forward matches to every tile in A's group.
                    idx = next((i for i, g in enumerate(groups) if view_a['view'] in g.get('views', ())), None)
                    views_a = groups[idx]['views'] if idx is not None else [view_a['view']]

                    for va in views_a:
                        tile_a = view_map.get(va)
                        if tile_a:
                            tile_a['matches'].extend(pm)

                    # And the reversed matches to every tile in B's group.
                    flipped_matches = self.flip_matches(pm)
                    idx = next((i for i, g in enumerate(groups) if view_b['view'] in g.get('views', ())), None)
                    views_b = groups[idx]['views'] if idx is not None else [view_b['view']]

                    for vb in views_b:
                        tile_b = view_map.get(vb)
                        if tile_b:
                            tile_b['matches'].extend(flipped_matches)

                    # Precompute tile lists for both groups
                    tiles_a = [view_map[va] for va in views_a if va in view_map]
                    tiles_b = [view_map[vb] for vb in views_b if vb in view_map]

                    # Initialize a fast membership set on each tile once
                    from itertools import chain
                    for t in chain(tiles_a, tiles_b):
                        ct = t.setdefault('connected_tiles', [])
                        s = t.get('_connected_set')
                        if s is None:
                            # normalize existing entries (dict or str) to view-id strings
                            s = {(c['view'] if isinstance(c, dict) else c) for c in ct}
                            t['_connected_set'] = s

                    views_a_set = {t['view'] for t in tiles_a}
                    views_b_set = {t['view'] for t in tiles_b}

                    # A -> B (add only what's missing).
                    # NOTE(review): 'ta'/'tb' are reused as tile loop
                    # variables here, shadowing the transform models above.
                    for ta in tiles_a:
                        missing = views_b_set - ta['_connected_set'] - {ta['view']}
                        if missing:
                            ta['connected_tiles'].extend({'view': vb, 'tile': view_map.get(vb)} for vb in missing)
                            ta['_connected_set'].update(missing)

                    # B -> A (add only what's missing)
                    for tb in tiles_b:
                        missing = views_a_set - tb['_connected_set'] - {tb['view']}
                        if missing:
                            tb['connected_tiles'].extend({'view': va, 'tile': view_map.get(va)} for va in missing)
                            tb['_connected_set'].update(missing)

        return view_map
348
+
349
+ def assign_point_matches(self, map):
350
+ """
351
+ Attach inlier correspondences to each tile for both directions
352
+ """
353
+ for pair in self.pmc:
354
+ pair_a = pair['view'][0]
355
+ pair_b = pair['view'][1]
356
+ tile_a = map[pair_a]
357
+ tile_b = map[pair_b]
358
+
359
+ correspondences = pair['inliers']
360
+ if len(correspondences) > 0:
361
+
362
+ pm = correspondences
363
+ flipped_matches = self.flip_matches(pm)
364
+
365
+ tile_a['matches'].extend(pm)
366
+ tile_b['matches'].extend(flipped_matches)
367
+
368
+ tile_a['connected_tiles'].append({'view': pair_b, 'tile': tile_b})
369
+ tile_b['connected_tiles'].append({'view': pair_a, 'tile': tile_a})
370
+
371
+ pair['flipped'] = flipped_matches
372
+
373
+ return map
374
+
375
+ def create_default_model_3d(self):
376
+ """
377
+ Returns a default 3D rigid transformation model with identity rotation and zero translation.
378
+ """
379
+ return {
380
+ "m00": 1.0, "m01": 0.0, "m02": 0.0, "m03": 0.0,
381
+ "m10": 0.0, "m11": 1.0, "m12": 0.0, "m13": 0.0,
382
+ "m20": 0.0, "m21": 0.0, "m22": 1.0, "m23": 0.0,
383
+ "i00": 1.0, "i01": 0.0, "i02": 0.0, "i03": 0.0,
384
+ "i10": 0.0, "i11": 1.0, "i12": 0.0, "i13": 0.0,
385
+ "i20": 0.0, "i21": 0.0, "i22": 1.0, "i23": 0.0,
386
+ "cost": 1.7976931348623157e+308,
387
+ "isInvertible": True
388
+ }
389
+
390
+ def create_models(self):
391
+ """
392
+ Initializes default transformation models and parameters for affine and rigid alignment.
393
+ """
394
+ return {
395
+ 'a' : self.create_default_model_3d(),
396
+ 'b' : self.create_default_model_3d(),
397
+ 'regularized': self.create_default_model_3d(),
398
+ 'cost' : 1.7976931348623157e+308,
399
+ 'l1' : 0.900000,
400
+ 'lambda' : 0.100000
401
+ }
402
+
403
+ def assign_views_to_tiles(self, groups):
404
+ """
405
+ Create initial view_map entry for each view to be optimized.
406
+ """
407
+ view_map = {}
408
+ if groups:
409
+
410
+ remaining_views = {f"timepoint: {tp}, setup: {vs}" for (tp, vs) in self.view_id_set}
411
+ for group in groups:
412
+ for view in group['views']:
413
+ view_map[view] = {
414
+ 'view': view,
415
+ 'connected_tiles': [],
416
+ 'cost': 0,
417
+ 'distance': 0,
418
+ 'matches': [],
419
+ 'model': self.create_models()
420
+ }
421
+
422
+ if view not in remaining_views:
423
+ raise RuntimeError(f"{view} is part of two groups; groups should have been merged.")
424
+
425
+ remaining_views.remove(view)
426
+
427
+ for view in remaining_views:
428
+ view_map[view] = {
429
+ 'views': [view],
430
+ 'connected_tiles': [],
431
+ 'cost': 0,
432
+ 'distance': 0,
433
+ 'matches': [],
434
+ 'model': self.create_models(),
435
+ }
436
+
437
+ else:
438
+ for view in self.view_id_set:
439
+ tp, setup = view
440
+ key = f"timepoint: {tp}, setup: {setup}"
441
+
442
+ view_map[key] = {
443
+ 'view': key,
444
+ 'connected_tiles': [],
445
+ 'cost': 0,
446
+ 'distance': 0,
447
+ 'matches': [],
448
+ 'model': self.create_models()
449
+ }
450
+
451
+ return view_map
452
+
453
+ def merge_all_overlapping_groups(self):
454
+ """
455
+ Repeatedly merge any groups that share at least one view until no overlaps remain
456
+ """
457
+ g = [{'views': list(gr.get('views', []))} for gr in self.groups]
458
+
459
+ while True:
460
+ pair = None
461
+ n = len(g)
462
+
463
+ for a in range(n - 1):
464
+ va = set(g[a]['views'])
465
+ for b in range(a + 1, n):
466
+ if va & set(g[b]['views']): # overlaps?
467
+ pair = (a, b)
468
+ break
469
+ if pair:
470
+ break
471
+
472
+ if not pair:
473
+ break
474
+
475
+ i, j = pair
476
+ ga, gb = g[i], g[j]
477
+
478
+ # remove indexB then indexA (j > i)
479
+ del g[j]
480
+ del g[i]
481
+
482
+ # merge(ga, gb): preserve order, dedup
483
+ merged_views = list(dict.fromkeys(ga['views'] + gb['views']))
484
+ g.append({'views': merged_views})
485
+
486
+ return g
487
+
488
+ def init_global_opt(self):
489
+ """
490
+ Build the tile map and attach point matches
491
+ """
492
+ if self.groups is None:
493
+ groups = self.groups
494
+ else:
495
+ groups = self.groups
496
+
497
+ view_map = self.assign_views_to_tiles(groups)
498
+
499
+ if self.groups is None:
500
+ view_map = self.assign_point_matches(view_map)
501
+ else:
502
+ view_map = self.assign_weak_link_point_matches(view_map, groups)
503
+
504
+ return view_map
505
+
506
+ def add_and_fix_tiles(self, view_map):
507
+ """
508
+ Build the initial tile collection for alignment
509
+ """
510
+ tc = {
511
+ 'error': 0,
512
+ 'fixed_tiles': [],
513
+ 'max_error': 0,
514
+ 'min_error': float('inf'),
515
+ 'tiles': []
516
+ }
517
+
518
+ if self.groups:
519
+ view_to_group_idx = {v: gi for gi, g in enumerate(self.groups) for v in g.get('views', [])}
520
+
521
+ first_by_group = {}
522
+ for view_id, gi in view_to_group_idx.items():
523
+ first_by_group.setdefault(gi, view_id)
524
+
525
+ # add exactly one tile per group
526
+ for gi in sorted(first_by_group):
527
+ rep_view = first_by_group[gi]
528
+ t = view_map.get(rep_view)
529
+ if len(t['connected_tiles']) > 0:
530
+ tc['tiles'].append(t)
531
+
532
+ else:
533
+ tiles = []
534
+ for tp, setup in self.view_id_set:
535
+ key = f"timepoint: {tp}, setup: {setup}"
536
+ tile = view_map[key]
537
+ tiles.append(tile)
538
+
539
+ for tile in tiles:
540
+ if len(tile['connected_tiles']) > 0:
541
+ tc['tiles'].append(tile)
542
+
543
+ return tc
544
+
545
+ def compute_tiles(self):
546
+ """
547
+ Interface tile computing
548
+ """
549
+ view_map = self.init_global_opt()
550
+ tc = self.add_and_fix_tiles(view_map)
551
+
552
+ if len(tc['tiles']) == 0:
553
+ return None
554
+ else:
555
+ return tc, view_map
556
+
557
+ def run(self):
558
+ """
559
+ Executes the entry point of the script.
560
+ """
561
+ tc, view_map = self.compute_tiles()
562
+ return tc, view_map