Rhapso 0.1.92__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (101)
  1. Rhapso/__init__.py +1 -0
  2. Rhapso/data_prep/__init__.py +2 -0
  3. Rhapso/data_prep/n5_reader.py +188 -0
  4. Rhapso/data_prep/s3_big_stitcher_reader.py +55 -0
  5. Rhapso/data_prep/xml_to_dataframe.py +215 -0
  6. Rhapso/detection/__init__.py +5 -0
  7. Rhapso/detection/advanced_refinement.py +203 -0
  8. Rhapso/detection/difference_of_gaussian.py +324 -0
  9. Rhapso/detection/image_reader.py +117 -0
  10. Rhapso/detection/metadata_builder.py +130 -0
  11. Rhapso/detection/overlap_detection.py +327 -0
  12. Rhapso/detection/points_validation.py +49 -0
  13. Rhapso/detection/save_interest_points.py +265 -0
  14. Rhapso/detection/view_transform_models.py +67 -0
  15. Rhapso/fusion/__init__.py +0 -0
  16. Rhapso/fusion/affine_fusion/__init__.py +2 -0
  17. Rhapso/fusion/affine_fusion/blend.py +289 -0
  18. Rhapso/fusion/affine_fusion/fusion.py +601 -0
  19. Rhapso/fusion/affine_fusion/geometry.py +159 -0
  20. Rhapso/fusion/affine_fusion/io.py +546 -0
  21. Rhapso/fusion/affine_fusion/script_utils.py +111 -0
  22. Rhapso/fusion/affine_fusion/setup.py +4 -0
  23. Rhapso/fusion/affine_fusion_worker.py +234 -0
  24. Rhapso/fusion/multiscale/__init__.py +0 -0
  25. Rhapso/fusion/multiscale/aind_hcr_data_transformation/__init__.py +19 -0
  26. Rhapso/fusion/multiscale/aind_hcr_data_transformation/compress/__init__.py +3 -0
  27. Rhapso/fusion/multiscale/aind_hcr_data_transformation/compress/czi_to_zarr.py +698 -0
  28. Rhapso/fusion/multiscale/aind_hcr_data_transformation/compress/zarr_writer.py +265 -0
  29. Rhapso/fusion/multiscale/aind_hcr_data_transformation/models.py +81 -0
  30. Rhapso/fusion/multiscale/aind_hcr_data_transformation/utils/__init__.py +3 -0
  31. Rhapso/fusion/multiscale/aind_hcr_data_transformation/utils/utils.py +526 -0
  32. Rhapso/fusion/multiscale/aind_hcr_data_transformation/zeiss_job.py +249 -0
  33. Rhapso/fusion/multiscale/aind_z1_radial_correction/__init__.py +21 -0
  34. Rhapso/fusion/multiscale/aind_z1_radial_correction/array_to_zarr.py +257 -0
  35. Rhapso/fusion/multiscale/aind_z1_radial_correction/radial_correction.py +557 -0
  36. Rhapso/fusion/multiscale/aind_z1_radial_correction/run_capsule.py +98 -0
  37. Rhapso/fusion/multiscale/aind_z1_radial_correction/utils/__init__.py +3 -0
  38. Rhapso/fusion/multiscale/aind_z1_radial_correction/utils/utils.py +266 -0
  39. Rhapso/fusion/multiscale/aind_z1_radial_correction/worker.py +89 -0
  40. Rhapso/fusion/multiscale_worker.py +113 -0
  41. Rhapso/fusion/neuroglancer_link_gen/__init__.py +8 -0
  42. Rhapso/fusion/neuroglancer_link_gen/dispim_link.py +235 -0
  43. Rhapso/fusion/neuroglancer_link_gen/exaspim_link.py +127 -0
  44. Rhapso/fusion/neuroglancer_link_gen/hcr_link.py +368 -0
  45. Rhapso/fusion/neuroglancer_link_gen/iSPIM_top.py +47 -0
  46. Rhapso/fusion/neuroglancer_link_gen/link_utils.py +239 -0
  47. Rhapso/fusion/neuroglancer_link_gen/main.py +299 -0
  48. Rhapso/fusion/neuroglancer_link_gen/ng_layer.py +1434 -0
  49. Rhapso/fusion/neuroglancer_link_gen/ng_state.py +1123 -0
  50. Rhapso/fusion/neuroglancer_link_gen/parsers.py +336 -0
  51. Rhapso/fusion/neuroglancer_link_gen/raw_link.py +116 -0
  52. Rhapso/fusion/neuroglancer_link_gen/utils/__init__.py +4 -0
  53. Rhapso/fusion/neuroglancer_link_gen/utils/shader_utils.py +85 -0
  54. Rhapso/fusion/neuroglancer_link_gen/utils/transfer.py +43 -0
  55. Rhapso/fusion/neuroglancer_link_gen/utils/utils.py +303 -0
  56. Rhapso/fusion/neuroglancer_link_gen_worker.py +30 -0
  57. Rhapso/matching/__init__.py +0 -0
  58. Rhapso/matching/load_and_transform_points.py +458 -0
  59. Rhapso/matching/ransac_matching.py +544 -0
  60. Rhapso/matching/save_matches.py +120 -0
  61. Rhapso/matching/xml_parser.py +302 -0
  62. Rhapso/pipelines/__init__.py +0 -0
  63. Rhapso/pipelines/ray/__init__.py +0 -0
  64. Rhapso/pipelines/ray/aws/__init__.py +0 -0
  65. Rhapso/pipelines/ray/aws/alignment_pipeline.py +227 -0
  66. Rhapso/pipelines/ray/aws/config/__init__.py +0 -0
  67. Rhapso/pipelines/ray/evaluation.py +71 -0
  68. Rhapso/pipelines/ray/interest_point_detection.py +137 -0
  69. Rhapso/pipelines/ray/interest_point_matching.py +110 -0
  70. Rhapso/pipelines/ray/local/__init__.py +0 -0
  71. Rhapso/pipelines/ray/local/alignment_pipeline.py +167 -0
  72. Rhapso/pipelines/ray/matching_stats.py +104 -0
  73. Rhapso/pipelines/ray/param/__init__.py +0 -0
  74. Rhapso/pipelines/ray/solver.py +120 -0
  75. Rhapso/pipelines/ray/split_dataset.py +78 -0
  76. Rhapso/solver/__init__.py +0 -0
  77. Rhapso/solver/compute_tiles.py +562 -0
  78. Rhapso/solver/concatenate_models.py +116 -0
  79. Rhapso/solver/connected_graphs.py +111 -0
  80. Rhapso/solver/data_prep.py +181 -0
  81. Rhapso/solver/global_optimization.py +410 -0
  82. Rhapso/solver/model_and_tile_setup.py +109 -0
  83. Rhapso/solver/pre_align_tiles.py +323 -0
  84. Rhapso/solver/save_results.py +97 -0
  85. Rhapso/solver/view_transforms.py +75 -0
  86. Rhapso/solver/xml_to_dataframe_solver.py +213 -0
  87. Rhapso/split_dataset/__init__.py +0 -0
  88. Rhapso/split_dataset/compute_grid_rules.py +78 -0
  89. Rhapso/split_dataset/save_points.py +101 -0
  90. Rhapso/split_dataset/save_xml.py +377 -0
  91. Rhapso/split_dataset/split_images.py +537 -0
  92. Rhapso/split_dataset/xml_to_dataframe_split.py +219 -0
  93. rhapso-0.1.92.dist-info/METADATA +39 -0
  94. rhapso-0.1.92.dist-info/RECORD +101 -0
  95. rhapso-0.1.92.dist-info/WHEEL +5 -0
  96. rhapso-0.1.92.dist-info/licenses/LICENSE +21 -0
  97. rhapso-0.1.92.dist-info/top_level.txt +2 -0
  98. tests/__init__.py +1 -0
  99. tests/test_detection.py +17 -0
  100. tests/test_matching.py +21 -0
  101. tests/test_solving.py +21 -0
Rhapso/fusion/multiscale/aind_hcr_data_transformation/utils/utils.py
@@ -0,0 +1,526 @@
+ """
+ Utility functions for image readers
+ """
+
+ import json
+ import multiprocessing
+ import os
+ import platform
+ import subprocess
+ from concurrent.futures import ThreadPoolExecutor
+ from typing import List, Optional
+
+ import numpy as np
+ from czifile.czifile import create_output
+ from natsort import natsorted
+
+ from ..models import ArrayLike, PathLike
+
+
+ def add_leading_dim(data: ArrayLike) -> ArrayLike:
+     """
+     Adds a new leading dimension to existing data.
+
+     Parameters
+     ------------------------
+     data: ArrayLike
+         Dask/numpy array that contains image data.
+
+     Returns
+     ------------------------
+     ArrayLike:
+         Dask/numpy array with an extra leading dimension.
+     """
+
+     return data[None, ...]
+
+
+ def pad_array_n_d(arr: ArrayLike, dim: int = 5) -> ArrayLike:
+     """
+     Pads a dask/numpy array up to a 5D shape.
+
+     Parameters
+     ------------------------
+     arr: ArrayLike
+         Dask/numpy array that contains image data.
+     dim: int
+         Number of dimensions that the array will be padded to.
+
+     Returns
+     ------------------------
+     ArrayLike:
+         Padded dask/numpy array.
+     """
+     if dim > 5:
+         raise ValueError("Padding more than 5 dimensions is not supported.")
+
+     while arr.ndim < dim:
+         arr = arr[np.newaxis, ...]
+     return arr
+
+
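A quick sanity check of the padding helper (shapes are hypothetical):

import numpy as np
plane = np.zeros((1600, 2000), dtype=np.uint16)
print(pad_array_n_d(plane).shape)   # (1, 1, 1, 1600, 2000) with the default dim=5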
+ def extract_data(
+     arr: ArrayLike, last_dimensions: Optional[int] = None
+ ) -> ArrayLike:
+     """
+     Extracts n-dimensional data (numpy array or dask array)
+     given expanded dimensions.
+     e.g., (1, 1, 1, 1600, 2000) -> (1600, 2000)
+     e.g., (1, 1600, 2000) -> (1600, 2000)
+     e.g., (1, 1, 2, 1600, 2000) -> (2, 1600, 2000)
+
+     Parameters
+     ------------------------
+     arr: ArrayLike
+         Numpy or dask array with image data. It is assumed
+         that the last dimensions of the array contain
+         the information about the image.
+
+     last_dimensions: Optional[int]
+         If given, it selects the given number of dimensions
+         starting from the end of the array.
+         e.g., arr=(1, 1, 1600, 2000) last_dimensions=3 -> (1, 1600, 2000)
+         e.g., arr=(1, 1, 1600, 2000) last_dimensions=1 -> (2000,)
+
+     Raises
+     ------------------------
+     ValueError:
+         Whenever the last dimensions value is higher
+         than the array dimensions.
+
+     Returns
+     ------------------------
+     ArrayLike:
+         Reshaped array with the selected indices.
+     """
+
+     if last_dimensions is not None:
+         if last_dimensions > arr.ndim:
+             raise ValueError(
+                 "Last dimensions should not exceed the array dimensions"
+             )
+     else:
+         last_dimensions = len(arr.shape) - arr.shape.count(1)
+
+     dynamic_indices = [slice(None)] * arr.ndim
+
+     for idx in range(arr.ndim - last_dimensions):
+         dynamic_indices[idx] = 0
+
+     return arr[tuple(dynamic_indices)]
+
+
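The two helpers are inverses for singleton axes; a minimal round trip:

arr = np.zeros((1, 1, 2, 1600, 2000))
core = extract_data(arr)        # drops leading singleton axes -> (2, 1600, 2000)
back = pad_array_n_d(core)      # -> (1, 1, 2, 1600, 2000)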
+ def read_json_as_dict(filepath: PathLike) -> dict:
+     """
+     Reads a json file as a dictionary.
+
+     Parameters
+     ------------------------
+     filepath: PathLike
+         Path where the json is located.
+
+     Returns
+     ------------------------
+     dict:
+         Dictionary with the data the json has.
+     """
+
+     dictionary = {}
+
+     if os.path.exists(filepath):
+         with open(filepath) as json_file:
+             dictionary = json.load(json_file)
+
+     return dictionary
+
+
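Note that a missing path yields an empty dict rather than raising; a one-line check:

print(read_json_as_dict("/tmp/does_not_exist.json"))   # {} (hypothetical path)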
+ def sync_dir_to_s3(directory_to_upload: PathLike, s3_location: str) -> None:
+     """
+     Syncs a local directory to an s3 location by running the aws cli in a
+     subprocess.
+
+     Parameters
+     ----------
+     directory_to_upload : PathLike
+         Local directory to upload.
+     s3_location : str
+         Destination s3 URI.
+
+     Returns
+     -------
+     None
+     """
+     # Upload to s3
+     if platform.system() == "Windows":
+         shell = True
+     else:
+         shell = False
+
+     base_command = [
+         "aws",
+         "s3",
+         "sync",
+         str(directory_to_upload),
+         s3_location,
+         "--only-show-errors",
+     ]
+
+     subprocess.run(base_command, shell=shell, check=True)
+
+
+ def copy_file_to_s3(file_to_upload: PathLike, s3_location: str) -> None:
+     """
+     Copies a local file to an s3 location by running the aws cli in a
+     subprocess.
+
+     Parameters
+     ----------
+     file_to_upload : PathLike
+         Local file to upload.
+     s3_location : str
+         Destination s3 URI.
+
+     Returns
+     -------
+     None
+     """
+     # Upload to s3
+     if platform.system() == "Windows":
+         shell = True
+     else:
+         shell = False
+
+     base_command = [
+         "aws",
+         "s3",
+         "cp",
+         str(file_to_upload),
+         s3_location,
+         "--only-show-errors",
+     ]
+
+     subprocess.run(base_command, shell=shell, check=True)
+
+
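Both helpers shell out to the AWS CLI, so `aws` must be on PATH with credentials configured. A hypothetical invocation (bucket and paths are placeholders):

sync_dir_to_s3("results/fused", "s3://my-bucket/fused/")
copy_file_to_s3("results/meta.json", "s3://my-bucket/fused/meta.json")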
+ def validate_slices(start_slice: int, end_slice: int, len_dir: int):
+     """
+     Validates that the slice indices are within bounds.
+
+     Parameters
+     ----------
+     start_slice: int
+         Start slice integer
+
+     end_slice: int
+         End slice integer
+
+     len_dir: int
+         Length of the czi subblock directory
+     """
+     if not (0 <= start_slice < end_slice <= len_dir):
+         msg = (
+             f"Slices out of bounds. Total: {len_dir}, "
+             f"Start: {start_slice}, End: {end_slice}"
+         )
+         raise ValueError(msg)
+
+
+ def parallel_reader(
+     args: tuple,
+     out: np.ndarray,
+     nominal_start: np.ndarray,
+     start_slice: int,
+     ax_index: int,
+     resize: bool,
+     order: int,
+ ):
+     """
+     Reads a single subblock and places it in the output array.
+
+     Parameters
+     ----------
+     args: tuple
+         Index and directory entry of the czi file.
+
+     out: np.ndarray
+         Placeholder array for the data.
+
+     nominal_start: np.ndarray
+         Nominal start of the dataset when it was acquired.
+
+     start_slice: int
+         Start slice.
+
+     ax_index: int
+         Axis index.
+
+     resize: bool
+         True if resizing is needed when reading CZI data.
+
+     order: int
+         Interpolation order used when resizing.
+     """
+     idx, directory_entry = args
+     subblock = directory_entry.data_segment()
+     tile = subblock.data(resize=resize, order=order)
+     dir_start = np.array(directory_entry.start) - nominal_start
+
+     # Calculate index placement, shifting the slice axis into window coordinates
+     index = [slice(i, i + k) for i, k in zip(dir_start, tile.shape)]
+     index[ax_index] = slice(
+         index[ax_index].start - start_slice, index[ax_index].stop - start_slice
+     )
+
+     try:
+         out[tuple(index)] = tile
+     except ValueError as e:
+         raise ValueError(
+             f"Error writing subblock {idx + start_slice}: {e}"
+         ) from e
+
+
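The placement arithmetic in one dimension, with hypothetical numbers: a subblock starting at 40 in a file whose nominal start is 10, read into a window beginning at start_slice 16, lands at offset 40 - 10 - 16 = 14 of the output:

entry_start, nominal_start, start_slice = 40, 10, 16
offset = entry_start - nominal_start - start_slice
print(slice(offset, offset + 1))   # slice(14, 15, None) for a 1-plane tile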
+ def read_slices_czi(
+     czi_stream,
+     subblock_directory: List,
+     start_slice: int,
+     end_slice: int,
+     slice_axis: Optional[str] = "z",
+     resize: Optional[bool] = True,
+     order: Optional[int] = 0,
+     out: Optional[List[int]] = None,
+     max_workers: Optional[int] = None,
+ ):
+     """
+     Reads chunked data from CZI files. From AIND-Zeiss,
+     the data is chunked on a per-slice basis. Therefore,
+     we assume the slice axis to be 'z'.
+
+     Parameters
+     ----------
+     czi_stream
+         Opened CZI file descriptor.
+
+     subblock_directory: List
+         List of subblock directory entries. These must be ordered.
+
+     start_slice: int
+         Start slice from where the data will be pulled.
+
+     end_slice: int
+         End slice up to which the data will be pulled.
+
+     slice_axis: Optional[str] = 'z'
+         Axis to which the start and end slice parameters
+         will be applied.
+         Default: 'z'
+
+     resize: Optional[bool] = True
+         Whether to resize the tiles read from the CZI file.
+         Default: True
+
+     order: Optional[int] = 0
+         Interpolation order.
+         Default: 0
+
+     out: Optional[List[int]] = None
+         Output destination forwarded to czifile's create_output.
+         Default: None
+
+     max_workers: Optional[int] = None
+         Number of workers that will be pulling data.
+         Default: None
+
+     Returns
+     -------
+     np.ndarray
+         Numpy array with the pulled data.
+     """
+
+     shape, dtype, axes = (
+         czi_stream.shape,
+         czi_stream.dtype,
+         list(czi_stream.axes.lower()),
+     )
+     nominal_start = np.array(czi_stream.start)
+
+     len_dir = len(subblock_directory)
+
+     validate_slices(start_slice, end_slice, len_dir)
+
+     ax_index = axes.index(slice_axis.lower())
+     new_shape = list(shape)
+     new_shape[ax_index] = end_slice - start_slice
+     new_shape[axes.index("c")] = 1  # Assume 1 channel per CZI
+
+     out = create_output(out, new_shape, dtype)
+     max_workers = max_workers or min(
+         multiprocessing.cpu_count() // 2, end_slice - start_slice
+     )
+
+     selected_entries = subblock_directory[start_slice:end_slice]
+
+     if max_workers > 1 and end_slice - start_slice > 1:
+         czi_stream._fh.lock = True
+         with ThreadPoolExecutor(max_workers) as executor:
+             # NOTE: map results are not consumed, so exceptions raised in
+             # parallel_reader are not re-raised here.
+             executor.map(
+                 lambda args: parallel_reader(
+                     args,
+                     out,
+                     nominal_start,
+                     start_slice,
+                     ax_index,
+                     resize,
+                     order,
+                 ),
+                 enumerate(selected_entries),
+             )
+         czi_stream._fh.lock = None
+     else:
+         for idx, entry in enumerate(selected_entries):
+             parallel_reader(
+                 (idx, entry),
+                 out,
+                 nominal_start,
+                 start_slice,
+                 ax_index,
+                 resize,
+                 order,
+             )
+
+     if hasattr(out, "flush"):
+         out.flush()
+
+     return np.squeeze(out)
+
+
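A minimal end-to-end read, assuming czifile's CziFile API and a hypothetical local path:

from czifile import CziFile

with CziFile("tile_0.czi") as czi:   # hypothetical file
    block = read_slices_czi(
        czi,
        subblock_directory=czi.filtered_subblock_directory,
        start_slice=0,
        end_slice=8,
    )
    print(block.shape)   # first 8 z planes, squeezed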
+ def generate_jumps(n: int, jump_size: Optional[int] = 128):
+     """
+     Generates jumps for indexing.
+
+     Parameters
+     ----------
+     n: int
+         Final number for indexing.
+         It is excluded from the generated jumps.
+
+     jump_size: Optional[int] = 128
+         Jump size.
+     """
+     jumps = list(range(0, n, jump_size))
+
+     return jumps
+
+
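Worked output, to make the exclusive endpoint concrete:

print(generate_jumps(300, jump_size=128))   # [0, 128, 256]; 300 itself is not emitted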
+ def get_axis_index(czi_shape: List[int], czi_axis: str, axis_name: str):
+     """
+     Gets the axis index from the CZI natural shape.
+
+     Parameters
+     ----------
+     czi_shape: List[int]
+         List of ints of the CZI shape. CZI files come
+         with many more axes than traditional file formats.
+         Please check its documentation.
+
+     czi_axis: str
+         Axes string of the CZI file from which we will
+         pull the index.
+
+     axis_name: str
+         Axis name. Allowed axis names are:
+         ['b', 'v', 'i', 'h', 'r', 's', 'c', 't', 'z', 'y', 'x', '0']
+
+     Returns
+     -------
+     tuple
+         Index of the axis and the shape along that axis.
+     """
+     czi_axis = list(str(czi_axis).lower())
+     axis_name = axis_name.lower()
+     ALLOWED_AXIS_NAMES = [
+         "b",
+         "v",
+         "i",
+         "h",
+         "r",
+         "s",
+         "c",
+         "t",
+         "z",
+         "y",
+         "x",
+         "0",
+     ]
+
+     if axis_name not in ALLOWED_AXIS_NAMES:
+         raise ValueError(f"Axis {axis_name} not valid!")
+
+     czi_shape = list(czi_shape)
+     ax_index = czi_axis.index(axis_name)
+
+     return ax_index, czi_shape[ax_index]
+
+
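A quick check with a hypothetical axes string and shape:

ax, size = get_axis_index((1, 1, 1, 512, 1600, 2000, 1), "BSCZYX0", "z")
print(ax, size)   # 3 512 -> 'z' is axis 3 with 512 planes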
+ def czi_block_generator(
+     czi_descriptor,
+     axis_jumps: Optional[int] = 128,
+     slice_axis: Optional[str] = "z",
+ ):
+     """
+     CZI data block generator.
+
+     Parameters
+     ----------
+     czi_descriptor
+         Opened CZI file.
+
+     axis_jumps: int
+         Size of each jump along the given axis.
+         Default: 128
+
+     slice_axis: str
+         Axis in which the jumps will be generated.
+         Default: 'z'
+
+     Yields
+     ------
+     np.ndarray
+         Numpy array with the data of the picked block.
+
+     slice
+         Slice of start and end positions in the given axis.
+     """
+
+     axis_index, axis_shape = get_axis_index(
+         czi_descriptor.shape, czi_descriptor.axes, slice_axis
+     )
+
+     subblock_directory = czi_descriptor.filtered_subblock_directory
+
+     # Sorting indices so planes are ordered
+     ordered_subblock_directory = natsorted(
+         subblock_directory, key=lambda sb: sb.start[axis_index]
+     )
+
+     jumps = generate_jumps(axis_shape, axis_jumps)
+     n_jumps = len(jumps)
+     for i, start_slice in enumerate(jumps):
+         if i + 1 < n_jumps:
+             end_slice = jumps[i + 1]
+         else:
+             end_slice = axis_shape
+
+         block = read_slices_czi(
+             czi_descriptor,
+             subblock_directory=ordered_subblock_directory,
+             start_slice=start_slice,
+             end_slice=end_slice,
+             slice_axis=slice_axis,
+             resize=True,
+             order=0,
+             out=None,
+             max_workers=None,
+         )
+         yield block, slice(start_slice, end_slice)
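Putting the pieces together, a sketch of streaming z blocks (path and block size are hypothetical):

from czifile import CziFile

with CziFile("tile_0.czi") as czi:   # hypothetical file
    for block, z_window in czi_block_generator(czi, axis_jumps=64):
        print(z_window, block.shape)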