Rhapso 0.1.92 (rhapso-0.1.92-py3-none-any.whl)

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (101)
  1. Rhapso/__init__.py +1 -0
  2. Rhapso/data_prep/__init__.py +2 -0
  3. Rhapso/data_prep/n5_reader.py +188 -0
  4. Rhapso/data_prep/s3_big_stitcher_reader.py +55 -0
  5. Rhapso/data_prep/xml_to_dataframe.py +215 -0
  6. Rhapso/detection/__init__.py +5 -0
  7. Rhapso/detection/advanced_refinement.py +203 -0
  8. Rhapso/detection/difference_of_gaussian.py +324 -0
  9. Rhapso/detection/image_reader.py +117 -0
  10. Rhapso/detection/metadata_builder.py +130 -0
  11. Rhapso/detection/overlap_detection.py +327 -0
  12. Rhapso/detection/points_validation.py +49 -0
  13. Rhapso/detection/save_interest_points.py +265 -0
  14. Rhapso/detection/view_transform_models.py +67 -0
  15. Rhapso/fusion/__init__.py +0 -0
  16. Rhapso/fusion/affine_fusion/__init__.py +2 -0
  17. Rhapso/fusion/affine_fusion/blend.py +289 -0
  18. Rhapso/fusion/affine_fusion/fusion.py +601 -0
  19. Rhapso/fusion/affine_fusion/geometry.py +159 -0
  20. Rhapso/fusion/affine_fusion/io.py +546 -0
  21. Rhapso/fusion/affine_fusion/script_utils.py +111 -0
  22. Rhapso/fusion/affine_fusion/setup.py +4 -0
  23. Rhapso/fusion/affine_fusion_worker.py +234 -0
  24. Rhapso/fusion/multiscale/__init__.py +0 -0
  25. Rhapso/fusion/multiscale/aind_hcr_data_transformation/__init__.py +19 -0
  26. Rhapso/fusion/multiscale/aind_hcr_data_transformation/compress/__init__.py +3 -0
  27. Rhapso/fusion/multiscale/aind_hcr_data_transformation/compress/czi_to_zarr.py +698 -0
  28. Rhapso/fusion/multiscale/aind_hcr_data_transformation/compress/zarr_writer.py +265 -0
  29. Rhapso/fusion/multiscale/aind_hcr_data_transformation/models.py +81 -0
  30. Rhapso/fusion/multiscale/aind_hcr_data_transformation/utils/__init__.py +3 -0
  31. Rhapso/fusion/multiscale/aind_hcr_data_transformation/utils/utils.py +526 -0
  32. Rhapso/fusion/multiscale/aind_hcr_data_transformation/zeiss_job.py +249 -0
  33. Rhapso/fusion/multiscale/aind_z1_radial_correction/__init__.py +21 -0
  34. Rhapso/fusion/multiscale/aind_z1_radial_correction/array_to_zarr.py +257 -0
  35. Rhapso/fusion/multiscale/aind_z1_radial_correction/radial_correction.py +557 -0
  36. Rhapso/fusion/multiscale/aind_z1_radial_correction/run_capsule.py +98 -0
  37. Rhapso/fusion/multiscale/aind_z1_radial_correction/utils/__init__.py +3 -0
  38. Rhapso/fusion/multiscale/aind_z1_radial_correction/utils/utils.py +266 -0
  39. Rhapso/fusion/multiscale/aind_z1_radial_correction/worker.py +89 -0
  40. Rhapso/fusion/multiscale_worker.py +113 -0
  41. Rhapso/fusion/neuroglancer_link_gen/__init__.py +8 -0
  42. Rhapso/fusion/neuroglancer_link_gen/dispim_link.py +235 -0
  43. Rhapso/fusion/neuroglancer_link_gen/exaspim_link.py +127 -0
  44. Rhapso/fusion/neuroglancer_link_gen/hcr_link.py +368 -0
  45. Rhapso/fusion/neuroglancer_link_gen/iSPIM_top.py +47 -0
  46. Rhapso/fusion/neuroglancer_link_gen/link_utils.py +239 -0
  47. Rhapso/fusion/neuroglancer_link_gen/main.py +299 -0
  48. Rhapso/fusion/neuroglancer_link_gen/ng_layer.py +1434 -0
  49. Rhapso/fusion/neuroglancer_link_gen/ng_state.py +1123 -0
  50. Rhapso/fusion/neuroglancer_link_gen/parsers.py +336 -0
  51. Rhapso/fusion/neuroglancer_link_gen/raw_link.py +116 -0
  52. Rhapso/fusion/neuroglancer_link_gen/utils/__init__.py +4 -0
  53. Rhapso/fusion/neuroglancer_link_gen/utils/shader_utils.py +85 -0
  54. Rhapso/fusion/neuroglancer_link_gen/utils/transfer.py +43 -0
  55. Rhapso/fusion/neuroglancer_link_gen/utils/utils.py +303 -0
  56. Rhapso/fusion/neuroglancer_link_gen_worker.py +30 -0
  57. Rhapso/matching/__init__.py +0 -0
  58. Rhapso/matching/load_and_transform_points.py +458 -0
  59. Rhapso/matching/ransac_matching.py +544 -0
  60. Rhapso/matching/save_matches.py +120 -0
  61. Rhapso/matching/xml_parser.py +302 -0
  62. Rhapso/pipelines/__init__.py +0 -0
  63. Rhapso/pipelines/ray/__init__.py +0 -0
  64. Rhapso/pipelines/ray/aws/__init__.py +0 -0
  65. Rhapso/pipelines/ray/aws/alignment_pipeline.py +227 -0
  66. Rhapso/pipelines/ray/aws/config/__init__.py +0 -0
  67. Rhapso/pipelines/ray/evaluation.py +71 -0
  68. Rhapso/pipelines/ray/interest_point_detection.py +137 -0
  69. Rhapso/pipelines/ray/interest_point_matching.py +110 -0
  70. Rhapso/pipelines/ray/local/__init__.py +0 -0
  71. Rhapso/pipelines/ray/local/alignment_pipeline.py +167 -0
  72. Rhapso/pipelines/ray/matching_stats.py +104 -0
  73. Rhapso/pipelines/ray/param/__init__.py +0 -0
  74. Rhapso/pipelines/ray/solver.py +120 -0
  75. Rhapso/pipelines/ray/split_dataset.py +78 -0
  76. Rhapso/solver/__init__.py +0 -0
  77. Rhapso/solver/compute_tiles.py +562 -0
  78. Rhapso/solver/concatenate_models.py +116 -0
  79. Rhapso/solver/connected_graphs.py +111 -0
  80. Rhapso/solver/data_prep.py +181 -0
  81. Rhapso/solver/global_optimization.py +410 -0
  82. Rhapso/solver/model_and_tile_setup.py +109 -0
  83. Rhapso/solver/pre_align_tiles.py +323 -0
  84. Rhapso/solver/save_results.py +97 -0
  85. Rhapso/solver/view_transforms.py +75 -0
  86. Rhapso/solver/xml_to_dataframe_solver.py +213 -0
  87. Rhapso/split_dataset/__init__.py +0 -0
  88. Rhapso/split_dataset/compute_grid_rules.py +78 -0
  89. Rhapso/split_dataset/save_points.py +101 -0
  90. Rhapso/split_dataset/save_xml.py +377 -0
  91. Rhapso/split_dataset/split_images.py +537 -0
  92. Rhapso/split_dataset/xml_to_dataframe_split.py +219 -0
  93. rhapso-0.1.92.dist-info/METADATA +39 -0
  94. rhapso-0.1.92.dist-info/RECORD +101 -0
  95. rhapso-0.1.92.dist-info/WHEEL +5 -0
  96. rhapso-0.1.92.dist-info/licenses/LICENSE +21 -0
  97. rhapso-0.1.92.dist-info/top_level.txt +2 -0
  98. tests/__init__.py +1 -0
  99. tests/test_detection.py +17 -0
  100. tests/test_matching.py +21 -0
  101. tests/test_solving.py +21 -0
@@ -0,0 +1,303 @@
+"""
+Utility functions
+"""
+import json
+import os
+import shutil
+import subprocess
+from pathlib import Path
+from typing import Generator, List, Optional, Union
+
+import boto3
+import pandas as pd
+
+# IO types
+PathLike = Union[str, Path]
+
+
+def create_folder(dest_dir: PathLike, verbose: Optional[bool] = False) -> None:
+    """
+    Create new folders.
+
+    Parameters
+    ------------------------
+    dest_dir: PathLike
+        Path where the folder will be created if it does not exist.
+    verbose: Optional[bool]
+        Whether to print information about the folder status. Default False.
+
+    Raises
+    ------------------------
+    OSError:
+        If the folder could not be created.
+
+    """
+
+    if not os.path.exists(dest_dir):
+        try:
+            if verbose:
+                print(f"Creating new directory: {dest_dir}")
+            os.makedirs(dest_dir)
+        except FileExistsError:
+            # The directory already exists (e.g., created concurrently); nothing to do.
+            pass
+
+
+def delete_folder(dest_dir: PathLike, verbose: Optional[bool] = False) -> None:
+    """
+    Delete a folder path.
+
+    Parameters
+    ------------------------
+    dest_dir: PathLike
+        Path that will be removed.
+    verbose: Optional[bool]
+        Whether to print information about the folder status. Default False.
+
+    Raises
+    ------------------------
+    shutil.Error:
+        If the folder could not be removed.
+
+    Returns
+    ------------------------
+    None
+
+    """
+    if os.path.exists(dest_dir):
+        try:
+            shutil.rmtree(dest_dir)
+            if verbose:
+                print(f"Folder {dest_dir} was removed!")
+        except shutil.Error as e:
+            print(f"Folder could not be removed! Error {e}")
+
+
+def execute_command_helper(
+    command: str,
+    print_command: bool = False,
+    stdout_log_file: Optional[PathLike] = None,
+) -> Generator[str, None, None]:
+    """
+    Execute a shell command and yield its stdout lines.
+
+    Parameters
+    ------------------------
+    command: str
+        Command to execute.
+    print_command: bool
+        Whether to print the command in the console.
+    stdout_log_file: Optional[PathLike]
+        If provided, the command is appended to this log file before running.
+
+    Raises
+    ------------------------
+    CalledProcessError:
+        If the command could not be executed (returned non-zero status).
+
+    """
+
+    if print_command:
+        print(command)
+
+    if stdout_log_file and len(str(stdout_log_file)):
+        save_string_to_txt("$ " + command, stdout_log_file, "a")
+
+    popen = subprocess.Popen(
+        command, stdout=subprocess.PIPE, universal_newlines=True, shell=True
+    )
+    for stdout_line in iter(popen.stdout.readline, ""):
+        yield str(stdout_line).strip()
+    popen.stdout.close()
+    return_code = popen.wait()
+    if return_code:
+        raise subprocess.CalledProcessError(return_code, command)
+
+
+def execute_command(config: dict) -> None:
+    """
+    Execute a shell command with a given configuration.
+
+    Parameters
+    ------------------------
+    config: dict
+        Dictionary with the command to execute ("command"), whether to print
+        it ("verbose"), the stdout log file ("stdout_log_file"), the logger
+        ("logger"), and whether stdout should also be written to the log file
+        ("exists_stdout").
+
+    Raises
+    ------------------------
+    CalledProcessError:
+        If the command could not be executed (returned non-zero status).
+
+    """
+
+    for out in execute_command_helper(
+        config["command"], config["verbose"], config["stdout_log_file"]
+    ):
+        if len(out):
+            config["logger"].info(out)
+
+        if config["exists_stdout"]:
+            save_string_to_txt(out, config["stdout_log_file"], "a")
+
+
+def check_path_instance(obj: object) -> bool:
+    """
+    Checks if an object belongs to a pathlib.Path subclass.
+
+    Parameters
+    ------------------------
+    obj: object
+        Object to validate.
+
+    Returns
+    ------------------------
+    bool:
+        True if the object is an instance of a Path subclass, False otherwise.
+    """
+
+    for childclass in Path.__subclasses__():
+        if isinstance(obj, childclass):
+            return True
+
+    return False
+
+
+def save_dict_as_json(
+    filename: str, dictionary: dict, verbose: Optional[bool] = False
+) -> None:
+    """
+    Saves a dictionary as a json file.
+
+    Parameters
+    ------------------------
+    filename: str
+        Name of the json file.
+    dictionary: dict
+        Dictionary that will be saved as json.
+    verbose: Optional[bool]
+        If True, print the path where the file was saved.
+
+    """
+
+    if dictionary is None:
+        dictionary = {}
+
+    else:
+        for key, value in dictionary.items():
+            # Converting path to str to dump dictionary into json
+            if check_path_instance(value):
+                # TODO fix the \\ encode problem in dump
+                dictionary[key] = str(value)
+
+    with open(filename, "w") as json_file:
+        json.dump(dictionary, json_file, indent=4)
+
+    if verbose:
+        print(f"- Json file saved: {filename}")
+
+
+def read_json_as_dict(filepath: str) -> Optional[dict]:
+    """
+    Reads a JSON file as a dictionary.
+
+    Parameters
+    ------------------------
+    filepath: PathLike
+        Path where the json is located.
+
+    Returns
+    ------------------------
+    dict:
+        Dictionary with the data the json has, or None if the file does not exist.
+
+    """
+
+    dictionary = None
+
+    if os.path.exists(filepath):
+        with open(filepath) as json_file:
+            dictionary = json.load(json_file)
+
+    return dictionary
+
+
+def save_string_to_txt(txt: str, filepath: PathLike, mode="w") -> None:
+    """
+    Saves a string to a file using the given mode.
+
+    Parameters
+    ------------------------
+    txt: str
+        String to be saved.
+
+    filepath: PathLike
+        Path where the file is located or will be saved.
+
+    mode: str
+        File open mode.
+
+    """
+
+    with open(filepath, mode) as file:
+        file.write(txt + "\n")
+
+
+def create_s3_client() -> boto3.client:
+    """
+    Create and return a boto3 S3 client.
+
+    Returns
+    -------
+    boto3.Client
+        A boto3 S3 client object.
+    """
+    return boto3.client("s3")
+
+
+def list_folders_s3(
+    s3_client: boto3.client, bucket_name: str, prefix: str
+) -> list:
+    """
+    List top-level folders in an S3 bucket with a specified prefix.
+
+    Parameters
+    ----------
+    s3_client : boto3.Client
+        The S3 client object.
+    bucket_name : str
+        The name of the S3 bucket.
+    prefix : str
+        The prefix to filter folders.
+
+    Returns
+    -------
+    list
+        A list of folder names.
+    """
+    response = s3_client.list_objects_v2(
+        Bucket=bucket_name, Prefix=prefix, Delimiter="/"
+    )
+    return [
+        content.get("Prefix").rstrip("/")
+        for content in response.get("CommonPrefixes", [])
+    ]
+
+
+def save_to_csv(data: List[dict], file_path: str) -> str:
+    """
+    Save the given data to a CSV file.
+
+    Parameters
+    ----------
+    data : List[dict]
+        The data to be saved.
+    file_path : str
+        The file path for the CSV file.
+
+    Returns
+    -------
+    str
+        The path of the saved CSV file.
+    """
+    df = pd.DataFrame(data)
+    df.to_csv(file_path, index=False)
+    return file_path
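
Taken together, these helpers cover local folder management, JSON/CSV persistence, shell execution, and light S3 listing. A minimal usage sketch follows, assuming the module is importable at the path shown in the file list above; the folder names, bucket, and prefix are placeholders for illustration, not values taken from the package:

    from Rhapso.fusion.neuroglancer_link_gen.utils.utils import (
        create_folder,
        create_s3_client,
        list_folders_s3,
        save_dict_as_json,
        save_to_csv,
    )

    # Hypothetical output folder and parameter file.
    create_folder("results", verbose=True)
    save_dict_as_json("results/params.json", {"vmin": 90, "vmax": 400}, verbose=True)

    # List top-level "folders" under a placeholder prefix and record them as CSV.
    s3 = create_s3_client()
    folders = list_folders_s3(s3, bucket_name="example-bucket", prefix="datasets/")
    save_to_csv([{"folder": f} for f in folders], "results/folders.csv")
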
@@ -0,0 +1,30 @@
+'''
+Worker script with hard-coded parameters to generate Neuroglancer link
+'''
+
+from Rhapso.fusion.neuroglancer_link_gen.main import generate_neuroglancer_link
+
+# Hard-coded parameters
+ZARR_PATH = "s3://martin-test-bucket/output7/multiscale_channel_488.zarr"
+VMIN = 90
+VMAX = 400
+JSON_UPLOAD_BUCKET = "martin-test-bucket"
+JSON_UPLOAD_PATH = "NG_out.json"
+JSON_LOCAL_OUTPUT = "results"
+DATASET_TYPE = "hcr"
+OPACITY = 0.5
+BLEND = "default"
+
+if __name__ == "__main__":
+    # Call the function with hard-coded parameters
+    generate_neuroglancer_link(
+        zarr_path=ZARR_PATH,
+        vmin=VMIN,
+        vmax=VMAX,
+        json_upload_bucket=JSON_UPLOAD_BUCKET,
+        json_upload_path=JSON_UPLOAD_PATH,
+        json_local_output=JSON_LOCAL_OUTPUT,
+        dataset_type=DATASET_TYPE,
+        opacity=OPACITY,
+        blend=BLEND
+    )
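
Because every parameter is a module-level constant, the worker has no command-line interface; a minimal sketch of launching it as a module, assuming the wheel is installed in the current environment and the hard-coded bucket and Zarr path have been edited to locations you can read and write:

    import subprocess

    # Runs Rhapso/fusion/neuroglancer_link_gen_worker.py via the standard -m mechanism.
    subprocess.run(
        ["python", "-m", "Rhapso.fusion.neuroglancer_link_gen_worker"],
        check=True,
    )
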