Rhapso 0.1.92__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- Rhapso/__init__.py +1 -0
- Rhapso/data_prep/__init__.py +2 -0
- Rhapso/data_prep/n5_reader.py +188 -0
- Rhapso/data_prep/s3_big_stitcher_reader.py +55 -0
- Rhapso/data_prep/xml_to_dataframe.py +215 -0
- Rhapso/detection/__init__.py +5 -0
- Rhapso/detection/advanced_refinement.py +203 -0
- Rhapso/detection/difference_of_gaussian.py +324 -0
- Rhapso/detection/image_reader.py +117 -0
- Rhapso/detection/metadata_builder.py +130 -0
- Rhapso/detection/overlap_detection.py +327 -0
- Rhapso/detection/points_validation.py +49 -0
- Rhapso/detection/save_interest_points.py +265 -0
- Rhapso/detection/view_transform_models.py +67 -0
- Rhapso/fusion/__init__.py +0 -0
- Rhapso/fusion/affine_fusion/__init__.py +2 -0
- Rhapso/fusion/affine_fusion/blend.py +289 -0
- Rhapso/fusion/affine_fusion/fusion.py +601 -0
- Rhapso/fusion/affine_fusion/geometry.py +159 -0
- Rhapso/fusion/affine_fusion/io.py +546 -0
- Rhapso/fusion/affine_fusion/script_utils.py +111 -0
- Rhapso/fusion/affine_fusion/setup.py +4 -0
- Rhapso/fusion/affine_fusion_worker.py +234 -0
- Rhapso/fusion/multiscale/__init__.py +0 -0
- Rhapso/fusion/multiscale/aind_hcr_data_transformation/__init__.py +19 -0
- Rhapso/fusion/multiscale/aind_hcr_data_transformation/compress/__init__.py +3 -0
- Rhapso/fusion/multiscale/aind_hcr_data_transformation/compress/czi_to_zarr.py +698 -0
- Rhapso/fusion/multiscale/aind_hcr_data_transformation/compress/zarr_writer.py +265 -0
- Rhapso/fusion/multiscale/aind_hcr_data_transformation/models.py +81 -0
- Rhapso/fusion/multiscale/aind_hcr_data_transformation/utils/__init__.py +3 -0
- Rhapso/fusion/multiscale/aind_hcr_data_transformation/utils/utils.py +526 -0
- Rhapso/fusion/multiscale/aind_hcr_data_transformation/zeiss_job.py +249 -0
- Rhapso/fusion/multiscale/aind_z1_radial_correction/__init__.py +21 -0
- Rhapso/fusion/multiscale/aind_z1_radial_correction/array_to_zarr.py +257 -0
- Rhapso/fusion/multiscale/aind_z1_radial_correction/radial_correction.py +557 -0
- Rhapso/fusion/multiscale/aind_z1_radial_correction/run_capsule.py +98 -0
- Rhapso/fusion/multiscale/aind_z1_radial_correction/utils/__init__.py +3 -0
- Rhapso/fusion/multiscale/aind_z1_radial_correction/utils/utils.py +266 -0
- Rhapso/fusion/multiscale/aind_z1_radial_correction/worker.py +89 -0
- Rhapso/fusion/multiscale_worker.py +113 -0
- Rhapso/fusion/neuroglancer_link_gen/__init__.py +8 -0
- Rhapso/fusion/neuroglancer_link_gen/dispim_link.py +235 -0
- Rhapso/fusion/neuroglancer_link_gen/exaspim_link.py +127 -0
- Rhapso/fusion/neuroglancer_link_gen/hcr_link.py +368 -0
- Rhapso/fusion/neuroglancer_link_gen/iSPIM_top.py +47 -0
- Rhapso/fusion/neuroglancer_link_gen/link_utils.py +239 -0
- Rhapso/fusion/neuroglancer_link_gen/main.py +299 -0
- Rhapso/fusion/neuroglancer_link_gen/ng_layer.py +1434 -0
- Rhapso/fusion/neuroglancer_link_gen/ng_state.py +1123 -0
- Rhapso/fusion/neuroglancer_link_gen/parsers.py +336 -0
- Rhapso/fusion/neuroglancer_link_gen/raw_link.py +116 -0
- Rhapso/fusion/neuroglancer_link_gen/utils/__init__.py +4 -0
- Rhapso/fusion/neuroglancer_link_gen/utils/shader_utils.py +85 -0
- Rhapso/fusion/neuroglancer_link_gen/utils/transfer.py +43 -0
- Rhapso/fusion/neuroglancer_link_gen/utils/utils.py +303 -0
- Rhapso/fusion/neuroglancer_link_gen_worker.py +30 -0
- Rhapso/matching/__init__.py +0 -0
- Rhapso/matching/load_and_transform_points.py +458 -0
- Rhapso/matching/ransac_matching.py +544 -0
- Rhapso/matching/save_matches.py +120 -0
- Rhapso/matching/xml_parser.py +302 -0
- Rhapso/pipelines/__init__.py +0 -0
- Rhapso/pipelines/ray/__init__.py +0 -0
- Rhapso/pipelines/ray/aws/__init__.py +0 -0
- Rhapso/pipelines/ray/aws/alignment_pipeline.py +227 -0
- Rhapso/pipelines/ray/aws/config/__init__.py +0 -0
- Rhapso/pipelines/ray/evaluation.py +71 -0
- Rhapso/pipelines/ray/interest_point_detection.py +137 -0
- Rhapso/pipelines/ray/interest_point_matching.py +110 -0
- Rhapso/pipelines/ray/local/__init__.py +0 -0
- Rhapso/pipelines/ray/local/alignment_pipeline.py +167 -0
- Rhapso/pipelines/ray/matching_stats.py +104 -0
- Rhapso/pipelines/ray/param/__init__.py +0 -0
- Rhapso/pipelines/ray/solver.py +120 -0
- Rhapso/pipelines/ray/split_dataset.py +78 -0
- Rhapso/solver/__init__.py +0 -0
- Rhapso/solver/compute_tiles.py +562 -0
- Rhapso/solver/concatenate_models.py +116 -0
- Rhapso/solver/connected_graphs.py +111 -0
- Rhapso/solver/data_prep.py +181 -0
- Rhapso/solver/global_optimization.py +410 -0
- Rhapso/solver/model_and_tile_setup.py +109 -0
- Rhapso/solver/pre_align_tiles.py +323 -0
- Rhapso/solver/save_results.py +97 -0
- Rhapso/solver/view_transforms.py +75 -0
- Rhapso/solver/xml_to_dataframe_solver.py +213 -0
- Rhapso/split_dataset/__init__.py +0 -0
- Rhapso/split_dataset/compute_grid_rules.py +78 -0
- Rhapso/split_dataset/save_points.py +101 -0
- Rhapso/split_dataset/save_xml.py +377 -0
- Rhapso/split_dataset/split_images.py +537 -0
- Rhapso/split_dataset/xml_to_dataframe_split.py +219 -0
- rhapso-0.1.92.dist-info/METADATA +39 -0
- rhapso-0.1.92.dist-info/RECORD +101 -0
- rhapso-0.1.92.dist-info/WHEEL +5 -0
- rhapso-0.1.92.dist-info/licenses/LICENSE +21 -0
- rhapso-0.1.92.dist-info/top_level.txt +2 -0
- tests/__init__.py +1 -0
- tests/test_detection.py +17 -0
- tests/test_matching.py +21 -0
- tests/test_solving.py +21 -0
|
@@ -0,0 +1,111 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Utilities for scripts.
|
|
3
|
+
"""
|
|
4
|
+
|
|
5
|
+
import boto3
|
|
6
|
+
import re
|
|
7
|
+
import yaml
|
|
8
|
+
import fsspec
|
|
9
|
+
|
|
10
|
+
def read_config_yaml(yaml_path: str) -> dict:
    """
    Read a YAML config file from S3 or the local filesystem.

    Parameters
    ------------------------
    yaml_path: str
        Path to the YAML file. Paths starting with "s3://" are opened
        through fsspec; anything else is opened as a local file.

    Returns
    ------------------------
    dict:
        Parsed YAML content (whatever ``yaml.safe_load`` returns —
        typically a dict for mapping-style config files).
    """
    # Both branches parsed identically in the original; only the opener
    # differs, so choose it once instead of duplicating the load.
    opener = (
        fsspec.open(yaml_path, "rt")
        if yaml_path.startswith("s3://")
        else open(yaml_path, "r")
    )
    with opener as f:
        return yaml.safe_load(f)
|
|
19
|
+
|
|
20
|
+
def write_config_yaml(yaml_path: str, yaml_data: dict) -> None:
    """
    Serialize ``yaml_data`` as YAML to S3 or the local filesystem.

    Parameters
    ------------------------
    yaml_path: str
        Destination path. Paths starting with "s3://" are written
        through fsspec; anything else is written as a local file.
    yaml_data: dict
        Data to serialize with ``yaml.dump``.
    """
    is_remote = yaml_path.startswith("s3://")
    if is_remote:
        with fsspec.open(yaml_path, "wt") as out:
            yaml.dump(yaml_data, out)
    else:
        with open(yaml_path, "w") as out:
            yaml.dump(yaml_data, out)
|
|
27
|
+
|
|
28
|
+
def list_all_tiles_in_bucket_path(
    bucket_SPIM_folder: str, bucket_name="aind-open-data"
) -> list:
    """
    Lists all tiles in a given bucket path

    Parameters
    ------------------------
    bucket_SPIM_folder: str
        Path to SPIM folder in bucket, e.g. "s3://bucket/prefix/" or
        "bucket/prefix/". Must contain at least one "/" after the
        bucket name.
    bucket_name: str
        Name of bucket. NOTE(review): kept for backward compatibility
        but effectively ignored — the bucket is always parsed out of
        ``bucket_SPIM_folder`` (the original code overwrote this
        argument too).

    Returns
    ------------------------
    list:
        List of all tile prefixes (str) directly under the SPIM folder.
        Empty list when nothing matches.
    """
    bucket_name, prefix = bucket_SPIM_folder.replace("s3://", "").split("/", 1)

    client = boto3.client("s3")
    # Paginate with list_objects_v2: the legacy list_objects call
    # returns at most 1000 entries per request, silently truncating
    # large datasets.
    paginator = client.get_paginator("list_objects_v2")

    tiles = []
    for page in paginator.paginate(
        Bucket=bucket_name, Prefix=prefix, Delimiter="/"
    ):
        # "CommonPrefixes" is absent when no keys match; default to []
        # instead of iterating None (original crashed with TypeError).
        for o in page.get("CommonPrefixes", []):
            tiles.append(o.get("Prefix"))
    return tiles
|
|
58
|
+
|
|
59
|
+
def extract_channel_from_tile_path(t_path: str) -> int:
    """
    Extracts channel from tile path naming convention:
    tile_X_####_Y_####_Z_####_ch_####.filetype

    Parameters
    ------------------------
    t_path: str
        Tile path to run regex on.

    Returns
    ------------------------
    int:
        Channel value.

    Raises
    ------------------------
    ValueError:
        If the path contains no "ch_####" / "CH_####" token. (The
        original raised an opaque AttributeError on ``None.group``.)
    """
    pattern = r"(ch|CH)_(\d+)"
    match = re.search(pattern, t_path)
    if match is None:
        raise ValueError(
            f"Could not extract channel from tile path: {t_path!r}"
        )
    return int(match.group(2))
|
|
80
|
+
|
|
81
|
+
def get_unique_channels_for_dataset(dataset_path: str) -> list:
    """
    Extracts a list of channels in a given dataset

    Parameters:
    -----------
    dataset_path: str
        Path to a dataset's zarr folder
        (e.g. s3://aind-open-data/HCR_677594_2023-10-13_13-55-48/SPIM.ome.zarr/)

    Returns:
    --------
    unique_list_of_channels: list(int)
        A list of int, containing the unique list of channel wavelengths

    """
    tiles_in_path = list_all_tiles_in_bucket_path(
        dataset_path, "aind-open-data"
    )

    # dict.fromkeys drops duplicates while keeping first-seen order,
    # matching the original membership-check loop.
    channels = (extract_channel_from_tile_path(tile) for tile in tiles_in_path)
    return list(dict.fromkeys(channels))
|
|
@@ -0,0 +1,234 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Runs fusion from config file generated
|
|
3
|
+
from dispim or exaspim scheduler.
|
|
4
|
+
Manages full Ray cluster lifecycle (ray up/exec/down) on AWS.
|
|
5
|
+
"""
|
|
6
|
+
|
|
7
|
+
import signal
|
|
8
|
+
import sys
|
|
9
|
+
import subprocess
|
|
10
|
+
from pathlib import Path
|
|
11
|
+
|
|
12
|
+
from Rhapso.fusion.affine_fusion import blend as blend
|
|
13
|
+
from Rhapso.fusion.affine_fusion import fusion as fusion
|
|
14
|
+
from Rhapso.fusion.affine_fusion import geometry as geometry
|
|
15
|
+
from Rhapso.fusion.affine_fusion import io as io
|
|
16
|
+
from Rhapso.fusion.affine_fusion import script_utils as script_utils
|
|
17
|
+
|
|
18
|
+
# Global state shared between execute_job() and signal_handler(): the
# SIGINT/SIGTERM handler needs the cluster config path to tear the
# cluster down, and cleanup_cluster() uses a flag to stay idempotent.
ray_config_path = None  # set by execute_job(); read by signal_handler()
should_cleanup = False  # True once cleanup_cluster() has completed
|
|
21
|
+
|
|
22
|
+
def cleanup_cluster(yml_filename: str, cwd: Path):
    """Clean up the Ray cluster and handle any errors gracefully.

    yml_filename: Ray cluster config filename, resolved relative to ``cwd``.
    cwd: Directory containing the cluster config file.

    Idempotent via the module-level ``should_cleanup`` flag: once any
    call completes, later calls return immediately.
    """
    global should_cleanup
    if should_cleanup:
        return  # Already cleaned up

    print("\n=== Cleaning up cluster ===")
    print("$", " ".join(["ray", "down", yml_filename, "-y"]))
    try:
        subprocess.run(["ray", "down", yml_filename, "-y"], cwd=cwd, capture_output=False, text=True)
        print("✅ Cluster cleanup completed")
    except Exception as cleanup_error:
        print(f"⚠️ Cluster cleanup failed: {cleanup_error}")
        try:
            print("Trying alternative cleanup...")
            # Retry without the non-interactive "-y" flag.
            subprocess.run(["ray", "down", yml_filename], cwd=cwd, capture_output=False, text=True)
        except Exception:
            # Was a bare ``except:`` — still best-effort, but no longer
            # swallows SystemExit/KeyboardInterrupt.
            print("Alternative cleanup also failed - cluster may need manual cleanup")

    should_cleanup = True
|
|
42
|
+
|
|
43
|
+
|
|
44
|
+
def cleanup_existing_cluster(yml_filename: str, cwd: Path):
    """Clean up any existing cluster before starting a new one.

    Best-effort: any failure (including the ``ray`` binary being
    absent) is reported and swallowed, since there may simply be no
    cluster to tear down.

    yml_filename: Ray cluster config filename, resolved relative to ``cwd``.
    cwd: Directory containing the cluster config file.
    """
    print("\n=== Clean up any existing cluster ===")
    print("$", " ".join(["ray", "down", yml_filename, "-y"]))
    try:
        subprocess.run(["ray", "down", yml_filename, "-y"], cwd=cwd, capture_output=False, text=True)
        print("✅ Cleanup completed (or no existing cluster)")
    except Exception:
        # Was a bare ``except:`` — keep best-effort semantics without
        # also swallowing SystemExit/KeyboardInterrupt.
        print("ℹ️ No existing cluster to clean up")
|
|
53
|
+
|
|
54
|
+
|
|
55
|
+
def start_cluster(yml_filename: str, cwd: Path):
    """Start the Ray cluster with ``ray up``.

    yml_filename: Ray cluster config filename, resolved relative to ``cwd``.
    cwd: Directory containing the cluster config file.

    Raises
    ------
    subprocess.CalledProcessError
        If ``ray up`` exits non-zero; the cluster is torn down via
        cleanup_cluster() before re-raising.
    """
    print("\n=== Start cluster ===")
    print("$", " ".join(["ray", "up", yml_filename, "-y"]))
    try:
        # capture_output=False streams ray's output straight to the
        # console. That also means CompletedProcess.stdout/stderr are
        # always None — the original's post-success prints of them were
        # dead code and have been removed.
        subprocess.run(["ray", "up", yml_filename, "-y"], check=True, cwd=cwd, capture_output=False, text=True)
        print("✅ Cluster started successfully")
    except subprocess.CalledProcessError as e:
        print(f"❌ Cluster startup failed with return code {e.returncode}")
        # stdout/stderr are only populated when output was captured;
        # guard so we never print "STDOUT: None".
        if e.stdout:
            print("STDOUT:", e.stdout)
        if e.stderr:
            print("STDERR:", e.stderr)
        cleanup_cluster(yml_filename, cwd)
        raise
|
|
72
|
+
|
|
73
|
+
|
|
74
|
+
def signal_handler(sig, frame):
    """Tear the Ray cluster down on SIGINT/SIGTERM, then exit cleanly.

    Uses the module-level ``ray_config_path`` published by
    execute_job(); when it is unset there is no cluster to clean up.
    """
    global ray_config_path
    print("\n\n⚠️ Interrupt received (Ctrl+C). Cleaning up...")
    if ray_config_path:
        config = Path(ray_config_path)
        cleanup_cluster(config.name, config.parent)
    print("✅ Cleanup completed. Exiting.")
    sys.exit(0)
|
|
84
|
+
|
|
85
|
+
|
|
86
|
+
def execute_job(xml_path: str, image_data_input_path: str, output_s3_path: str, dataset_type: str, channel: int, ray_cluster_config_path: str):
    """
    Execute fusion job with full Ray cluster lifecycle management.

    The cluster is created with ``ray up``, the fusion script is run on it
    with ``ray exec``, and ``ray down`` is always attempted in the
    ``finally`` block, so a failure cannot leave the cluster running.

    xml_path: Path to BigStitcher XML file (local or S3)
    image_data_input_path: Path to input image data (local or S3)
    output_s3_path: Path to output location (local or S3)
    dataset_type: Type of dataset ('BigStitcherDataset' or 'BigStitcherDatasetChannel')
    channel: Channel number (int or None)
    ray_cluster_config_path: Path to Ray cluster config YAML (e.g., fusion_cluster_martin.yml)
    """
    # Publish the config path so signal_handler() can tear the cluster
    # down if this process receives SIGINT/SIGTERM mid-run.
    global ray_config_path
    ray_config_path = ray_cluster_config_path

    # Get the directory containing the ray config file; ray CLI commands
    # are run from that directory with just the filename.
    ray_config_dir = Path(ray_cluster_config_path).parent
    yml_filename = Path(ray_cluster_config_path).name

    try:
        # Clean up any existing cluster first
        cleanup_existing_cluster(yml_filename, ray_config_dir)

        # Start the Ray cluster
        start_cluster(yml_filename, ray_config_dir)

        # Application Parameters
        cell_size = [640, 256, 256]
        chunksize = (1, 1, 640, 256, 256)

        # Reconstruct objects on cluster and run fusion.run_fusion()
        channel_arg = f"channel={channel}" if channel is not None else "channel=None"

        # Create the fusion command to run on the cluster.
        # NOTE(review): this builds `bash -lc "python3 - <<\"PY\" ... PY"`
        # — a heredoc-fed Python script inside a double-quoted bash -lc
        # string. The \\\" sequences are the escaping that makes the
        # heredoc delimiter survive both quoting layers. The interpolated
        # paths/dataset_type are NOT shell-escaped, so values containing
        # quotes or $ would break the command — acceptable only for
        # trusted inputs.
        fusion_cmd = (
            "bash -lc \""
            "python3 - <<\\\"PY\\\"\n"
            "import sys\n"
            "sys.path.append('/home/ubuntu')\n"
            "\n"
            "from Rhapso.fusion.affine_fusion import blend, fusion, geometry, io\n"
            "\n"
            "# Reconstruct objects on cluster\n"
            f"dataset_type = \\\"{dataset_type}\\\"\n"
            f"{channel_arg}\n"
            f"xml_path = \\\"{xml_path}\\\"\n"
            f"image_data_input_path = \\\"{image_data_input_path}\\\"\n"
            f"output_s3_path = \\\"{output_s3_path}\\\"\n"
            f"cell_size = {cell_size}\n"
            f"chunksize = {chunksize}\n"
            "\n"
            "if dataset_type == 'BigStitcherDataset':\n"
            "    DATASET = io.BigStitcherDataset(xml_path, image_data_input_path, datastore=0)\n"
            "elif dataset_type == 'BigStitcherDatasetChannel':\n"
            "    DATASET = io.BigStitcherDatasetChannel(xml_path, image_data_input_path, channel, datastore=0)\n"
            "\n"
            "OUTPUT_PARAMS = io.OutputParameters(\n"
            "    path=output_s3_path,\n"
            "    chunksize=chunksize,\n"
            "    resolution_zyx=DATASET.tile_resolution_zyx,\n"
            "    datastore=0\n"
            ")\n"
            "\n"
            "CELL_SIZE = cell_size\n"
            "POST_REG_TFMS = []\n"
            "\n"
            "_, _, _, _, tile_aabbs, _, _ = fusion.initialize_fusion(\n"
            "    DATASET, POST_REG_TFMS, OUTPUT_PARAMS\n"
            ")\n"
            "\n"
            "BLENDING_MODULE = blend.WeightedLinearBlending(tile_aabbs)\n"
            "\n"
            "fusion.run_fusion(\n"
            "    DATASET,\n"
            "    OUTPUT_PARAMS,\n"
            "    CELL_SIZE,\n"
            "    POST_REG_TFMS,\n"
            "    BLENDING_MODULE,\n"
            ")\n"
            "PY\n"
            "\""
        )

        # Run fusion on the cluster using ray exec
        print(f'\n🚀 Starting fusion.run_fusion() on cluster')
        print(f'   Output will be saved to: {output_s3_path}')

        try:
            # capture_output=False streams the remote job's output live;
            # the CompletedProcess result is therefore not inspected.
            result = subprocess.run(
                ["ray", "exec", yml_filename, fusion_cmd],
                cwd=ray_config_dir,
                capture_output=False,
                text=True,
                check=True,
                timeout = 8 * 60 * 60  # 8 hour timeout
            )
        except subprocess.TimeoutExpired:
            print("❌ Fusion timed out after 8 hours")
            # Tear down immediately; cleanup_cluster is idempotent so
            # the finally-block call below becomes a no-op.
            cleanup_cluster(yml_filename, ray_config_dir)
            raise
        except subprocess.CalledProcessError as e:
            print(f"❌ Fusion failed with exit code {e.returncode}")
            cleanup_cluster(yml_filename, ray_config_dir)
            raise

        print(f"\n{'='*60}")
        print(f"✅ FUSION JOB COMPLETED")
        print(f"📁 Output saved to: {output_s3_path}")
        print(f"{'='*60}\n")

    except KeyboardInterrupt:
        print("\n⚠️ Job interrupted by user")
        raise
    except Exception as e:
        print(f"\n{'='*60}")
        print(f"❌ FUSION JOB FAILED")
        print(f"❌ Error: {e}")
        print(f"{'='*60}\n")
        raise
    finally:
        # Always try to clean up, even if everything succeeded
        cleanup_cluster(yml_filename, ray_config_dir)
|
|
207
|
+
|
|
208
|
+
if __name__ == '__main__':
    # Register signal handler for graceful shutdown
    signal.signal(signal.SIGINT, signal_handler)
    signal.signal(signal.SIGTERM, signal_handler)

    # NOTE(review): job parameters are hard-coded, including a personal
    # test bucket and a user-specific cluster config — presumably a dev
    # harness; confirm before treating this as a production entry point.
    xml_path = "s3://aind-open-data/HCR_802704_2025-08-30_02-00-00_processed_2025-10-01_21-09-24/image_tile_alignment/bigstitcher.xml"
    image_data_input_path = "s3://aind-open-data/HCR_802704_2025-08-30_02-00-00_processed_2025-10-01_21-09-24/image_radial_correction/"
    output_s3_path = "s3://martin-test-bucket/output7/channel_488.zarr"
    dataset_type = "BigStitcherDataset"
    channel = None  # list channel num (int) if fusing a specific channel from an xml of multiple channels
    ray_cluster_config_path = 'Rhapso/pipelines/ray/aws/config/dev/fusion_cluster_martin.yml'

    # Echo the effective configuration before doing anything expensive.
    print(f'{xml_path=}')
    print(f'{image_data_input_path=}')
    print(f'{output_s3_path=}')
    print(f'{dataset_type=}')
    print(f'{channel=}')
    print(f'{ray_cluster_config_path=}')

    try:
        execute_job(xml_path, image_data_input_path, output_s3_path, dataset_type, channel, ray_cluster_config_path)
    except KeyboardInterrupt:
        # execute_job re-raises KeyboardInterrupt after its own cleanup.
        print("\n👋 Goodbye!")
        sys.exit(0)
    except Exception as e:
        print(f"\n💥 Fatal error: {e}")
        sys.exit(1)
|
|
File without changes
|
|
@@ -0,0 +1,19 @@
|
|
|
1
|
+
"""
CZI to Zarr stack conversion
"""

# Package-level metadata dunders for aind-hcr-data-transformation.
__version__ = "0.0.5"
__authors__ = ["Camilo Laiton"]
__author_emails__ = [
    "camilo.laiton@alleninstitute.org",
]
__license__ = "MIT"
__description__ = "Repository for transforming CZI data to ome.zarr"
__url__ = "https://github.com/AllenNeuralDynamics/aind-hcr-data-transformation"

__maintainers__ = ["Camilo Laiton"]
__maintainer_emails__ = [
    "camilo.laiton@alleninstitute.org",
]
__title__ = "aind-hcr-data-transformation"
__status__ = "Production"  # 'Development' 'Production', 'Beta'
|