copick-utils 1.0.2__py3-none-any.whl → 1.2.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
copick_utils/__init__.py CHANGED
@@ -1,4 +1,4 @@
1
1
  # SPDX-FileCopyrightText: 2024-present Kyle Harrington <czi@kyleharrington.com>
2
2
  #
3
3
  # SPDX-License-Identifier: MIT
4
- __version__ = "1.0.2"
4
+ __version__ = "1.2.0"
@@ -0,0 +1,34 @@
1
+ import click
2
+
3
+
4
@click.command(
    context_settings={"show_default": True},
    short_help="Download tilt series and alignments from the CryoET Data Portal.",
    no_args_is_help=True,
)
@click.option(
    "-ds",
    "--dataset",
    required=True,
    type=str,
    help="Dataset ID to download from the CryoET Data Portal.",
)
@click.option(
    "-o",
    "--output",
    required=True,
    # NOTE(review): required=True together with default="." is contradictory — the
    # default can never apply because click demands an explicit value; confirm intent.
    default=".",
    type=str,
    help="Output directory to save the downloaded files.",
)
def project(dataset: str, output: str):
    """
    Download tilt series and alignments from the CryoET Data Portal for sub-tomogram averaging with py2rely.
    """
    # Thin CLI wrapper: all work is delegated so the logic stays importable/testable.
    download_project(dataset, output)
29
+
30
+
31
def download_project(dataset: str, output: str):
    """Fetch the AreTomo-related files for *dataset* into *output* via the portal I/O helpers."""
    # Imported lazily so the CLI module can load even when the portal extras are absent.
    import copick_utils.io.portal as portal

    portal.download_aretomo_files(dataset, output)
@@ -0,0 +1,11 @@
1
+ """CLI commands for downloading data from the CryoET Data Portal.
2
+
3
+ This module imports all download commands from specialized files for better organization.
4
+ """
5
+
6
+ from copick_utils.cli.download import project
7
+
8
+ # All commands are now available for import by the main CLI
9
+ __all__ = [
10
+ "project",
11
+ ]
@@ -5,6 +5,7 @@ from copick_utils.cli.fit_spline import fit_spline
5
5
  from copick_utils.cli.hull import hull
6
6
  from copick_utils.cli.separate_components import separate_components
7
7
  from copick_utils.cli.skeletonize import skeletonize
8
+ from copick_utils.cli.split_labels import split
8
9
  from copick_utils.cli.validbox import validbox
9
10
 
10
11
  # All commands are now available for import by the main CLI
@@ -15,4 +16,5 @@ __all__ = [
15
16
  "separate_components",
16
17
  "filter_components",
17
18
  "fit_spline",
19
+ "split",
18
20
  ]
@@ -0,0 +1,148 @@
1
+ """CLI command for splitting multilabel segmentations into individual single-class segmentations."""
2
+
3
+ import click
4
+ import copick
5
+ from click_option_group import optgroup
6
+ from copick.cli.util import add_config_option, add_debug_option
7
+ from copick.util.log import get_logger
8
+ from copick.util.uri import parse_copick_uri
9
+
10
+ from copick_utils.cli.util import add_input_option, add_workers_option
11
+
12
+
13
@click.command(
    context_settings={"show_default": True},
    short_help="Split multilabel segmentations into single-class segmentations.",
    no_args_is_help=True,
)
@add_config_option
@optgroup.group("\nInput Options", help="Options related to the input segmentation.")
@optgroup.option(
    "--run-names",
    "-r",
    multiple=True,
    help="Specific run names to process (default: all runs).",
)
@add_input_option("segmentation")
@optgroup.group("\nTool Options", help="Options related to this tool.")
@add_workers_option
@optgroup.group("\nOutput Options", help="Options related to output segmentations.")
@optgroup.option(
    "--output-user-id",
    type=str,
    default="split",
    help="User ID for output segmentations.",
)
@add_debug_option
def split(
    config,
    run_names,
    input_uri,
    workers,
    output_user_id,
    debug,
):
    """
    Split multilabel segmentations into individual single-class binary segmentations.

    This command takes a multilabel segmentation and creates separate binary segmentations
    for each label value. Each output segmentation is named after the corresponding
    PickableObject (as defined in the copick config) and uses the same session ID as
    the input.

    \b
    URI Format:
        Segmentations: name:user_id/session_id@voxel_spacing

    \b
    Label-to-Object Mapping:
        The tool looks up each label value in the pickable_objects configuration
        and uses the object name for the output segmentation:
        - Label 1 (ribosome) → ribosome:split/session-001@10.0
        - Label 2 (membrane) → membrane:split/session-001@10.0
        - Label 3 (proteasome) → proteasome:split/session-001@10.0

    \b
    Examples:
        # Split multilabel segmentation (outputs named by pickable objects)
        copick process split -i "predictions:model/run-001@10.0"

        # Split with custom output user ID
        copick process split -i "classes:annotator/manual@10.0" --output-user-id "per-class"

        # Process specific runs only
        copick process split -i "labels:*/*@10.0" --run-names TS_001 --run-names TS_002
    """

    logger = get_logger(__name__, debug=debug)

    root = copick.from_file(config)
    # click gives a tuple for multiple=True; downstream expects a list or None.
    run_names_list = list(run_names) if run_names else None

    # Parse input URI; surface malformed URIs as a CLI usage error rather than a traceback.
    try:
        input_params = parse_copick_uri(input_uri, "segmentation")
    except ValueError as e:
        raise click.BadParameter(f"Invalid input URI: {e}") from e

    segmentation_name = input_params["name"]
    segmentation_user_id = input_params["user_id"]
    segmentation_session_id = input_params["session_id"]
    voxel_spacing = input_params.get("voxel_spacing")

    # Splitting needs one concrete voxel spacing — a wildcard would be ambiguous.
    if voxel_spacing is None or voxel_spacing == "*":
        raise click.BadParameter("Input URI must include a specific voxel spacing (e.g., @10.0)")

    # Check for patterns in critical fields: the batch splitter addresses exactly one
    # input segmentation per run, so wildcards are rejected up front.
    if "*" in segmentation_name or "*" in segmentation_user_id or "*" in segmentation_session_id:
        raise click.BadParameter(
            "Input URI cannot contain wildcards for splitting. "
            "Please specify exact segmentation name, user_id, and session_id.",
        )

    logger.info(f"Splitting multilabel segmentation '{segmentation_name}'")
    logger.debug(f"Input: {segmentation_user_id}/{segmentation_session_id} @ {voxel_spacing}Å")
    logger.debug(f"Output user ID: {output_user_id}")
    logger.debug(f"Workers: {workers}")

    # Import batch function lazily to keep CLI start-up fast.
    from copick_utils.process.split_labels import split_labels_batch

    # Process runs; results maps run name → per-run stats dict (or a falsy value on failure).
    results = split_labels_batch(
        root=root,
        segmentation_name=segmentation_name,
        segmentation_user_id=segmentation_user_id,
        segmentation_session_id=segmentation_session_id,
        voxel_spacing=float(voxel_spacing),
        output_user_id=output_user_id,
        run_names=run_names_list,
        workers=workers,
    )

    # Aggregate results across runs for the summary log lines below.
    successful = sum(1 for result in results.values() if result and result.get("processed", 0) > 0)
    total_labels = sum(result.get("labels_split", 0) for result in results.values() if result)

    # Collect all unique object names created
    all_object_names = set()
    for result in results.values():
        if result and result.get("object_names"):
            all_object_names.update(result["object_names"])

    # Collect all errors
    all_errors = []
    for result in results.values():
        if result and result.get("errors"):
            all_errors.extend(result["errors"])

    logger.info(f"Completed: {successful}/{len(results)} runs processed successfully")
    logger.info(f"Total labels split: {total_labels}")
    logger.info(f"Object names created: {', '.join(sorted(all_object_names))}")

    if all_errors:
        logger.warning(f"Encountered {len(all_errors)} errors during processing")
        for error in all_errors[:5]:  # Show first 5 errors
            logger.warning(f" - {error}")
        if len(all_errors) > 5:
            logger.warning(f" ... and {len(all_errors) - 5} more errors")
@@ -0,0 +1,149 @@
1
+ """
2
+ A minimal example using minimal libraries / imports to download relevant AreTomo files
3
+ from the CryoET Data Portal. Downloads the corresponding files, using the run ID as the
4
+ base filename.
5
+
6
+ Original implementation by Daniel Ji and Utz Ermel.
7
+ """
8
+ import multiprocessing
9
+ import os
10
+
11
+ import cryoet_data_portal as cdp
12
+ import mdocfile
13
+ import numpy as np
14
+ import pandas as pd
15
+ import requests
16
+ import s3fs
17
+
18
+ global_client = cdp.Client()
19
+
20
+
21
def download_aretomo_files(dataset_id: int, output_dir: str, processes: int = 8):
    """
    Download AreTomo-related files for every tilt series in a portal dataset.

    Enumerates all tilt series of the dataset and fans the per-tiltseries
    downloads out over a process pool.

    Parameters:
    -----------
    dataset_id: int - CryoET Data Portal dataset ID.
    output_dir: str - Directory to write the downloaded files into.
    processes: int - Number of parallel download workers (default: 8; was
        previously hard-coded).
    """
    print(f"Fetching tiltseries for dataset id {dataset_id}...", flush=True)
    tiltseries_list: list[cdp.TiltSeries] = [
        tiltseries for run in cdp.Dataset.get_by_id(global_client, dataset_id).runs for tiltseries in run.tiltseries
    ]  # a bit slow for some reason, can take some time
    # NOTE(review): the first tuple element is run.id, not the run *name*; the worker
    # only uses it for logging because tiltseries_id takes priority — confirm intent.
    tiltseries_run_ids_and_ts_ids = [(ts.run.id, ts.id) for ts in tiltseries_list]
    print(
        f"Found {len(tiltseries_run_ids_and_ts_ids)} tiltseries for dataset id {dataset_id}. Starting downloads...",
        flush=True,
    )
    tasks = [
        (dataset_id, run_name, output_dir, tiltseries_id)
        for run_name, tiltseries_id in tiltseries_run_ids_and_ts_ids
    ]
    with multiprocessing.Pool(processes=processes) as pool:
        # Drain the iterator so every download finishes before the pool is closed.
        for _ in pool.imap_unordered(_worker_download_aretomo_files_for_tiltseries, tasks):
            pass
40
+
41
+
42
def _worker_download_aretomo_files_for_tiltseries(args):
    """Pool adapter: unpack one (dataset_id, run_name, output_dir, tiltseries_id) task tuple."""
    download_aretomo_files_for_tiltseries(*args)
45
+
46
+
47
# note: this function assumes that there is only one tiltseries per run
# note: the tiltseries name is equivalent to the run name
# if tiltseries_id is provided, will be prioritized over dataset_id + run_name
def download_aretomo_files_for_tiltseries(dataset_id: int, run_name: str, output_dir: str, tiltseries_id: int = None):
    """
    Download the AreTomo-related files for a single tilt series.

    Downloads into ``output_dir`` (renamed to the run ID): tilt-angle files
    (*.tlt / *.rawtlt), a CTF text file (as ``<run_id>_CTF.txt``), the alignment
    *.aln file (only when exactly one alignment exists), the *.mdoc file from
    the frames folder, and the tilt series .mrc via HTTPS. Finally writes an
    order-list CSV under ``<run_id>_Imod/``.

    # NOTE(review): output_dir is assumed to already exist — only the Imod
    # subdirectory is created here; confirm callers create it.

    Parameters:
    -----------
    dataset_id: int - Portal dataset ID (used only when tiltseries_id is not given).
    run_name: str - Run name used to look up the tilt series (and for logging).
    output_dir: str - Destination directory for all downloads.
    tiltseries_id: int - Optional tilt series ID; takes priority over dataset_id + run_name.
    """
    print(f"[{run_name}] Downloading AreTomo files for tiltseries id {tiltseries_id}...", flush=True)

    client = cdp.Client()
    s3 = s3fs.S3FileSystem(anon=True)
    if not tiltseries_id:
        # Resolve the tilt series by (dataset, run name); enforce the one-per-run assumption.
        all_tiltseries = cdp.TiltSeries.find(
            client,
            query_filters=[cdp.TiltSeries.run.dataset_id == dataset_id, cdp.TiltSeries.run.name == run_name],
        )
        if len(all_tiltseries) == 0:
            raise ValueError(f"No tiltseries found for dataset_id {dataset_id} and run_name {run_name}")
        if len(all_tiltseries) > 1:
            raise ValueError(f"Multiple tiltseries found for dataset_id {dataset_id} and run_name {run_name}")
        tiltseries = all_tiltseries[0]
    else:
        tiltseries = cdp.TiltSeries.get_by_id(client, tiltseries_id)

    # get the s3 folder path and then glob for *.tlt / *.rawtlt files to download them, renaming the base to match the run id
    s3_folder_path = tiltseries.s3_mrc_file.rsplit("/", 1)[0] + "/"
    tlt_files = s3.glob(s3_folder_path + "*.tlt") + s3.glob(s3_folder_path + "*.rawtlt")
    for tlt_file in tlt_files:
        base_name = os.path.basename(tlt_file)
        ext = os.path.splitext(base_name)[1]
        dest_file = os.path.join(output_dir, f"{tiltseries.run.id}{ext}")
        s3.get(tlt_file, dest_file)
        print(f"[{tiltseries.run.id}] Downloaded {base_name} as {os.path.basename(dest_file)}.", flush=True)

    # do the same for "*CTF*.txt" files and "*ctf*.txt" files
    ctf_files = s3.glob(s3_folder_path + "*CTF*.txt") + s3.glob(s3_folder_path + "*ctf*.txt")
    if len(ctf_files) == 0:
        print(f"WARNING: No CTF files found for tiltseries id {tiltseries.id}")
    else:
        # Best-effort: if several CTF files match, the first glob hit is used.
        ctf_file = ctf_files[0]
        base_name = os.path.basename(ctf_file)
        if len(ctf_files) > 1:
            print(f"WARNING: Multiple CTF files found for tiltseries id {tiltseries.id}, using {base_name}")
        ext = os.path.splitext(base_name)[1]
        dest_file = os.path.join(output_dir, f"{tiltseries.run.id}_CTF.txt")
        s3.get(ctf_file, dest_file)
        print(f"[{tiltseries.run.id}] Downloaded {base_name} as {os.path.basename(dest_file)}.", flush=True)

    # now find the corresponding alignment for this tiltseries and download the "*.aln" file
    if len(tiltseries.alignments) == 0:
        print(f"WARNING: No alignments found for tiltseries id {tiltseries.id}")
    elif len(tiltseries.alignments) > 1:
        # NOTE: with multiple alignments no .aln is downloaded at all — only a warning.
        print(f"WARNING: Multiple alignments found for tiltseries id {tiltseries.id}")
    else:
        alignment = tiltseries.alignments[0]
        s3_alignment_folder_path = alignment.s3_alignment_metadata.rsplit("/", 1)[0] + "/"
        aln_files = s3.glob(s3_alignment_folder_path + "*.aln")
        if len(aln_files) == 0:
            raise ValueError(f"No .aln files found for run name {tiltseries.run.name} and alignment id {alignment.id}")
        aln_file = aln_files[0]
        base_name = os.path.basename(aln_file)
        if len(aln_files) > 1:
            print(f"WARNING: Multiple .aln files found for run name {tiltseries.run.name}, using {base_name}")
        ext = os.path.splitext(base_name)[1]
        dest_file = os.path.join(output_dir, f"{tiltseries.run.id}{ext}")
        s3.get(aln_file, dest_file)
        print(f"[{tiltseries.run.id}] Downloaded {base_name} as {os.path.basename(dest_file)}.", flush=True)

    # now get the mdoc file from the Frames/ folder
    frames = tiltseries.run.frames
    if len(frames) == 0:
        raise ValueError(f"No frames found for run name {tiltseries.run.name}")
    frame = frames[0]
    s3_frames_folder_path = frame.s3_frame_path.rsplit("/", 1)[0] + "/"
    mdoc_files = s3.glob(s3_frames_folder_path + "*.mdoc")
    if len(mdoc_files) == 0:
        raise ValueError(f"No .mdoc files found for run name {tiltseries.run.name}")
    mdoc_file = mdoc_files[0]
    base_name = os.path.basename(mdoc_file)
    if len(mdoc_files) > 1:
        print(f"WARNING: Multiple .mdoc files found for run name {tiltseries.run.name}, using {base_name}")
    # assumes the mdoc file's last suffix is ".mdoc" so the renamed copy is
    # <run_id>.mdoc, which the read below relies on — TODO confirm.
    ext = os.path.splitext(base_name)[1]
    dest_file = os.path.join(output_dir, f"{tiltseries.run.id}{ext}")
    s3.get(mdoc_file, dest_file)
    print(f"[{tiltseries.run.id}] Downloaded {base_name} as {os.path.basename(dest_file)}.", flush=True)

    # download tiltseries mrc file (streamed over HTTPS in 8 KiB chunks)
    tiltseries_file = os.path.join(output_dir, f"{tiltseries.run.id}.mrc")
    tiltseries_url = tiltseries.https_mrc_file
    response = requests.get(tiltseries_url, stream=True)
    response.raise_for_status()
    with open(tiltseries_file, "wb") as f:
        for chunk in response.iter_content(chunk_size=8192):
            f.write(chunk)
    print(f"[{tiltseries.run.id}] Downloaded tiltseries mrc file as {os.path.basename(tiltseries_file)}.", flush=True)

    # create imod file for order list
    mdoc = mdocfile.read(os.path.join(output_dir, f"{tiltseries.run.id}.mdoc"))
    order_list = mdoc["TiltAngle"]
    imodpath = os.path.join(output_dir, f"{tiltseries.run.id}_Imod")
    os.makedirs(imodpath, exist_ok=True)
    # 1-based image numbers, one per tilt angle.
    number = np.arange(len(order_list)) + 1

    # save in csv with 'ImageNumber', 'TiltAngle' headers
    df = pd.DataFrame({"ImageNumber": number, "TiltAngle": order_list})
    df.to_csv(os.path.join(imodpath, f"{tiltseries.run.id}_order_list.csv"), index=False)
@@ -1,82 +1,125 @@
1
1
  import numpy as np
2
-
3
-
4
- def tomogram(run, voxel_size: float = 10, algorithm: str = "wbp", raise_error: bool = False):
5
- voxel_spacing_obj = run.get_voxel_spacing(voxel_size)
6
-
7
- if voxel_spacing_obj is None:
8
- # Query Avaiable Voxel Spacings
9
- availableVoxelSpacings = [tomo.voxel_size for tomo in run.voxel_spacings]
10
-
11
- # Report to the user which voxel spacings they can use
12
- message = (
13
- f"[Warning] No tomogram found for {run.name} with voxel size {voxel_size} and tomogram type {algorithm}"
14
- f"Available spacings are: {', '.join(map(str, availableVoxelSpacings))}"
15
- )
16
- if raise_error:
17
- raise ValueError(message)
18
- else:
19
- print(message)
2
+ from copick.util.uri import resolve_copick_objects
3
+
4
+
5
def tomogram(run, voxel_size: float = 10, algorithm: str = "wbp", raise_error: bool = False, verbose=True):
    """
    Reads a tomogram from a Copick run.

    Parameters:
    -----------
    run: copick.Run
    voxel_size: float - Voxel spacing (in Å) of the tomogram to load.
    algorithm: str - Reconstruction type (e.g. "wbp").
    raise_error: bool - Raise ValueError instead of printing when nothing is found.
    verbose: bool - Print diagnostic messages when raise_error is False.
    Returns:
    --------
    vol: np.ndarray - The tomogram, or None if not found.
    """

    # Get the tomogram from the Copick URI
    try:
        uri = f"{algorithm}@{voxel_size}"
        vol = resolve_copick_objects(uri, run.root, "tomogram", run_name=run.name)
        return vol[0].numpy()
    except Exception as err:  # Report which object is missing
        # Fall back to direct lookup so we can tell the user what IS available.
        voxel_spacing_obj = run.get_voxel_spacing(voxel_size)

        if voxel_spacing_obj is None:
            # Query available voxel spacings
            availableVoxelSpacings = [tomo.voxel_size for tomo in run.voxel_spacings]

            # Report to the user which voxel spacings they can use
            message = (
                f"[Warning] No tomogram found for {run.name} with uri: {uri}\n"
                f"Available voxel sizes are: {', '.join(map(str, availableVoxelSpacings))}"
            )
            if raise_error:
                raise ValueError(message) from err
            elif verbose:
                print(message)
            return None

        tomogram = voxel_spacing_obj.get_tomogram(algorithm)
        if tomogram is None:
            # Get available algorithms
            availableAlgorithms = [tomo.tomo_type for tomo in run.get_voxel_spacing(voxel_size).tomograms]

            # Report to the user which algorithms are available
            message = (
                f"[Warning] No tomogram found for {run.name} with uri: {uri}\n"
                f"Available algorithms @{voxel_size}A are: {', '.join(availableAlgorithms)}"
            )
            if raise_error:
                raise ValueError(message) from err
            elif verbose:
                print(message)
            return None

        # BUGFIX: the fallback path previously located the tomogram but fell
        # through without returning it (implicitly returning None); return its
        # data as the pre-URI implementation did.
        return tomogram.numpy()
39
61
 
62
def segmentation(run, voxel_spacing: float, name: str, user_id=None, session_id=None, raise_error=False, verbose=True):
    """
    Reads a segmentation from a Copick run.

    Parameters:
    -----------
    run: copick.Run
    voxel_spacing: float - Voxel spacing (in Å) of the segmentation to load.
    name: str - Segmentation name.
    user_id: str - Optional user ID filter.
    session_id: str - Optional session ID filter (only used when user_id is given).
    raise_error: bool - Raise ValueError instead of printing when nothing is found.
    verbose: bool - Print diagnostic messages when raise_error is False.
    Returns:
    --------
    seg: np.ndarray - The segmentation, or None if not found.
    """

    # Construct the Target URI (only include the parts that were provided)
    if session_id is None and user_id is None:
        uri = f"{name}@{voxel_spacing}"
    elif session_id is None:
        uri = f"{name}:{user_id}@{voxel_spacing}"
    else:
        uri = f"{name}:{user_id}/{session_id}@{voxel_spacing}"

    # Try to resolve the segmentation using the Copick URI
    try:
        segs = resolve_copick_objects(uri, run.root, "segmentation", run_name=run.name)
        return segs[0].numpy()
    except Exception as err:
        # Force the voxel spacing to be a float
        voxel_spacing = float(voxel_spacing)

        # Get all available segmentations with their metadata
        available_segs = run.get_segmentations(voxel_size=voxel_spacing)

        if len(available_segs) == 0:
            # Nothing at this spacing: list which voxel sizes do have segmentations.
            available_segs = run.get_segmentations()
            # BUGFIX: repaired garbled message text ("avaiable w/following").
            message = (
                f"No segmentation found for URI: {uri}\n"
                f"Available segmentations have the following voxel sizes: {', '.join(map(str, [s.voxel_size for s in available_segs]))}"
            )
        else:
            seg_info = [(s.name, s.user_id, s.session_id) for s in available_segs]

            # Format the information for display
            seg_details = [f"(name: {name}, user_id: {uid}, session_id: {sid})" for name, uid, sid in seg_info]

            message = (
                f"\nNo segmentation at {voxel_spacing} A found matching:\n"
                f"  name: {name}, user_id: {user_id}, session_id: {session_id}\n"
                f"Available segmentations in {run.name} are:\n  " + "\n  ".join(seg_details)
            )
        if raise_error:
            raise ValueError(message) from err
        elif verbose:
            print(message)

        return None
68
122
 
69
- # No Segmentations Are Available, Result in Error
70
- if len(seg) > 1:
71
- print(
72
- f"[Warning] More Than 1 Segmentation is Available for the Query Information. "
73
- f"Available Segmentations are: {seg} "
74
- f"Defaulting to Loading: {seg[0]}\n",
75
- )
76
- seg = seg[0]
77
-
78
- return seg.numpy()
79
-
80
123
 
81
124
  def coordinates(
82
125
  run, # CoPick run object containing the segmentation data
@@ -85,7 +128,25 @@ def coordinates(
85
128
  session_id: str = None, # Identifier of the session that generated the picks
86
129
  voxel_size: float = 10, # Voxel size of the tomogram, used for scaling the coordinates
87
130
  raise_error: bool = False,
131
+ verbose: bool = True,
88
132
  ):
133
+ """
134
+ Reads the coordinates of the picks from a Copick run.
135
+
136
+ Parameters:
137
+ -----------
138
+ run: copick.Run
139
+ name: str
140
+ user_id: str
141
+ session_id: str
142
+ voxel_size: float
143
+ raise_error: bool
144
+ verbose: bool
145
+
146
+ Returns:
147
+ --------
148
+ coordinates: np.ndarray - The 3D coordinates of the picks in voxel space.
149
+ """
89
150
  # Retrieve the pick points associated with the specified object and user ID
90
151
  picks = run.get_picks(object_name=name, user_id=user_id, session_id=session_id)
91
152
 
@@ -105,18 +166,22 @@ def coordinates(
105
166
  )
106
167
  if raise_error:
107
168
  raise ValueError(message)
108
- else:
169
+ elif verbose:
109
170
  print(message)
110
- return None
171
+ return None
172
+
111
173
  elif len(picks) > 1:
112
174
  # Format pick information for display
113
175
  picks_info = [(p.pickable_object_name, p.user_id, p.session_id) for p in picks]
114
176
  picks_details = [f"(name: {name}, user_id: {uid}, session_id: {sid})" for name, uid, sid in picks_info]
115
177
 
116
- print(
117
- "[Warning] More than 1 pick is available for the query information."
118
- "\nAvailable picks are:\n " + "\n ".join(picks_details) + f"\nDefaulting to loading:\n {picks[0]}\n",
119
- )
178
+ if verbose:
179
+ print(
180
+ "[Warning] More than 1 pick is available for the query information."
181
+ "\nAvailable picks are:\n " + "\n ".join(picks_details) + f"\n"
182
+ f"Defaulting to loading:\n {picks[0]}\n",
183
+ )
184
+
120
185
  points = picks[0].points
121
186
 
122
187
  # Initialize an array to store the coordinates
@@ -28,17 +28,17 @@ def tomogram(run, input_volume, voxel_size=10, algorithm="wbp"):
28
28
  voxel_spacing = run.new_voxel_spacing(voxel_size=voxel_size)
29
29
 
30
30
  # Check if We Need to Create a New Tomogram for Given Algorithm
31
- tomogram = voxel_spacing.get_tomogram(algorithm)
32
- if tomogram is None:
33
- tomogram = voxel_spacing.new_tomogram(tomo_type=algorithm)
31
+ tomo = voxel_spacing.get_tomogram(algorithm)
32
+ if tomo is None:
33
+ tomo = voxel_spacing.new_tomogram(tomo_type=algorithm)
34
34
 
35
35
  # Write the tomogram data
36
- tomogram.from_numpy(input_volume)
36
+ tomo.from_numpy(input_volume)
37
37
 
38
38
 
39
39
  def segmentation(
40
40
  run,
41
- segmentation_volume,
41
+ seg_vol,
42
42
  user_id,
43
43
  name="segmentation",
44
44
  session_id="0",
@@ -52,7 +52,7 @@ def segmentation(
52
52
  -----------
53
53
  run : copick.Run
54
54
  The current Copick run object.
55
- segmentation_volume : np.ndarray
55
+ seg_vol : np.ndarray
56
56
  The segmentation data to be written.
57
57
  user_id : str
58
58
  The ID of the user creating the segmentation.
@@ -76,7 +76,7 @@ def segmentation(
76
76
 
77
77
  # If no segmentation exists or no segmentation at the given voxel size, create a new one
78
78
  if len(segmentations) == 0 or any(seg.voxel_size != voxel_size for seg in segmentations):
79
- segmentation = run.new_segmentation(
79
+ seg = run.new_segmentation(
80
80
  voxel_size=voxel_size,
81
81
  name=name,
82
82
  session_id=session_id,
@@ -85,7 +85,7 @@ def segmentation(
85
85
  )
86
86
  else:
87
87
  # Overwrite the current segmentation at the specified voxel size if it exists
88
- segmentation = next(seg for seg in segmentations if seg.voxel_size == voxel_size)
88
+ seg = next(seg for seg in segmentations if seg.voxel_size == voxel_size)
89
89
 
90
90
  # Write the segmentation data
91
- segmentation.from_numpy(segmentation_volume, dtype=np.uint8)
91
+ seg.from_numpy(seg_vol, dtype=np.uint8)
@@ -9,13 +9,11 @@ from .connected_components import (
9
9
  )
10
10
  from .skeletonize import (
11
11
  TubeSkeletonizer3D,
12
- find_matching_segmentations,
13
12
  skeletonize_batch,
14
13
  skeletonize_segmentation,
15
14
  )
16
15
  from .spline_fitting import (
17
16
  SkeletonSplineFitter,
18
- find_matching_segmentations_for_spline,
19
17
  fit_spline_batch,
20
18
  fit_spline_to_segmentation,
21
19
  fit_spline_to_skeleton,
@@ -34,12 +32,10 @@ __all__ = [
34
32
  "separate_components_batch",
35
33
  "TubeSkeletonizer3D",
36
34
  "skeletonize_segmentation",
37
- "find_matching_segmentations",
38
35
  "skeletonize_batch",
39
36
  "SkeletonSplineFitter",
40
37
  "fit_spline_to_skeleton",
41
38
  "fit_spline_to_segmentation",
42
- "find_matching_segmentations_for_spline",
43
39
  "fit_spline_batch",
44
40
  "create_validbox_mesh",
45
41
  "generate_validbox",
@@ -0,0 +1,214 @@
1
+ """Split multilabel segmentations into individual single-class segmentations."""
2
+
3
+ from typing import TYPE_CHECKING, Any, Dict, List, Optional
4
+
5
+ import numpy as np
6
+ from copick.util.log import get_logger
7
+
8
+ if TYPE_CHECKING:
9
+ from copick.models import CopickRoot, CopickRun, CopickSegmentation
10
+
11
+ logger = get_logger(__name__)
12
+
13
+
14
def split_multilabel_segmentation(
    segmentation: "CopickSegmentation",
    run: "CopickRun",
    output_user_id: str = "split",
) -> List["CopickSegmentation"]:
    """
    Split a multilabel segmentation into individual single-class binary segmentations.

    For each label value in the multilabel segmentation, this function looks up the
    corresponding PickableObject and creates a binary segmentation named after that object.

    Args:
        segmentation: Input multilabel segmentation to split
        run: CopickRun object containing the segmentation
        output_user_id: User ID for output segmentations (default: "split")

    Returns:
        List of created CopickSegmentation objects, one per label found in the input

    Raises:
        ValueError: If the segmentation data cannot be loaded or is empty.
    """
    # Load segmentation volume
    volume = segmentation.numpy()
    if volume is None:
        raise ValueError("Could not load segmentation data")

    if volume.size == 0:
        raise ValueError("Empty segmentation data")

    # Get root to access pickable objects configuration
    root = run.root
    voxel_size = segmentation.voxel_size
    input_session_id = segmentation.session_id

    # Find all unique non-zero labels
    unique_labels = np.unique(volume)
    unique_labels = unique_labels[unique_labels > 0]  # Skip background (0)

    logger.debug(f"Found {len(unique_labels)} unique labels: {unique_labels.tolist()}")

    output_segmentations = []

    # Process each label
    for label_value in unique_labels:
        # Look up the PickableObject with this label
        # (label_value is a numpy scalar; == comparison with the config's int label works)
        pickable_obj = next((obj for obj in root.config.pickable_objects if obj.label == label_value), None)

        if pickable_obj is None:
            # Unknown label: fall back to using the numeric label as the output name.
            logger.warning(f"No pickable object found for label {label_value}, using label value as name")
            object_name = str(label_value)
        else:
            object_name = pickable_obj.name
            logger.debug(f"Label {label_value} → object '{object_name}'")

        # Create binary mask for this label
        binary_mask = (volume == label_value).astype(np.uint8)
        voxel_count = int(np.sum(binary_mask))

        if voxel_count == 0:
            logger.warning(f"Label {label_value} has no voxels, skipping")
            continue

        logger.debug(f"Creating segmentation for '{object_name}' with {voxel_count} voxels")

        # Create output segmentation; failures for one label do not abort the others.
        try:
            output_seg = run.new_segmentation(
                name=object_name,
                user_id=output_user_id,
                session_id=input_session_id,
                is_multilabel=False,
                voxel_size=voxel_size,
                exist_ok=True,
            )

            # Store the binary mask
            output_seg.from_numpy(binary_mask)
            output_segmentations.append(output_seg)

            logger.debug(f"Successfully created segmentation '{object_name}:{output_user_id}/{input_session_id}'")

        except Exception as e:
            logger.exception(f"Failed to create segmentation for label {label_value} ('{object_name}'): {e}")
            continue

    # Log single-line summary
    if output_segmentations:
        object_names = [seg.name for seg in output_segmentations]
        logger.info(f"Run '{run.name}': Split {len(output_segmentations)} labels → {', '.join(object_names)}")

    return output_segmentations
103
+
104
+
105
def _split_labels_worker(
    run: "CopickRun",
    segmentation_name: str,
    segmentation_user_id: str,
    segmentation_session_id: str,
    voxel_spacing: float,
    output_user_id: str,
) -> Dict[str, Any]:
    """
    Worker function for batch splitting of multilabel segmentations.

    Args:
        run: CopickRun to process
        segmentation_name: Name of the input segmentation
        segmentation_user_id: User ID of the input segmentation
        segmentation_session_id: Session ID of the input segmentation
        voxel_spacing: Voxel spacing of the segmentation
        output_user_id: User ID for output segmentations

    Returns:
        Dictionary with processing results and statistics
    """
    try:
        # Locate the exact multilabel input for this run.
        matches = run.get_segmentations(
            name=segmentation_name,
            user_id=segmentation_user_id,
            session_id=segmentation_session_id,
            voxel_size=voxel_spacing,
            is_multilabel=True,
        )
        if not matches:
            return {"processed": 0, "errors": [f"No multilabel segmentation found for run {run.name}"]}

        source = matches[0]

        # Guard: refuse to split a segmentation that is not flagged multilabel.
        if not source.is_multilabel:
            return {
                "processed": 0,
                "errors": [f"Segmentation in run {run.name} is not multilabel (is_multilabel=False)"],
            }

        # Perform the split and summarize what was created.
        created = split_multilabel_segmentation(
            segmentation=source,
            run=run,
            output_user_id=output_user_id,
        )

        return {
            "processed": 1,
            "errors": [],
            "labels_split": len(created),
            "object_names": [seg.name for seg in created],
        }

    except Exception as e:
        logger.exception(f"Error processing run {run.name}: {e}")
        return {"processed": 0, "errors": [f"Error processing run {run.name}: {e}"]}
169
+
170
+
171
def split_labels_batch(
    root: "CopickRoot",
    segmentation_name: str,
    segmentation_user_id: str,
    segmentation_session_id: str,
    voxel_spacing: float,
    output_user_id: str = "split",
    run_names: Optional[List[str]] = None,
    workers: int = 8,
) -> Dict[str, Any]:
    """
    Batch split multilabel segmentations across multiple runs.

    Args:
        root: The copick root containing runs to process
        segmentation_name: Name of the input segmentation
        segmentation_user_id: User ID of the input segmentation
        segmentation_session_id: Session ID of the input segmentation
        voxel_spacing: Voxel spacing in angstroms
        output_user_id: User ID for output segmentations (default: "split")
        run_names: List of run names to process. If None, processes all runs.
        workers: Number of worker processes (default: 8)

    Returns:
        Dictionary with processing results and statistics per run
    """
    from copick.ops.run import map_runs

    # Default to every run in the project when no explicit selection is given.
    if run_names is None:
        selected_runs = [run.name for run in root.runs]
    else:
        selected_runs = run_names

    # Delegate parallelization to copick's map_runs helper.
    return map_runs(
        callback=_split_labels_worker,
        root=root,
        runs=selected_runs,
        workers=workers,
        task_desc="Splitting multilabel segmentations",
        segmentation_name=segmentation_name,
        segmentation_user_id=segmentation_user_id,
        segmentation_session_id=segmentation_session_id,
        voxel_spacing=voxel_spacing,
        output_user_id=output_user_id,
    )
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: copick-utils
3
- Version: 1.0.2
3
+ Version: 1.2.0
4
4
  Summary: Utilities for copick
5
5
  Project-URL: Repository, https://github.com/KyleHarrington/copick-utils.git
6
6
  Project-URL: Issues, https://github.com/KyleHarrington/copick-utils/issues
@@ -33,6 +33,7 @@ Requires-Dist: click-option-group
33
33
  Requires-Dist: copick>=1.16.0
34
34
  Requires-Dist: manifold3d
35
35
  Requires-Dist: mapbox-earcut
36
+ Requires-Dist: mdocfile
36
37
  Requires-Dist: numpy
37
38
  Requires-Dist: rtree
38
39
  Requires-Dist: scikit-image
@@ -1,9 +1,11 @@
1
- copick_utils/__init__.py,sha256=KV4Of88sktfBOKjUJLLuaknxcX88-X8dHO3hfmY1TKQ,135
1
+ copick_utils/__init__.py,sha256=XY1t2MuZi-EFIR_wNRRTMCYSR3FyJStb4E0oxUyQoA8,135
2
2
  copick_utils/cli/__init__.py,sha256=51i1xC-N2laWLdZLdpsKMSB9LdO3BEdqDdWIbyxJ8f0,634
3
3
  copick_utils/cli/clipmesh.py,sha256=M1MP8lw16diGHajipFXbZDQ4ZrsTvFiqAaHji0Smmtc,6172
4
4
  copick_utils/cli/clippicks.py,sha256=7lmEZj8GDyVqc9l_rvGhAFTYWUhSEnwZcWu8w9LStBU,5857
5
5
  copick_utils/cli/clipseg.py,sha256=atx01VUBdk5xLSwEHh43bN7HEKV2bjVgDmaNaLwJQ5E,6437
6
6
  copick_utils/cli/conversion_commands.py,sha256=7I729S_9R7l67NzwfKaEWZAxcq0NbBhflZHKxTXaZhk,1045
7
+ copick_utils/cli/download.py,sha256=mwGlfpHHOIcGWRKCwfNrOs97udsAv8dvOh2d9-If7Bc,831
8
+ copick_utils/cli/download_commands.py,sha256=XgDyoXAA79c0w30HuUWQEucmVH73YpOIjsnkYnUN7Es,298
7
9
  copick_utils/cli/enclosed.py,sha256=gsJxapWCqH4IOMDyTTd_akv4L1qzuBfbRJEDBES6Wik,7209
8
10
  copick_utils/cli/filter_components.py,sha256=hOs8h0648Ws-rzVxTh7w8Futjk4aEOpavoqrJMvr-2Q,6163
9
11
  copick_utils/cli/fit_spline.py,sha256=BdLujzR9sen6IGlmZ9pGbhqs6Kjzk5MuAgmq3ewyMhk,6491
@@ -21,12 +23,13 @@ copick_utils/cli/picks2sphere.py,sha256=P-xFFDJdEowxHF20aLMO2Xymv7VHAaP4paBbgPWe
21
23
  copick_utils/cli/picks2surface.py,sha256=vXprZa3tHndSE3Sp1yPOEqyv2kRkAbV7QlHjF8oE1zg,5712
22
24
  copick_utils/cli/picksin.py,sha256=SD32-BJoxB2dGQU54ksuq2rCmcGjnQLldE0hrnV8pLA,5485
23
25
  copick_utils/cli/picksout.py,sha256=odm2JeTyDzBrXkr2SCsbE3REGj589bCgPOq3ldj8soA,5568
24
- copick_utils/cli/processing_commands.py,sha256=s9Mn0mvKQ60XQN6o6eyO2x7qZX_Hpq0K3kPwMuLHlKY,576
26
+ copick_utils/cli/processing_commands.py,sha256=3-zZhyM279_mQB8Z4-XM0DDowF8YK3i5cFOhUaw7MOM,637
25
27
  copick_utils/cli/seg2mesh.py,sha256=9fv3zaVHxAS4VhhlvU6qJ7W7wWjAYDedzDNeiaMZxTk,4732
26
28
  copick_utils/cli/seg2picks.py,sha256=05RUsOJETnOc9zMmW_x3DVr1t2F01w2e2R8sAaJrHg8,4549
27
29
  copick_utils/cli/segop.py,sha256=K_ju0MB4n-k5-wkOI7gOvkHCUslgHZCZUOFn-k2ta9M,9372
28
30
  copick_utils/cli/separate_components.py,sha256=m0c5v4ZXSeoUURcVoY7jCE6I_jGbTdZXqEoLUiURIHs,5670
29
31
  copick_utils/cli/skeletonize.py,sha256=CV_J9nibqLpKmckNRG6-7zlq6ROqPBLk166yDzhxJ5w,5646
32
+ copick_utils/cli/split_labels.py,sha256=J09-qAPLtzcmid6yln37zu9HwVRYz8wLzJ6PvHWgEgU,5443
30
33
  copick_utils/cli/util.py,sha256=tne45hU9dL0AepQhV9KKqkKjAMAfweR9_GLGHJnisnc,18011
31
34
  copick_utils/cli/validbox.py,sha256=Z-HY73QnWwxL-cLHLiIWNnYz4t-4jf63ho8pfk38xXU,5364
32
35
  copick_utils/converters/__init__.py,sha256=f3_UJJ2jJqkOAaEy6y4i3x1baFZEem9aTrGGdBQgXFQ,1701
@@ -45,8 +48,9 @@ copick_utils/converters/surface_from_picks.py,sha256=PHRVtdaDikygT3kmTJKUnw2Gvj_
45
48
  copick_utils/features/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
46
49
  copick_utils/features/skimage.py,sha256=Sz-348tTT44lLS825z14iIOM3L_tALXQctUF1HbnWnw,4209
47
50
  copick_utils/io/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
48
- copick_utils/io/readers.py,sha256=bE7IBPohNjsFgD6HRPTrWte6OjaJ0NrF4RS8Dwgf3nA,5435
49
- copick_utils/io/writers.py,sha256=iYyNkpBgrD0_N0N-LoyCOfIrk46WHWocKvkUUQYXMRg,2985
51
+ copick_utils/io/portal.py,sha256=fqk0ihdrkM8ayc5tfG5EIleYqKC-JehxL88TcYNySq8,7264
52
+ copick_utils/io/readers.py,sha256=Ll1NPEasPbddQK-oE7XIBC2EV15VuzGeK-V2oApl9Cg,7278
53
+ copick_utils/io/writers.py,sha256=mv34fLZLKa4q5cBJ2pP1tenwDz8qWoahk0suCcpsK-w,2906
50
54
  copick_utils/logical/__init__.py,sha256=coOdLd7vH6TCsoHj9T1Hj5hqnfiGM5CrVkmjVB-HdbM,1181
51
55
  copick_utils/logical/distance_operations.py,sha256=cVn8YywwrgNEaKJxnZ6KbsJwwx7fJ2VDDaUEgat0b9A,24071
52
56
  copick_utils/logical/enclosed_operations.py,sha256=f388K37vS4wCN5D0FL5lnvO29pd23pmC0LlXhNuNsS0,8583
@@ -55,17 +59,18 @@ copick_utils/logical/point_operations.py,sha256=a2KrTqSuXeb-GNvFywN7iDuATkTDsSz0
55
59
  copick_utils/logical/segmentation_operations.py,sha256=AlPHq3t7Hw46svW6qBnr3i1PPulcNvDNikoYD7_RnrI,12090
56
60
  copick_utils/pickers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
57
61
  copick_utils/pickers/grid_picker.py,sha256=KKfdv3fDmeY7XwqiVADRQJibr1eyjYoG9ZpaihcrgHw,2345
58
- copick_utils/process/__init__.py,sha256=ERh5Ka4vU0_ub4_UZFDWOXbr-my79s3RekfS9dV41wE,1227
62
+ copick_utils/process/__init__.py,sha256=lbjTjGxGQZdQgzt_ommpmCwety6tIWtGRp7tKzzAuDk,1069
59
63
  copick_utils/process/connected_components.py,sha256=9h_Feu3wxYWW84evaUmW73_UY0fGmTszS55R8oljjEo,13601
60
64
  copick_utils/process/filter_components.py,sha256=dU0l0PJ6ZejY6ANqsrDRtQ1RevdKWn6Txl4nr0sBq14,10587
61
65
  copick_utils/process/hull.py,sha256=tLah4_fcwIuiy9Kaly5sVVDHltGBh6euyO3i9W99pms,3191
62
66
  copick_utils/process/skeletonize.py,sha256=gLQgQdYlyuqFHyC19vzx8-gna15UiAM_1i85hF-mkMg,11713
63
67
  copick_utils/process/spline_fitting.py,sha256=6pBr4w7cKZb5xaKaU3b1R5A6cIVDrCTRKTVVUL4vDSI,25025
68
+ copick_utils/process/split_labels.py,sha256=5BZw4gzh57_4DgmYErwSwjWzBJKkiYbf3qu4On2MvCU,7352
64
69
  copick_utils/process/validbox.py,sha256=5uEbVjmyNJxp__2XJal1jP7Ecpqv2uZsBkAy26zpxx8,9494
65
70
  copick_utils/util/__init__.py,sha256=V76KYVdhERpiXkCsaap4WQBbvfXG04x3vGvdSayzpmk,190
66
71
  copick_utils/util/config_models.py,sha256=vzih8OsmvKQk8rULYyY6f6yiRHUnk48KKrCES5Pvq4w,21964
67
- copick_utils-1.0.2.dist-info/METADATA,sha256=D0LJF7w4_FX3kByXK8ee_Qn10sDSNgSZpBYZ47xrcaw,4562
68
- copick_utils-1.0.2.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
69
- copick_utils-1.0.2.dist-info/entry_points.txt,sha256=qDi4JAAncjmkB86tEivSpc9QVr6YbdFC3T0SwIabNaM,1504
70
- copick_utils-1.0.2.dist-info/licenses/LICENSE,sha256=3UHKsYd99Gh_qf1a9s8G5sdKqafgbGs5WIMoeX0OcdY,1105
71
- copick_utils-1.0.2.dist-info/RECORD,,
72
+ copick_utils-1.2.0.dist-info/METADATA,sha256=90qt-hq06uei8nx3nEIa8sGHpimmm59xcYJqBQMJ2fI,4586
73
+ copick_utils-1.2.0.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
74
+ copick_utils-1.2.0.dist-info/entry_points.txt,sha256=Vve0xfXIa463rKfI6ELKATtHzacLxNvgWffyLkYIrzQ,1636
75
+ copick_utils-1.2.0.dist-info/licenses/LICENSE,sha256=3UHKsYd99Gh_qf1a9s8G5sdKqafgbGs5WIMoeX0OcdY,1105
76
+ copick_utils-1.2.0.dist-info/RECORD,,
@@ -10,6 +10,9 @@ picks2surface = copick_utils.cli.conversion_commands:picks2surface
10
10
  seg2mesh = copick_utils.cli.conversion_commands:seg2mesh
11
11
  seg2picks = copick_utils.cli.conversion_commands:seg2picks
12
12
 
13
+ [copick.download.commands]
14
+ project = copick_utils.cli.download_commands:project
15
+
13
16
  [copick.logical.commands]
14
17
  clipmesh = copick_utils.cli.logical_commands:clipmesh
15
18
  clippicks = copick_utils.cli.logical_commands:clippicks
@@ -26,4 +29,5 @@ fit_spline = copick_utils.cli.processing_commands:fit_spline
26
29
  hull = copick_utils.cli.processing_commands:hull
27
30
  separate_components = copick_utils.cli.processing_commands:separate_components
28
31
  skeletonize = copick_utils.cli.processing_commands:skeletonize
32
+ split = copick_utils.cli.processing_commands:split
29
33
  validbox = copick_utils.cli.processing_commands:validbox