copick-utils 0.6.0__py3-none-any.whl → 0.6.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
copick_utils/__init__.py CHANGED
@@ -1,3 +1,4 @@
  # SPDX-FileCopyrightText: 2024-present Kyle Harrington <czi@kyleharrington.com>
  #
  # SPDX-License-Identifier: MIT
+ __version__ = "0.6.1"
copick_utils/features/skimage.py CHANGED
@@ -1,19 +1,30 @@
  import numpy as np
- from skimage.feature import multiscale_basic_features
  import zarr
  from numcodecs import Blosc
+ from skimage.feature import multiscale_basic_features
+

- def compute_skimage_features(tomogram, feature_type, copick_root, intensity=True, edges=True, texture=True, sigma_min=0.5, sigma_max=16.0, feature_chunk_size=None):
+ def compute_skimage_features(
+ tomogram,
+ feature_type,
+ copick_root,
+ intensity=True,
+ edges=True,
+ texture=True,
+ sigma_min=0.5,
+ sigma_max=16.0,
+ feature_chunk_size=None,
+ ):
  """
  Processes the tomogram chunkwise and computes the multiscale basic features.
  Allows for optional feature chunk size.
  """
- image = zarr.open(tomogram.zarr(), mode='r')['0']
+ image = zarr.open(tomogram.zarr(), mode="r")["0"]
  input_chunk_size = feature_chunk_size if feature_chunk_size else image.chunks
  chunk_size = input_chunk_size if len(input_chunk_size) == 3 else input_chunk_size[1:]
-
+
  overlap = int(chunk_size[0] / 2)
-
+
  print(f"Processing image with shape {image.shape}")
  print(f"Using chunk size: {chunk_size}, overlap: {overlap}")

@@ -25,7 +36,7 @@ def compute_skimage_features(tomogram, feature_type, copick_root, intensity=True
  edges=edges,
  texture=texture,
  sigma_min=sigma_min,
- sigma_max=sigma_max
+ sigma_max=sigma_max,
  )
  num_features = test_features.shape[-1]

@@ -43,10 +54,10 @@ def compute_skimage_features(tomogram, feature_type, copick_root, intensity=True
  out_array = zarr.create(
  shape=(num_features, *image.shape),
  chunks=feature_chunk_size,
- dtype='float32',
- compressor=Blosc(cname='zstd', clevel=3, shuffle=2),
+ dtype="float32",
+ compressor=Blosc(cname="zstd", clevel=3, shuffle=2),
  store=feature_store,
- overwrite=True
+ overwrite=True,
  )

  # Process each chunk
@@ -67,7 +78,7 @@ def compute_skimage_features(tomogram, feature_type, copick_root, intensity=True
  edges=edges,
  texture=texture,
  sigma_min=sigma_min,
- sigma_max=sigma_max
+ sigma_max=sigma_max,
  )

  # Adjust indices for overlap
@@ -78,7 +89,12 @@ def compute_skimage_features(tomogram, feature_type, copick_root, intensity=True
  # Ensure contiguous array and correct slicing
  contiguous_chunk = np.ascontiguousarray(chunk_features[z_slice, y_slice, x_slice].transpose(3, 0, 1, 2))

- out_array[0:num_features, z:z + chunk_size[0], y:y + chunk_size[1], x:x + chunk_size[2]] = contiguous_chunk
+ out_array[
+ 0:num_features,
+ z : z + chunk_size[0],
+ y : y + chunk_size[1],
+ x : x + chunk_size[2],
+ ] = contiguous_chunk

  print(f"Features saved under feature type '{feature_type}'")
  return copick_features
@@ -91,6 +107,10 @@ if __name__ == "__main__":
  tomogram=tomo,
  feature_type="skimageFeatures",
  copick_root=root,
- intensity=True, edges=True, texture=True, sigma_min=0.5, sigma_max=16.0,
- feature_chunk_size=None # Default to detected chunk size
+ intensity=True,
+ edges=True,
+ texture=True,
+ sigma_min=0.5,
+ sigma_max=16.0,
+ feature_chunk_size=None, # Default to detected chunk size
  )
copick_utils/io/readers.py CHANGED
@@ -1,33 +1,34 @@
  import numpy as np

- def tomogram(run,
- voxel_size: float = 10,
- algorithm: str = 'wbp',
- raise_error: bool = False):
-
+
+ def tomogram(run, voxel_size: float = 10, algorithm: str = "wbp", raise_error: bool = False):
  voxel_spacing_obj = run.get_voxel_spacing(voxel_size)

  if voxel_spacing_obj is None:
  # Query Avaiable Voxel Spacings
  availableVoxelSpacings = [tomo.voxel_size for tomo in run.voxel_spacings]

- # Report to the user which voxel spacings they can use
- message = (f"[Warning] No tomogram found for {run.name} with voxel size {voxel_size} and tomogram type {algorithm}"
- f"Available spacings are: {', '.join(map(str, availableVoxelSpacings))}" )
+ # Report to the user which voxel spacings they can use
+ message = (
+ f"[Warning] No tomogram found for {run.name} with voxel size {voxel_size} and tomogram type {algorithm}"
+ f"Available spacings are: {', '.join(map(str, availableVoxelSpacings))}"
+ )
  if raise_error:
  raise ValueError(message)
  else:
  print(message)
  return None
-
+
  tomogram = voxel_spacing_obj.get_tomogram(algorithm)
  if tomogram is None:
  # Get available algorithms
  availableAlgorithms = [tomo.tomo_type for tomo in run.get_voxel_spacing(voxel_size).tomograms]
-
+
  # Report to the user which algorithms are available
- message = (f"[Warning] No tomogram found for {run.name} with voxel size {voxel_size} and tomogram type {algorithm}"
- f"Available algorithms are: {', '.join(availableAlgorithms)}")
+ message = (
+ f"[Warning] No tomogram found for {run.name} with voxel size {voxel_size} and tomogram type {algorithm}"
+ f"Available algorithms are: {', '.join(availableAlgorithms)}"
+ )
  if raise_error:
  raise ValueError(message)
  else:
@@ -36,32 +37,29 @@ def tomogram(run,

  return tomogram.numpy()

- def segmentation(run,
- voxel_spacing: float,
- segmentation_name: str,
- session_id=None,
- user_id=None,
- raise_error = False):

- seg = run.get_segmentations(name=segmentation_name,
- session_id = session_id,
- user_id = user_id,
- voxel_size = voxel_spacing)
+ def segmentation(run, voxel_spacing: float, segmentation_name: str, session_id=None, user_id=None, raise_error=False):
+ seg = run.get_segmentations(
+ name=segmentation_name,
+ session_id=session_id,
+ user_id=user_id,
+ voxel_size=voxel_spacing,
+ )

  # No Segmentations Are Available, Result in Error
  if len(seg) == 0:
  # Get all available segmentations with their metadata
  available_segs = run.get_segmentations(voxel_size=voxel_spacing)
  seg_info = [(s.name, s.user_id, s.session_id) for s in available_segs]
-
+
  # Format the information for display
- seg_details = [f"(name: {name}, user_id: {uid}, session_id: {sid})"
- for name, uid, sid in seg_info]
-
- message = ( f'\nNo segmentation found matching:\n'
- f' name: {segmentation_name}, user_id: {user_id}, session_id: {session_id}\n'
- f'Available segmentations in {run.name} are:\n ' +
- '\n '.join(seg_details) )
+ seg_details = [f"(name: {name}, user_id: {uid}, session_id: {sid})" for name, uid, sid in seg_info]
+
+ message = (
+ f"\nNo segmentation found matching:\n"
+ f" name: {segmentation_name}, user_id: {user_id}, session_id: {session_id}\n"
+ f"Available segmentations in {run.name} are:\n " + "\n ".join(seg_details)
+ )
  if raise_error:
  raise ValueError(message)
  else:
@@ -70,20 +68,24 @@ def segmentation(run,

  # No Segmentations Are Available, Result in Error
  if len(seg) > 1:
- print(f'[Warning] More Than 1 Segmentation is Available for the Query Information. '
- f'Available Segmentations are: {seg} '
- f'Defaulting to Loading: {seg[0]}\n')
+ print(
+ f"[Warning] More Than 1 Segmentation is Available for the Query Information. "
+ f"Available Segmentations are: {seg} "
+ f"Defaulting to Loading: {seg[0]}\n",
+ )
  seg = seg[0]

  return seg.numpy()

- def coordinates(run, # CoPick run object containing the segmentation data
- name: str, # Name of the object or protein for which coordinates are being extracted
- user_id: str, # Identifier of the user that generated the picks
- session_id: str = None, # Identifier of the session that generated the picks
- voxel_size: float = 10, # Voxel size of the tomogram, used for scaling the coordinates
- raise_error: bool = False):
-
+
+ def coordinates(
+ run, # CoPick run object containing the segmentation data
+ name: str, # Name of the object or protein for which coordinates are being extracted
+ user_id: str, # Identifier of the user that generated the picks
+ session_id: str = None, # Identifier of the session that generated the picks
+ voxel_size: float = 10, # Voxel size of the tomogram, used for scaling the coordinates
+ raise_error: bool = False,
+ ):
  # Retrieve the pick points associated with the specified object and user ID
  picks = run.get_picks(object_name=name, user_id=user_id, session_id=session_id)

@@ -92,15 +94,15 @@ def coordinates(run, # CoPick run object containing the segm

  available_picks = run.get_picks()
  picks_info = [(s.pickable_object_name, s.user_id, s.session_id) for s in available_picks]
-
+
  # Format the information for display
- picks_details = [f"(name: {name}, user_id: {uid}, session_id: {sid})"
- for name, uid, sid in picks_info]
-
- message = ( f'\nNo picks found matching:\n'
- f' name: {name}, user_id: {user_id}, session_id: {session_id}\n'
- f'Available picks are:\n '
- + '\n '.join(picks_details) )
+ picks_details = [f"(name: {name}, user_id: {uid}, session_id: {sid})" for name, uid, sid in picks_info]
+
+ message = (
+ f"\nNo picks found matching:\n"
+ f" name: {name}, user_id: {user_id}, session_id: {session_id}\n"
+ f"Available picks are:\n " + "\n ".join(picks_details)
+ )
  if raise_error:
  raise ValueError(message)
  else:
@@ -109,24 +111,25 @@ def coordinates(run, # CoPick run object containing the segm
  elif len(picks) > 1:
  # Format pick information for display
  picks_info = [(p.pickable_object_name, p.user_id, p.session_id) for p in picks]
- picks_details = [f"(name: {name}, user_id: {uid}, session_id: {sid})"
- for name, uid, sid in picks_info]
+ picks_details = [f"(name: {name}, user_id: {uid}, session_id: {sid})" for name, uid, sid in picks_info]

- print(f'[Warning] More than 1 pick is available for the query information.'
- f'\nAvailable picks are:\n ' +
- '\n '.join(picks_details) +
- f'\nDefaulting to loading:\n {picks[0]}\n')
+ print(
+ "[Warning] More than 1 pick is available for the query information."
+ "\nAvailable picks are:\n " + "\n ".join(picks_details) + f"\nDefaulting to loading:\n {picks[0]}\n",
+ )
  points = picks[0].points

  # Initialize an array to store the coordinates
- nPoints = len(picks[0].points) # Number of points retrieved
- coordinates = np.zeros([len(picks[0].points), 3]) # Create an empty array to hold the (z, y, x) coordinates
+ nPoints = len(picks[0].points) # Number of points retrieved
+ coordinates = np.zeros([len(picks[0].points), 3]) # Create an empty array to hold the (z, y, x) coordinates

  # Iterate over all points and convert their locations to coordinates in voxel space
  for ii in range(nPoints):
- coordinates[ii,] = [points[ii].location.z / voxel_size, # Scale z-coordinate by voxel size
- points[ii].location.y / voxel_size, # Scale y-coordinate by voxel size
- points[ii].location.x / voxel_size] # Scale x-coordinate by voxel size
+ coordinates[ii,] = [
+ points[ii].location.z / voxel_size, # Scale z-coordinate by voxel size
+ points[ii].location.y / voxel_size, # Scale y-coordinate by voxel size
+ points[ii].location.x / voxel_size,
+ ] # Scale x-coordinate by voxel size

  # Return the array of coordinates
  return coordinates
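Usage note: the reader helpers above return plain NumPy data from a copick project. A minimal sketch, assuming the usual copick entry points (copick.from_file, root.get_run) and placeholder config path, run name, and object/user names ("TS_001", "membrane", "ribosome", "curation"):

import copick
from copick_utils.io import readers

root = copick.from_file("config.json")  # hypothetical config path
run = root.get_run("TS_001")            # hypothetical run name

volume = readers.tomogram(run, voxel_size=10, algorithm="wbp", raise_error=False)
labels = readers.segmentation(run, voxel_spacing=10, segmentation_name="membrane")
points = readers.coordinates(run, name="ribosome", user_id="curation", voxel_size=10)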
copick_utils/io/writers.py CHANGED
@@ -1,12 +1,7 @@
- from typing import Any, Dict, List
  import numpy as np

- def tomogram(
- run,
- input_volume,
- voxel_size=10,
- algorithm="wbp"
- ):
+
+ def tomogram(run, input_volume, voxel_size=10, algorithm="wbp"):
  """
  Writes a volumetric tomogram into an OME-Zarr format within a Copick directory.

@@ -26,17 +21,17 @@ def tomogram(
  copick.Tomogram
  The created or modified tomogram object.
  """
-
+
  # Retrieve or create voxel spacing
  voxel_spacing = run.get_voxel_spacing(voxel_size)
  if voxel_spacing is None:
  voxel_spacing = run.new_voxel_spacing(voxel_size=voxel_size)
-
+
  # Check if We Need to Create a New Tomogram for Given Algorithm
  tomogram = voxel_spacing.get_tomogram(algorithm)
  if tomogram is None:
  tomogram = voxel_spacing.new_tomogram(tomo_type=algorithm)
-
+
  # Write the tomogram data
  tomogram.from_numpy(input_volume)

@@ -48,7 +43,7 @@ def segmentation(
  name="segmentation",
  session_id="0",
  voxel_size=10,
- multilabel=True
+ multilabel=True,
  ):
  """
  Writes a segmentation into an OME-Zarr format within a Copick directory.
@@ -75,7 +70,7 @@ def segmentation(
  copick.Segmentation
  The created or modified segmentation object.
  """
-
+
  # Retrieve or create a segmentation
  segmentations = run.get_segmentations(name=name, user_id=user_id, session_id=session_id)

@@ -86,11 +81,11 @@
  name=name,
  session_id=session_id,
  is_multilabel=multilabel,
- user_id=user_id
+ user_id=user_id,
  )
  else:
  # Overwrite the current segmentation at the specified voxel size if it exists
  segmentation = next(seg for seg in segmentations if seg.voxel_size == voxel_size)
-
+
  # Write the segmentation data
  segmentation.from_numpy(segmentation_volume, dtype=np.uint8)
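Usage note: a short sketch of the writer helpers above, reusing root and run from the reader sketch. The array shapes and IDs are illustrative, and writers.segmentation is called with keyword arguments because only part of its signature appears in this diff:

import numpy as np
from copick_utils.io import writers

volume = np.random.rand(64, 128, 128).astype(np.float32)  # placeholder tomogram data
labels = np.zeros(volume.shape, dtype=np.uint8)            # placeholder segmentation labels

writers.tomogram(run, volume, voxel_size=10, algorithm="wbp")
writers.segmentation(
    run,
    segmentation_volume=labels,
    user_id="exampleUser",  # hypothetical user ID
    name="segmentation",
    session_id="0",
    voxel_size=10,
    multilabel=True,
)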
copick_utils/pickers/grid_picker.py CHANGED
@@ -2,6 +2,7 @@ import numpy as np
  import zarr
  from copick.models import CopickPoint

+
  def grid_picker(pickable_obj, run, tomogram, grid_spacing_factor, session_id="0", user_id="gridPicker"):
  """
  Creates a grid of picks for a pickable object based on a tomogram and grid spacing factor.
@@ -28,14 +29,14 @@ def grid_picker(pickable_obj, run, tomogram, grid_spacing_factor, session_id="0"
  grid_spacing = radius * grid_spacing_factor

  # Open the highest resolution of the tomogram
- image = zarr.open(tomogram.zarr(), mode='r')['0']
+ image = zarr.open(tomogram.zarr(), mode="r")["0"]

  # Create a grid of points
  points = []
  for z in np.arange(0, image.shape[0], grid_spacing):
  for y in np.arange(0, image.shape[1], grid_spacing):
  for x in np.arange(0, image.shape[2], grid_spacing):
- points.append(CopickPoint(location={'x': x, 'y': y, 'z': z}))
+ points.append(CopickPoint(location={"x": x, "y": y, "z": z}))

  # Save the picks
  pick_set = run.new_picks(obj_name, session_id, user_id)
@@ -45,6 +46,7 @@ def grid_picker(pickable_obj, run, tomogram, grid_spacing_factor, session_id="0"
  print(f"Saved {len(points)} grid points for object {obj_name}.")
  return pick_set

+
  if __name__ == "__main__":
  import copick

@@ -65,5 +67,4 @@ if __name__ == "__main__":
  tomogram = voxel_spacing_obj.get_tomogram(tomo_type)

  for pickable_obj in root.pickable_objects:
- create_grid_of_picks(pickable_obj, run, tomogram, grid_spacing_factor, session_id, user_id)
-
+ grid_picker(pickable_obj, run, tomogram, grid_spacing_factor, session_id, user_id)
copick_utils/segmentation/picks_from_segmentation.py CHANGED
@@ -1,11 +1,22 @@
  import numpy as np
  import scipy.ndimage as ndi
- from skimage.segmentation import watershed
  from skimage.measure import regionprops
- from skimage.morphology import binary_erosion, binary_dilation, ball
+ from skimage.morphology import ball, binary_dilation, binary_erosion
+ from skimage.segmentation import watershed


- def picks_from_segmentation(segmentation, segmentation_idx, maxima_filter_size, min_particle_size, max_particle_size, session_id, user_id, pickable_object, run, voxel_spacing=1):
+ def picks_from_segmentation(
+ segmentation,
+ segmentation_idx,
+ maxima_filter_size,
+ min_particle_size,
+ max_particle_size,
+ session_id,
+ user_id,
+ pickable_object,
+ run,
+ voxel_spacing=1,
+ ):
  """
  Process a specific label in the segmentation, extract centroids, and save them as picks.

@@ -36,7 +47,10 @@ def picks_from_segmentation(segmentation, segmentation_idx, maxima_filter_size,

  # Distance transform and local maxima detection
  distance = ndi.distance_transform_edt(dilated)
- local_max = (distance == ndi.maximum_filter(distance, footprint=np.ones((maxima_filter_size, maxima_filter_size, maxima_filter_size))))
+ local_max = distance == ndi.maximum_filter(
+ distance,
+ footprint=np.ones((maxima_filter_size, maxima_filter_size, maxima_filter_size)),
+ )

  # Watershed segmentation
  markers, _ = ndi.label(local_max)
@@ -55,7 +69,7 @@ def picks_from_segmentation(segmentation, segmentation_idx, maxima_filter_size,
  positions = np.array(all_centroids)[:, [2, 1, 0]] * voxel_spacing
  pick_set.from_numpy(positions=positions)
  pick_set.store()
-
+
  print(f"Centroids for label {segmentation_idx} saved successfully.")
  return pick_set
  else:
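Usage note: a minimal sketch of the watershed-based picker above, reusing run from the earlier sketches. The label index, filter size, and particle-size bounds are placeholder values, and pickable_object is assumed to be the pickable object's name:

from copick_utils.io import readers
from copick_utils.segmentation.picks_from_segmentation import picks_from_segmentation

labels = readers.segmentation(run, voxel_spacing=10, segmentation_name="membrane")
pick_set = picks_from_segmentation(
    segmentation=labels,
    segmentation_idx=1,          # label to extract centroids from (illustrative)
    maxima_filter_size=9,        # placeholder neighborhood size
    min_particle_size=100,
    max_particle_size=10000,
    session_id="0",
    user_id="segmentationPicks",
    pickable_object="ribosome",  # assumed to be the pickable object's name
    run=run,
    voxel_spacing=10,
)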
copick_utils/segmentation/segmentation_from_picks.py CHANGED
@@ -1,13 +1,9 @@
  import numpy as np
  import zarr
  from scipy.ndimage import zoom
- import copick

- def from_picks(pick,
- seg_volume,
- radius: float = 10.0,
- label_value: int = 1,
- voxel_spacing: float = 10):
+
+ def from_picks(pick, seg_volume, radius: float = 10.0, label_value: int = 1, voxel_spacing: float = 10):
  """
  Paints picks into a segmentation volume as spheres.

@@ -26,12 +22,13 @@ def from_picks(pick,
  Returns:
  --------
  numpy.ndarray
- The modified segmentation volume with spheres inserted at pick locations.
+ The modified segmentation volume with spheres inserted at pick locations.
  """
+
  def create_sphere(shape, center, radius, val):
  zc, yc, xc = center
  z, y, x = np.indices(shape)
- distance_sq = (x - xc)**2 + (y - yc)**2 + (z - zc)**2
+ distance_sq = (x - xc) ** 2 + (y - yc) ** 2 + (z - zc) ** 2
  sphere = np.zeros(shape, dtype=np.float32)
  sphere[distance_sq <= radius**2] = val
  return sphere
@@ -48,7 +45,11 @@ def from_picks(pick,
  # Paint each pick as a sphere
  for point in pick.points:
  # Convert the pick's location from angstroms to voxel units
- cx, cy, cz = point.location.x / voxel_spacing, point.location.y / voxel_spacing, point.location.z / voxel_spacing
+ cx, cy, cz = (
+ point.location.x / voxel_spacing,
+ point.location.y / voxel_spacing,
+ point.location.z / voxel_spacing,
+ )

  # Calculate subarray bounds
  xLow, xHigh = get_relative_target_coordinates(cx, delta, seg_volume.shape[2])
@@ -65,7 +66,10 @@ def from_picks(pick,
  sphere = create_sphere(subarray_shape, local_center, radius_voxel, label_value)

  # Assign Sphere to Segmentation Target Volume
- seg_volume[zLow:zHigh, yLow:yHigh, xLow:xHigh] = np.maximum(seg_volume[zLow:zHigh, yLow:yHigh, xLow:xHigh], sphere)
+ seg_volume[zLow:zHigh, yLow:yHigh, xLow:xHigh] = np.maximum(
+ seg_volume[zLow:zHigh, yLow:yHigh, xLow:xHigh],
+ sphere,
+ )

  return seg_volume

@@ -79,7 +83,17 @@ def downsample_to_exact_shape(array, target_shape):
  return zoom(array, zoom_factors, order=0)


- def segmentation_from_picks(radius, painting_segmentation_name, run, voxel_spacing, tomo_type, pickable_object, pick_set, user_id="paintedPicks", session_id="0"):
+ def segmentation_from_picks(
+ radius,
+ painting_segmentation_name,
+ run,
+ voxel_spacing,
+ tomo_type,
+ pickable_object,
+ pick_set,
+ user_id="paintedPicks",
+ session_id="0",
+ ):
  """
  Paints picks from a run into a multiscale segmentation array, representing them as spheres in 3D space.

@@ -115,7 +129,13 @@ def segmentation_from_picks(radius, painting_segmentation_name, run, voxel_spaci
  raise ValueError("Tomogram not found for the given parameters.")

  # Use copick to create a new segmentation if one does not exist
- segs = run.get_segmentations(user_id=user_id, session_id=session_id, is_multilabel=True, name=painting_segmentation_name, voxel_size=voxel_spacing)
+ segs = run.get_segmentations(
+ user_id=user_id,
+ session_id=session_id,
+ is_multilabel=True,
+ name=painting_segmentation_name,
+ voxel_size=voxel_spacing,
+ )
  if len(segs) == 0:
  seg = run.new_segmentation(voxel_spacing, painting_segmentation_name, session_id, True, user_id=user_id)
  else:
@@ -142,7 +162,7 @@ def segmentation_from_picks(radius, painting_segmentation_name, run, voxel_spaci
  segmentation_group[highest_res_name][:] = highest_res_seg

  # Downsample to create lower resolution scales
- multiscale_metadata = tomogram_zarr.attrs.get('multiscales', [{}])[0].get('datasets', [])
+ multiscale_metadata = tomogram_zarr.attrs.get("multiscales", [{}])[0].get("datasets", [])
  for level_index, level_metadata in enumerate(multiscale_metadata):
  if level_index == 0:
  continue
@@ -154,7 +174,13 @@ def segmentation_from_picks(radius, painting_segmentation_name, run, voxel_spaci
  scaled_array = downsample_to_exact_shape(highest_res_seg, expected_shape)

  # Create/overwrite the Zarr array for this level
- segmentation_group.create_dataset(level_name, shape=expected_shape, data=scaled_array, dtype=np.uint16, overwrite=True)
+ segmentation_group.create_dataset(
+ level_name,
+ shape=expected_shape,
+ data=scaled_array,
+ dtype=np.uint16,
+ overwrite=True,
+ )

  segmentation_group[level_name][:] = scaled_array

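Usage note: a sketch of driving segmentation_from_picks with the signature shown above, reusing root and run from the earlier sketches. The radius (in angstroms), names, and IDs are illustrative, and pickable_object and pick_set are assumed to come from root.pickable_objects and run.get_picks:

from copick_utils.segmentation.segmentation_from_picks import segmentation_from_picks

obj = root.pickable_objects[0]                                         # assumed pickable object
pick_set = run.get_picks(object_name=obj.name, user_id="curation")[0]  # hypothetical pick query

seg = segmentation_from_picks(
    radius=150,                                 # sphere radius in angstroms (illustrative)
    painting_segmentation_name="paintedPicks",
    run=run,
    voxel_spacing=10,
    tomo_type="wbp",
    pickable_object=obj,
    pick_set=pick_set,
    user_id="paintedPicks",
    session_id="0",
)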
copick_utils-0.6.0.dist-info/METADATA → copick_utils-0.6.1.dist-info/METADATA RENAMED
@@ -1,15 +1,25 @@
- Metadata-Version: 2.3
+ Metadata-Version: 2.4
  Name: copick-utils
- Version: 0.6.0
+ Version: 0.6.1
  Summary: Utilities for copick
- License: MIT
- Author: Kyle Harrington
- Author-email: czi@kyleharrington.com
- Requires-Python: >=3.9
+ Project-URL: Repository, https://github.com/KyleHarrington/copick-utils.git
+ Project-URL: Issues, https://github.com/KyleHarrington/copick-utils/issues
+ Project-URL: Documentation, https://github.com/KyleHarrington/copick-utils#readme
+ Author-email: Kyle Harrington <czi@kyleharrington.com>, Jonathan Schwartz <jonathan.schwartz@czii.org>
+ License: MIT License
+
+ Copyright (c) 2024-present Kyle Harrington <czi@kyleharrington.com>
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+ License-File: LICENSE
+ Keywords: annotation,copick,cryo-et,cryoet,tomography,utilities
  Classifier: Development Status :: 4 - Beta
  Classifier: License :: OSI Approved :: MIT License
  Classifier: Programming Language :: Python
- Classifier: Programming Language :: Python :: 3
  Classifier: Programming Language :: Python :: 3.9
  Classifier: Programming Language :: Python :: 3.10
  Classifier: Programming Language :: Python :: 3.11
@@ -17,10 +27,14 @@ Classifier: Programming Language :: Python :: 3.12
  Classifier: Programming Language :: Python :: 3.13
  Classifier: Programming Language :: Python :: Implementation :: CPython
  Classifier: Programming Language :: Python :: Implementation :: PyPy
- Requires-Dist: copick (>=0.8.0)
- Project-URL: Documentation, https://github.com/KyleHarrington/copick-utils#readme
- Project-URL: Issues, https://github.com/KyleHarrington/copick-utils/issues
- Project-URL: Source, https://github.com/KyleHarrington/copick-utils
+ Requires-Python: >=3.9
+ Requires-Dist: copick>=0.8.0
+ Provides-Extra: dev
+ Requires-Dist: black>=25.1.0; extra == 'dev'
+ Requires-Dist: hatch-vcs>=0.4.0; extra == 'dev'
+ Requires-Dist: hatchling>=1.25.0; extra == 'dev'
+ Requires-Dist: pre-commit>=4.2.0; extra == 'dev'
+ Requires-Dist: ruff>=0.12.0; extra == 'dev'
  Description-Content-Type: text/markdown

  # copick-utils
@@ -69,4 +83,3 @@ This project adheres to the Contributor Covenant [code of conduct](https://githu
  ## Reporting Security Issues

  If you believe you have found a security issue, please responsibly disclose by contacting us at [security@chanzuckerberg.com](mailto:security@chanzuckerberg.com).
-
copick_utils-0.6.1.dist-info/RECORD ADDED
@@ -0,0 +1,14 @@
+ copick_utils/__init__.py,sha256=FqcMzBIYexR9TCNHvUyjKXblBOdEaa9-pt1fv8TNEcA,135
+ copick_utils/features/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ copick_utils/features/skimage.py,sha256=Sz-348tTT44lLS825z14iIOM3L_tALXQctUF1HbnWnw,4209
+ copick_utils/io/readers.py,sha256=bE7IBPohNjsFgD6HRPTrWte6OjaJ0NrF4RS8Dwgf3nA,5435
+ copick_utils/io/writers.py,sha256=iYyNkpBgrD0_N0N-LoyCOfIrk46WHWocKvkUUQYXMRg,2985
+ copick_utils/pickers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ copick_utils/pickers/grid_picker.py,sha256=KKfdv3fDmeY7XwqiVADRQJibr1eyjYoG9ZpaihcrgHw,2345
+ copick_utils/segmentation/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ copick_utils/segmentation/picks_from_segmentation.py,sha256=Pu3079P4LNJPQAEwsYdrJ3K_X_i1zmo_xxbu16goKAo,3007
+ copick_utils/segmentation/segmentation_from_picks.py,sha256=oeP9NdOYcRATbpeKf3SkbdqbRuUEZCOPArl-KBseYc0,6991
+ copick_utils-0.6.1.dist-info/METADATA,sha256=GXk4KjbVjumU8HK6nCwUuWjqR9AMrV4Vc_32S2GR5IQ,4246
+ copick_utils-0.6.1.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+ copick_utils-0.6.1.dist-info/licenses/LICENSE,sha256=3UHKsYd99Gh_qf1a9s8G5sdKqafgbGs5WIMoeX0OcdY,1105
+ copick_utils-0.6.1.dist-info/RECORD,,
copick_utils-0.6.0.dist-info/WHEEL → copick_utils-0.6.1.dist-info/WHEEL RENAMED
@@ -1,4 +1,4 @@
  Wheel-Version: 1.0
- Generator: poetry-core 2.1.3
+ Generator: hatchling 1.27.0
  Root-Is-Purelib: true
  Tag: py3-none-any
copick_utils/__about__.py DELETED
@@ -1,4 +0,0 @@
- # SPDX-FileCopyrightText: 2024-present Kyle Harrington <czi@kyleharrington.com>
- #
- # SPDX-License-Identifier: MIT
- __version__ = "0.0.1"
@@ -1,15 +0,0 @@
- copick_utils/__about__.py,sha256=7D13PJEcpdhEa6RrlKLt7IEkoVVVGuzqHJb5MQxgLiI,135
- copick_utils/__init__.py,sha256=v-RIkEuGuAXivakLMrneraDQd7cWN7zsdGLmjwLtDDw,113
- copick_utils/features/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- copick_utils/features/skimage.py,sha256=t38jpu-ntC7Zw--1qSxWhGIg9e02RLhVBZEySIa5dQs,4036
- copick_utils/io/readers.py,sha256=ajqe64e5tp67iZoqSV6AsjpGX25Rdz_q0tX8qXVVp7g,5902
- copick_utils/io/writers.py,sha256=KhdZUUZrZR02bzS5SWM9q-u4y2s1qxcySwN3MN5j5YA,3055
- copick_utils/pickers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- copick_utils/pickers/grid_picker.py,sha256=NhFbWxMQREb0fLKTho4602yzH7zE6DCkJY94dgJ-gIQ,2353
- copick_utils/segmentation/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- copick_utils/segmentation/picks_from_segmentation.py,sha256=Ne_RYfnEFaV_qwNw4uOwPfLLgGxHMEQ2uecp9X2_EVc,2951
- copick_utils/segmentation/segmentation_from_picks.py,sha256=3oIlFWRAR904j5IpC5Fo8Y4gP9iL6qUiwybRgJvhzmc,6820
- copick_utils-0.6.0.dist-info/LICENSE.txt,sha256=3UHKsYd99Gh_qf1a9s8G5sdKqafgbGs5WIMoeX0OcdY,1105
- copick_utils-0.6.0.dist-info/METADATA,sha256=TwbWK0CmHk8pB7tI401f3khfIG8mMEtTENOg2b1brOY,2742
- copick_utils-0.6.0.dist-info/WHEEL,sha256=b4K_helf-jlQoXBBETfwnf4B04YC67LOev0jo4fX5m8,88
- copick_utils-0.6.0.dist-info/RECORD,,