copick-utils 0.5.0__py3-none-any.whl → 0.6.1__py3-none-any.whl
This diff shows the changes between two publicly released versions of this package, as they appear in their respective public registries. It is provided for informational purposes only.
- copick_utils/__init__.py +1 -0
- copick_utils/features/skimage.py +33 -13
- copick_utils/io/readers.py +135 -0
- copick_utils/{writers/write.py → io/writers.py} +9 -14
- copick_utils/pickers/grid_picker.py +5 -4
- copick_utils/segmentation/picks_from_segmentation.py +19 -5
- copick_utils/segmentation/segmentation_from_picks.py +40 -14
- {copick_utils-0.5.0.dist-info → copick_utils-0.6.1.dist-info}/METADATA +25 -13
- copick_utils-0.6.1.dist-info/RECORD +14 -0
- {copick_utils-0.5.0.dist-info → copick_utils-0.6.1.dist-info}/WHEEL +1 -1
- copick_utils/__about__.py +0 -4
- copick_utils/writers/__init__.py +0 -0
- copick_utils-0.5.0.dist-info/RECORD +0 -15
- /copick_utils-0.5.0.dist-info/LICENSE.txt → /copick_utils-0.6.1.dist-info/licenses/LICENSE +0 -0
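Note that the write helpers moved from `copick_utils/writers/write.py` to `copick_utils/io/writers.py`, so downstream imports need updating. A minimal sketch of the old and new import paths, assuming the module-level function names (`tomogram`, `segmentation`) are reused as the renamed file's diff below shows:

```python
# copick-utils 0.5.0 layout:
#   from copick_utils.writers.write import segmentation, tomogram

# copick-utils 0.6.1 layout:
from copick_utils.io.writers import segmentation, tomogram  # relocated write helpers
from copick_utils.io import readers  # new in 0.6.1: tomogram/segmentation/coordinates readers
```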
copick_utils/__init__.py
CHANGED
copick_utils/features/skimage.py
CHANGED
@@ -1,19 +1,30 @@
 import numpy as np
-from skimage.feature import multiscale_basic_features
 import zarr
 from numcodecs import Blosc
+from skimage.feature import multiscale_basic_features
+
 
-def compute_skimage_features(
+def compute_skimage_features(
+    tomogram,
+    feature_type,
+    copick_root,
+    intensity=True,
+    edges=True,
+    texture=True,
+    sigma_min=0.5,
+    sigma_max=16.0,
+    feature_chunk_size=None,
+):
     """
     Processes the tomogram chunkwise and computes the multiscale basic features.
     Allows for optional feature chunk size.
     """
-    image = zarr.open(tomogram.zarr(), mode=
+    image = zarr.open(tomogram.zarr(), mode="r")["0"]
     input_chunk_size = feature_chunk_size if feature_chunk_size else image.chunks
     chunk_size = input_chunk_size if len(input_chunk_size) == 3 else input_chunk_size[1:]
-
+
     overlap = int(chunk_size[0] / 2)
-
+
     print(f"Processing image with shape {image.shape}")
     print(f"Using chunk size: {chunk_size}, overlap: {overlap}")
 
@@ -25,7 +36,7 @@ def compute_skimage_features(tomogram, feature_type, copick_root, intensity=True
         edges=edges,
         texture=texture,
         sigma_min=sigma_min,
-        sigma_max=sigma_max
+        sigma_max=sigma_max,
     )
     num_features = test_features.shape[-1]
 
@@ -43,10 +54,10 @@ def compute_skimage_features(tomogram, feature_type, copick_root, intensity=True
     out_array = zarr.create(
         shape=(num_features, *image.shape),
         chunks=feature_chunk_size,
-        dtype=
-        compressor=Blosc(cname=
+        dtype="float32",
+        compressor=Blosc(cname="zstd", clevel=3, shuffle=2),
         store=feature_store,
-        overwrite=True
+        overwrite=True,
     )
 
     # Process each chunk
@@ -67,7 +78,7 @@ def compute_skimage_features(tomogram, feature_type, copick_root, intensity=True
             edges=edges,
             texture=texture,
             sigma_min=sigma_min,
-            sigma_max=sigma_max
+            sigma_max=sigma_max,
         )
 
         # Adjust indices for overlap
@@ -78,7 +89,12 @@ def compute_skimage_features(tomogram, feature_type, copick_root, intensity=True
         # Ensure contiguous array and correct slicing
         contiguous_chunk = np.ascontiguousarray(chunk_features[z_slice, y_slice, x_slice].transpose(3, 0, 1, 2))
 
-        out_array[
+        out_array[
+            0:num_features,
+            z : z + chunk_size[0],
+            y : y + chunk_size[1],
+            x : x + chunk_size[2],
+        ] = contiguous_chunk
 
     print(f"Features saved under feature type '{feature_type}'")
     return copick_features
@@ -91,6 +107,10 @@ if __name__ == "__main__":
         tomogram=tomo,
         feature_type="skimageFeatures",
         copick_root=root,
-        intensity=True,
-
+        intensity=True,
+        edges=True,
+        texture=True,
+        sigma_min=0.5,
+        sigma_max=16.0,
+        feature_chunk_size=None,  # Default to detected chunk size
     )
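For orientation, a hedged sketch of calling the reworked signature; `copick.from_file`, the `config.json` path, the `TS_001` run name, and the voxel-spacing/algorithm values are placeholders or assumptions, while the keyword arguments mirror the new signature shown in the hunk above:

```python
import copick
from copick_utils.features.skimage import compute_skimage_features

root = copick.from_file("config.json")                # placeholder copick project config
run = root.get_run("TS_001")                          # placeholder run name
tomo = run.get_voxel_spacing(10).get_tomogram("wbp")  # accessors mirrored from this diff

features = compute_skimage_features(
    tomogram=tomo,
    feature_type="skimageFeatures",
    copick_root=root,
    intensity=True,
    edges=True,
    texture=True,
    sigma_min=0.5,
    sigma_max=16.0,
    feature_chunk_size=None,  # fall back to the tomogram's own chunking
)
```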
copick_utils/io/readers.py
ADDED
@@ -0,0 +1,135 @@
+import numpy as np
+
+
+def tomogram(run, voxel_size: float = 10, algorithm: str = "wbp", raise_error: bool = False):
+    voxel_spacing_obj = run.get_voxel_spacing(voxel_size)
+
+    if voxel_spacing_obj is None:
+        # Query Avaiable Voxel Spacings
+        availableVoxelSpacings = [tomo.voxel_size for tomo in run.voxel_spacings]
+
+        # Report to the user which voxel spacings they can use
+        message = (
+            f"[Warning] No tomogram found for {run.name} with voxel size {voxel_size} and tomogram type {algorithm}"
+            f"Available spacings are: {', '.join(map(str, availableVoxelSpacings))}"
+        )
+        if raise_error:
+            raise ValueError(message)
+        else:
+            print(message)
+            return None
+
+    tomogram = voxel_spacing_obj.get_tomogram(algorithm)
+    if tomogram is None:
+        # Get available algorithms
+        availableAlgorithms = [tomo.tomo_type for tomo in run.get_voxel_spacing(voxel_size).tomograms]
+
+        # Report to the user which algorithms are available
+        message = (
+            f"[Warning] No tomogram found for {run.name} with voxel size {voxel_size} and tomogram type {algorithm}"
+            f"Available algorithms are: {', '.join(availableAlgorithms)}"
+        )
+        if raise_error:
+            raise ValueError(message)
+        else:
+            print(message)
+            return None
+
+    return tomogram.numpy()
+
+
+def segmentation(run, voxel_spacing: float, segmentation_name: str, session_id=None, user_id=None, raise_error=False):
+    seg = run.get_segmentations(
+        name=segmentation_name,
+        session_id=session_id,
+        user_id=user_id,
+        voxel_size=voxel_spacing,
+    )
+
+    # No Segmentations Are Available, Result in Error
+    if len(seg) == 0:
+        # Get all available segmentations with their metadata
+        available_segs = run.get_segmentations(voxel_size=voxel_spacing)
+        seg_info = [(s.name, s.user_id, s.session_id) for s in available_segs]
+
+        # Format the information for display
+        seg_details = [f"(name: {name}, user_id: {uid}, session_id: {sid})" for name, uid, sid in seg_info]
+
+        message = (
+            f"\nNo segmentation found matching:\n"
+            f" name: {segmentation_name}, user_id: {user_id}, session_id: {session_id}\n"
+            f"Available segmentations in {run.name} are:\n " + "\n ".join(seg_details)
+        )
+        if raise_error:
+            raise ValueError(message)
+        else:
+            print(message)
+            return None
+
+    # No Segmentations Are Available, Result in Error
+    if len(seg) > 1:
+        print(
+            f"[Warning] More Than 1 Segmentation is Available for the Query Information. "
+            f"Available Segmentations are: {seg} "
+            f"Defaulting to Loading: {seg[0]}\n",
+        )
+    seg = seg[0]
+
+    return seg.numpy()
+
+
+def coordinates(
+    run,  # CoPick run object containing the segmentation data
+    name: str,  # Name of the object or protein for which coordinates are being extracted
+    user_id: str,  # Identifier of the user that generated the picks
+    session_id: str = None,  # Identifier of the session that generated the picks
+    voxel_size: float = 10,  # Voxel size of the tomogram, used for scaling the coordinates
+    raise_error: bool = False,
+):
+    # Retrieve the pick points associated with the specified object and user ID
+    picks = run.get_picks(object_name=name, user_id=user_id, session_id=session_id)
+
+    if len(picks) == 0:
+        # Get all available segmentations with their metadata
+
+        available_picks = run.get_picks()
+        picks_info = [(s.pickable_object_name, s.user_id, s.session_id) for s in available_picks]
+
+        # Format the information for display
+        picks_details = [f"(name: {name}, user_id: {uid}, session_id: {sid})" for name, uid, sid in picks_info]
+
+        message = (
+            f"\nNo picks found matching:\n"
+            f" name: {name}, user_id: {user_id}, session_id: {session_id}\n"
+            f"Available picks are:\n " + "\n ".join(picks_details)
+        )
+        if raise_error:
+            raise ValueError(message)
+        else:
+            print(message)
+            return None
+    elif len(picks) > 1:
+        # Format pick information for display
+        picks_info = [(p.pickable_object_name, p.user_id, p.session_id) for p in picks]
+        picks_details = [f"(name: {name}, user_id: {uid}, session_id: {sid})" for name, uid, sid in picks_info]
+
+        print(
+            "[Warning] More than 1 pick is available for the query information."
+            "\nAvailable picks are:\n " + "\n ".join(picks_details) + f"\nDefaulting to loading:\n {picks[0]}\n",
+        )
+    points = picks[0].points
+
+    # Initialize an array to store the coordinates
+    nPoints = len(picks[0].points)  # Number of points retrieved
+    coordinates = np.zeros([len(picks[0].points), 3])  # Create an empty array to hold the (z, y, x) coordinates
+
+    # Iterate over all points and convert their locations to coordinates in voxel space
+    for ii in range(nPoints):
+        coordinates[ii,] = [
+            points[ii].location.z / voxel_size,  # Scale z-coordinate by voxel size
+            points[ii].location.y / voxel_size,  # Scale y-coordinate by voxel size
+            points[ii].location.x / voxel_size,
+        ]  # Scale x-coordinate by voxel size
+
+    # Return the array of coordinates
+    return coordinates
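A short sketch of how the new reader helpers might be used; the `config.json` path, run name, and object/segmentation/user names are placeholders:

```python
import copick
from copick_utils.io import readers

root = copick.from_file("config.json")  # placeholder project config
run = root.get_run("TS_001")            # placeholder run name

# Each reader returns a numpy array, or None (after printing a message) when
# raise_error=False and nothing matches the query.
vol = readers.tomogram(run, voxel_size=10, algorithm="wbp", raise_error=False)
mask = readers.segmentation(run, voxel_spacing=10, segmentation_name="membrane")      # placeholder name
points_zyx = readers.coordinates(run, name="ribosome", user_id="curation", voxel_size=10)  # (z, y, x) in voxels
```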
copick_utils/writers/write.py → copick_utils/io/writers.py
RENAMED
@@ -1,12 +1,7 @@
-from typing import Any, Dict, List
 import numpy as np
 
-
-
-    input_volume,
-    voxel_size=10,
-    algorithm="wbp"
-):
+
+def tomogram(run, input_volume, voxel_size=10, algorithm="wbp"):
     """
     Writes a volumetric tomogram into an OME-Zarr format within a Copick directory.
 
@@ -26,17 +21,17 @@ def tomogram(
     copick.Tomogram
         The created or modified tomogram object.
     """
-
+
     # Retrieve or create voxel spacing
     voxel_spacing = run.get_voxel_spacing(voxel_size)
     if voxel_spacing is None:
         voxel_spacing = run.new_voxel_spacing(voxel_size=voxel_size)
-
+
     # Check if We Need to Create a New Tomogram for Given Algorithm
     tomogram = voxel_spacing.get_tomogram(algorithm)
     if tomogram is None:
         tomogram = voxel_spacing.new_tomogram(tomo_type=algorithm)
-
+
     # Write the tomogram data
     tomogram.from_numpy(input_volume)
 
@@ -48,7 +43,7 @@ def segmentation(
     name="segmentation",
     session_id="0",
     voxel_size=10,
-    multilabel=True
+    multilabel=True,
 ):
     """
     Writes a segmentation into an OME-Zarr format within a Copick directory.
@@ -75,7 +70,7 @@ def segmentation(
     copick.Segmentation
         The created or modified segmentation object.
     """
-
+
     # Retrieve or create a segmentation
     segmentations = run.get_segmentations(name=name, user_id=user_id, session_id=session_id)
 
@@ -86,11 +81,11 @@ def segmentation(
             name=name,
             session_id=session_id,
             is_multilabel=multilabel,
-            user_id=user_id
+            user_id=user_id,
         )
     else:
         # Overwrite the current segmentation at the specified voxel size if it exists
         segmentation = next(seg for seg in segmentations if seg.voxel_size == voxel_size)
-
+
     # Write the segmentation data
     segmentation.from_numpy(segmentation_volume, dtype=np.uint8)
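A hedged sketch pairing the relocated writers with a copick run; the project path, run name, user name, and array shapes are placeholders, and the leading positional order of `writers.segmentation` (run, then the volume) is an assumption since only its trailing keyword parameters appear in this diff:

```python
import copick
import numpy as np
from copick_utils.io import writers

root = copick.from_file("config.json")  # placeholder project config
run = root.get_run("TS_001")            # placeholder run name

volume = np.random.rand(64, 128, 128).astype(np.float32)  # stand-in tomogram data
labels = np.zeros(volume.shape, dtype=np.uint8)           # stand-in segmentation labels

# Write (or overwrite) the "wbp" tomogram at a 10 Å voxel spacing.
writers.tomogram(run, volume, voxel_size=10, algorithm="wbp")

# Write a multilabel segmentation named "segmentation" for a placeholder user.
writers.segmentation(run, labels, user_id="example", voxel_size=10, multilabel=True)
```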
copick_utils/pickers/grid_picker.py
CHANGED
@@ -2,6 +2,7 @@ import numpy as np
 import zarr
 from copick.models import CopickPoint
 
+
 def grid_picker(pickable_obj, run, tomogram, grid_spacing_factor, session_id="0", user_id="gridPicker"):
     """
     Creates a grid of picks for a pickable object based on a tomogram and grid spacing factor.
@@ -28,14 +29,14 @@ def grid_picker(pickable_obj, run, tomogram, grid_spacing_factor, session_id="0"
     grid_spacing = radius * grid_spacing_factor
 
     # Open the highest resolution of the tomogram
-    image = zarr.open(tomogram.zarr(), mode=
+    image = zarr.open(tomogram.zarr(), mode="r")["0"]
 
     # Create a grid of points
     points = []
     for z in np.arange(0, image.shape[0], grid_spacing):
         for y in np.arange(0, image.shape[1], grid_spacing):
             for x in np.arange(0, image.shape[2], grid_spacing):
-                points.append(CopickPoint(location={
+                points.append(CopickPoint(location={"x": x, "y": y, "z": z}))
 
     # Save the picks
     pick_set = run.new_picks(obj_name, session_id, user_id)
@@ -45,6 +46,7 @@ def grid_picker(pickable_obj, run, tomogram, grid_spacing_factor, session_id="0"
     print(f"Saved {len(points)} grid points for object {obj_name}.")
     return pick_set
 
+
 if __name__ == "__main__":
     import copick
 
@@ -65,5 +67,4 @@ if __name__ == "__main__":
     tomogram = voxel_spacing_obj.get_tomogram(tomo_type)
 
     for pickable_obj in root.pickable_objects:
-
-
+        grid_picker(pickable_obj, run, tomogram, grid_spacing_factor, session_id, user_id)
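A sketch of invoking the grid picker for every pickable object, mirroring the `__main__` block above; the project path, run name, and spacing factor are placeholders:

```python
import copick
from copick_utils.pickers.grid_picker import grid_picker

root = copick.from_file("config.json")                # placeholder project config
run = root.get_run("TS_001")                          # placeholder run name
tomo = run.get_voxel_spacing(10).get_tomogram("wbp")  # tomogram to grid over

for pickable_obj in root.pickable_objects:
    # Grid spacing is derived as radius * grid_spacing_factor (see function body above).
    grid_picker(pickable_obj, run, tomo, grid_spacing_factor=2.0, session_id="0", user_id="gridPicker")
```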
copick_utils/segmentation/picks_from_segmentation.py
CHANGED
@@ -1,11 +1,22 @@
 import numpy as np
 import scipy.ndimage as ndi
-from skimage.segmentation import watershed
 from skimage.measure import regionprops
-from skimage.morphology import
+from skimage.morphology import ball, binary_dilation, binary_erosion
+from skimage.segmentation import watershed
 
 
-def picks_from_segmentation(
+def picks_from_segmentation(
+    segmentation,
+    segmentation_idx,
+    maxima_filter_size,
+    min_particle_size,
+    max_particle_size,
+    session_id,
+    user_id,
+    pickable_object,
+    run,
+    voxel_spacing=1,
+):
     """
     Process a specific label in the segmentation, extract centroids, and save them as picks.
 
@@ -36,7 +47,10 @@ def picks_from_segmentation(segmentation, segmentation_idx, maxima_filter_size,
 
     # Distance transform and local maxima detection
    distance = ndi.distance_transform_edt(dilated)
-    local_max =
+    local_max = distance == ndi.maximum_filter(
+        distance,
+        footprint=np.ones((maxima_filter_size, maxima_filter_size, maxima_filter_size)),
+    )
 
     # Watershed segmentation
     markers, _ = ndi.label(local_max)
@@ -55,7 +69,7 @@ def picks_from_segmentation(segmentation, segmentation_idx, maxima_filter_size,
         positions = np.array(all_centroids)[:, [2, 1, 0]] * voxel_spacing
         pick_set.from_numpy(positions=positions)
         pick_set.store()
-
+
         print(f"Centroids for label {segmentation_idx} saved successfully.")
         return pick_set
     else:
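A hedged sketch of calling the reformatted `picks_from_segmentation`; the segmentation name, label index, filter size, and size bounds are illustrative, and whether `pickable_object` expects a name string or a copick object is not shown in these hunks (a name is assumed here):

```python
import copick
from copick_utils.io import readers
from copick_utils.segmentation.picks_from_segmentation import picks_from_segmentation

root = copick.from_file("config.json")  # placeholder project config
run = root.get_run("TS_001")            # placeholder run name
seg = readers.segmentation(run, voxel_spacing=10, segmentation_name="prediction")  # placeholder name

pick_set = picks_from_segmentation(
    segmentation=seg,
    segmentation_idx=1,          # label value to extract
    maxima_filter_size=9,        # window for local-maxima detection (illustrative)
    min_particle_size=50,        # illustrative size bounds, in voxels
    max_particle_size=50000,
    session_id="0",
    user_id="segmentationPicks",
    pickable_object="ribosome",  # assumed to be an object name (placeholder)
    run=run,
    voxel_spacing=10,            # converts centroid voxels back to physical units
)
```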
copick_utils/segmentation/segmentation_from_picks.py
CHANGED
@@ -1,13 +1,9 @@
 import numpy as np
 import zarr
 from scipy.ndimage import zoom
-import copick
 
-
-
-    radius: float = 10.0,
-    label_value: int = 1,
-    voxel_spacing: float = 10):
+
+def from_picks(pick, seg_volume, radius: float = 10.0, label_value: int = 1, voxel_spacing: float = 10):
     """
     Paints picks into a segmentation volume as spheres.
 
@@ -26,12 +22,13 @@ def from_picks(pick,
     Returns:
     --------
     numpy.ndarray
-    The modified segmentation volume with spheres inserted at pick locations.
+        The modified segmentation volume with spheres inserted at pick locations.
     """
+
     def create_sphere(shape, center, radius, val):
         zc, yc, xc = center
         z, y, x = np.indices(shape)
-        distance_sq = (x - xc)**2 + (y - yc)**2 + (z - zc)**2
+        distance_sq = (x - xc) ** 2 + (y - yc) ** 2 + (z - zc) ** 2
         sphere = np.zeros(shape, dtype=np.float32)
         sphere[distance_sq <= radius**2] = val
         return sphere
@@ -48,7 +45,11 @@ def from_picks(pick,
     # Paint each pick as a sphere
     for point in pick.points:
         # Convert the pick's location from angstroms to voxel units
-        cx, cy, cz =
+        cx, cy, cz = (
+            point.location.x / voxel_spacing,
+            point.location.y / voxel_spacing,
+            point.location.z / voxel_spacing,
+        )
 
         # Calculate subarray bounds
         xLow, xHigh = get_relative_target_coordinates(cx, delta, seg_volume.shape[2])
@@ -65,7 +66,10 @@ def from_picks(pick,
         sphere = create_sphere(subarray_shape, local_center, radius_voxel, label_value)
 
         # Assign Sphere to Segmentation Target Volume
-        seg_volume[zLow:zHigh, yLow:yHigh, xLow:xHigh] = np.maximum(
+        seg_volume[zLow:zHigh, yLow:yHigh, xLow:xHigh] = np.maximum(
+            seg_volume[zLow:zHigh, yLow:yHigh, xLow:xHigh],
+            sphere,
+        )
 
     return seg_volume
 
@@ -79,7 +83,17 @@ def downsample_to_exact_shape(array, target_shape):
     return zoom(array, zoom_factors, order=0)
 
 
-def segmentation_from_picks(
+def segmentation_from_picks(
+    radius,
+    painting_segmentation_name,
+    run,
+    voxel_spacing,
+    tomo_type,
+    pickable_object,
+    pick_set,
+    user_id="paintedPicks",
+    session_id="0",
+):
     """
     Paints picks from a run into a multiscale segmentation array, representing them as spheres in 3D space.
 
@@ -115,7 +129,13 @@ def segmentation_from_picks(radius, painting_segmentation_name, run, voxel_spaci
         raise ValueError("Tomogram not found for the given parameters.")
 
     # Use copick to create a new segmentation if one does not exist
-    segs = run.get_segmentations(
+    segs = run.get_segmentations(
+        user_id=user_id,
+        session_id=session_id,
+        is_multilabel=True,
+        name=painting_segmentation_name,
+        voxel_size=voxel_spacing,
+    )
     if len(segs) == 0:
         seg = run.new_segmentation(voxel_spacing, painting_segmentation_name, session_id, True, user_id=user_id)
     else:
@@ -142,7 +162,7 @@ def segmentation_from_picks(radius, painting_segmentation_name, run, voxel_spaci
     segmentation_group[highest_res_name][:] = highest_res_seg
 
     # Downsample to create lower resolution scales
-    multiscale_metadata = tomogram_zarr.attrs.get(
+    multiscale_metadata = tomogram_zarr.attrs.get("multiscales", [{}])[0].get("datasets", [])
     for level_index, level_metadata in enumerate(multiscale_metadata):
         if level_index == 0:
             continue
@@ -154,7 +174,13 @@ def segmentation_from_picks(radius, painting_segmentation_name, run, voxel_spaci
         scaled_array = downsample_to_exact_shape(highest_res_seg, expected_shape)
 
         # Create/overwrite the Zarr array for this level
-        segmentation_group.create_dataset(
+        segmentation_group.create_dataset(
+            level_name,
+            shape=expected_shape,
+            data=scaled_array,
+            dtype=np.uint16,
+            overwrite=True,
+        )
 
         segmentation_group[level_name][:] = scaled_array
 
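A sketch of the expanded `segmentation_from_picks` call with the new keyword layout; the radius, names, and the way the pickable object and pick set are retrieved are placeholders or assumptions (the diff does not show whether `pickable_object` expects a copick object or a name, so an object from `root.pickable_objects` is assumed):

```python
import copick
from copick_utils.segmentation.segmentation_from_picks import segmentation_from_picks

root = copick.from_file("config.json")  # placeholder project config
run = root.get_run("TS_001")            # placeholder run name

obj = next(o for o in root.pickable_objects if o.name == "ribosome")                # placeholder object
pick_set = run.get_picks(object_name="ribosome", user_id="curation")[0]             # placeholder query

seg = segmentation_from_picks(
    radius=60,                                # sphere radius in physical units (illustrative)
    painting_segmentation_name="paintedPicks",
    run=run,
    voxel_spacing=10,
    tomo_type="wbp",
    pickable_object=obj,
    pick_set=pick_set,
    user_id="paintedPicks",
    session_id="0",
)
```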
copick_utils-0.5.0.dist-info/METADATA → copick_utils-0.6.1.dist-info/METADATA
RENAMED
@@ -1,27 +1,40 @@
-Metadata-Version: 2.
+Metadata-Version: 2.4
 Name: copick-utils
-Version: 0.
+Version: 0.6.1
 Summary: Utilities for copick
-
-
-
-
+Project-URL: Repository, https://github.com/KyleHarrington/copick-utils.git
+Project-URL: Issues, https://github.com/KyleHarrington/copick-utils/issues
+Project-URL: Documentation, https://github.com/KyleHarrington/copick-utils#readme
+Author-email: Kyle Harrington <czi@kyleharrington.com>, Jonathan Schwartz <jonathan.schwartz@czii.org>
+License: MIT License
+
+        Copyright (c) 2024-present Kyle Harrington <czi@kyleharrington.com>
+
+        Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
+
+        The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
+
+        THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+License-File: LICENSE
+Keywords: annotation,copick,cryo-et,cryoet,tomography,utilities
 Classifier: Development Status :: 4 - Beta
 Classifier: License :: OSI Approved :: MIT License
 Classifier: Programming Language :: Python
-Classifier: Programming Language :: Python :: 3
 Classifier: Programming Language :: Python :: 3.9
 Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
 Classifier: Programming Language :: Python :: 3.13
-Classifier: Programming Language :: Python :: 3.8
 Classifier: Programming Language :: Python :: Implementation :: CPython
 Classifier: Programming Language :: Python :: Implementation :: PyPy
-Requires-
-
-
-
+Requires-Python: >=3.9
+Requires-Dist: copick>=0.8.0
+Provides-Extra: dev
+Requires-Dist: black>=25.1.0; extra == 'dev'
+Requires-Dist: hatch-vcs>=0.4.0; extra == 'dev'
+Requires-Dist: hatchling>=1.25.0; extra == 'dev'
+Requires-Dist: pre-commit>=4.2.0; extra == 'dev'
+Requires-Dist: ruff>=0.12.0; extra == 'dev'
 Description-Content-Type: text/markdown
 
 # copick-utils
@@ -70,4 +83,3 @@ This project adheres to the Contributor Covenant [code of conduct](https://githu
 ## Reporting Security Issues
 
 If you believe you have found a security issue, please responsibly disclose by contacting us at [security@chanzuckerberg.com](mailto:security@chanzuckerberg.com).
-
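The metadata changes raise the Python floor to 3.9, declare `copick>=0.8.0` as the sole runtime dependency, and move the tooling (black, ruff, pre-commit, hatch plugins) behind a `dev` extra. A standard-library sketch for confirming what an installed wheel declares:

```python
from importlib.metadata import metadata, requires, version

print(version("copick-utils"))                       # expected: 0.6.1
print(metadata("copick-utils")["Requires-Python"])   # expected: >=3.9
for req in requires("copick-utils") or []:
    print(req)                                       # copick>=0.8.0 plus the 'dev' extra pins
```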
copick_utils-0.6.1.dist-info/RECORD
ADDED
@@ -0,0 +1,14 @@
+copick_utils/__init__.py,sha256=FqcMzBIYexR9TCNHvUyjKXblBOdEaa9-pt1fv8TNEcA,135
+copick_utils/features/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+copick_utils/features/skimage.py,sha256=Sz-348tTT44lLS825z14iIOM3L_tALXQctUF1HbnWnw,4209
+copick_utils/io/readers.py,sha256=bE7IBPohNjsFgD6HRPTrWte6OjaJ0NrF4RS8Dwgf3nA,5435
+copick_utils/io/writers.py,sha256=iYyNkpBgrD0_N0N-LoyCOfIrk46WHWocKvkUUQYXMRg,2985
+copick_utils/pickers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+copick_utils/pickers/grid_picker.py,sha256=KKfdv3fDmeY7XwqiVADRQJibr1eyjYoG9ZpaihcrgHw,2345
+copick_utils/segmentation/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+copick_utils/segmentation/picks_from_segmentation.py,sha256=Pu3079P4LNJPQAEwsYdrJ3K_X_i1zmo_xxbu16goKAo,3007
+copick_utils/segmentation/segmentation_from_picks.py,sha256=oeP9NdOYcRATbpeKf3SkbdqbRuUEZCOPArl-KBseYc0,6991
+copick_utils-0.6.1.dist-info/METADATA,sha256=GXk4KjbVjumU8HK6nCwUuWjqR9AMrV4Vc_32S2GR5IQ,4246
+copick_utils-0.6.1.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+copick_utils-0.6.1.dist-info/licenses/LICENSE,sha256=3UHKsYd99Gh_qf1a9s8G5sdKqafgbGs5WIMoeX0OcdY,1105
+copick_utils-0.6.1.dist-info/RECORD,,
copick_utils/__about__.py
DELETED
copick_utils/writers/__init__.py
DELETED
File without changes
copick_utils-0.5.0.dist-info/RECORD
DELETED
@@ -1,15 +0,0 @@
-copick_utils/__about__.py,sha256=7D13PJEcpdhEa6RrlKLt7IEkoVVVGuzqHJb5MQxgLiI,135
-copick_utils/__init__.py,sha256=v-RIkEuGuAXivakLMrneraDQd7cWN7zsdGLmjwLtDDw,113
-copick_utils/features/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-copick_utils/features/skimage.py,sha256=t38jpu-ntC7Zw--1qSxWhGIg9e02RLhVBZEySIa5dQs,4036
-copick_utils/pickers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-copick_utils/pickers/grid_picker.py,sha256=NhFbWxMQREb0fLKTho4602yzH7zE6DCkJY94dgJ-gIQ,2353
-copick_utils/segmentation/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-copick_utils/segmentation/picks_from_segmentation.py,sha256=Ne_RYfnEFaV_qwNw4uOwPfLLgGxHMEQ2uecp9X2_EVc,2951
-copick_utils/segmentation/segmentation_from_picks.py,sha256=3oIlFWRAR904j5IpC5Fo8Y4gP9iL6qUiwybRgJvhzmc,6820
-copick_utils/writers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-copick_utils/writers/write.py,sha256=KhdZUUZrZR02bzS5SWM9q-u4y2s1qxcySwN3MN5j5YA,3055
-copick_utils-0.5.0.dist-info/LICENSE.txt,sha256=3UHKsYd99Gh_qf1a9s8G5sdKqafgbGs5WIMoeX0OcdY,1105
-copick_utils-0.5.0.dist-info/METADATA,sha256=zd_eTHyOWidHZvieUbSzRJMTX-Lft3lhZE5uJz3qajM,2792
-copick_utils-0.5.0.dist-info/WHEEL,sha256=b4K_helf-jlQoXBBETfwnf4B04YC67LOev0jo4fX5m8,88
-copick_utils-0.5.0.dist-info/RECORD,,
copick_utils-0.5.0.dist-info/LICENSE.txt → copick_utils-0.6.1.dist-info/licenses/LICENSE
RENAMED
File without changes