small-fish-gui 1.8.1-py3-none-any.whl → 1.9.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- small_fish_gui/__init__.py +1 -1
- small_fish_gui/gui/{napari.py → napari_visualiser.py} +159 -21
- small_fish_gui/gui/prompts.py +1 -1
- small_fish_gui/gui/testing.ipynb +422 -0
- small_fish_gui/interface/inoutput.py +59 -1
- small_fish_gui/pipeline/_colocalisation.py +66 -41
- small_fish_gui/pipeline/actions.py +34 -24
- small_fish_gui/pipeline/detection.py +66 -15
- small_fish_gui/pipeline/main.py +7 -7
- small_fish_gui/pipeline/segmentation.py +1 -1
- small_fish_gui/pipeline/spots.py +7 -0
- small_fish_gui/pipeline/testing.ipynb +2067 -0
- {small_fish_gui-1.8.1.dist-info → small_fish_gui-1.9.0.dist-info}/METADATA +3 -2
- {small_fish_gui-1.8.1.dist-info → small_fish_gui-1.9.0.dist-info}/RECORD +16 -14
- {small_fish_gui-1.8.1.dist-info → small_fish_gui-1.9.0.dist-info}/WHEEL +1 -1
- {small_fish_gui-1.8.1.dist-info → small_fish_gui-1.9.0.dist-info}/licenses/LICENSE +0 -0
small_fish_gui/gui/{napari.py → napari_visualiser.py}
CHANGED
@@ -7,19 +7,121 @@ import napari.types
 import numpy as np
 import napari
 
-from
+from sklearn.cluster import DBSCAN
+from sklearn.neighbors import NearestNeighbors
 
 from magicgui import widgets
-from magicgui import magicgui
 
 from bigfish.stack import check_parameter
+from bigfish.detection.cluster_detection import _extract_information
 from ._napari_widgets import cell_label_eraser, segmentation_reseter, changes_propagater, free_label_picker
 from ..utils import compute_anisotropy_coef
 from ..pipeline._colocalisation import spots_multicolocalisation
 
 #Post detection
 
-def _update_clusters(
+def _update_clusters(
+        old_spots : np.ndarray,
+        spot_cluster_id : np.ndarray,
+        new_spots : np.ndarray,
+        old_clusters : np.ndarray,
+        new_clusters : np.ndarray,
+        cluster_size : int,
+        min_number_spot : int,
+        voxel_size : tuple,
+        null_value = -2,
+        talks = False,
+        ) :
+    """
+
+    new_spots get a weight of 1.
+    Spots already in a cluster get weight 1.
+    Spots not previously in a cluster but now within a new cluster radius get weight = min_number_spot / number of spots within the new cluster radius (>= 1).
+    Spots in the radius of a deleted cluster get weight 0, unless they are in the radius of a new cluster.
+
+    Parameters
+    ----------
+    new_spots : array (spots_number, space_dim + 1,) containing the coordinates of each spot after napari correction as well as the id of the cluster it belongs to. -1 if free spot, np.NaN if unknown.
+    old_clusters : array (spots_number, space_dim + 2,) containing the coordinates of each cluster centroid before napari correction, the number of spots in the cluster and the cluster id.
+    new_clusters : array (spots_number, space_dim + 2,) containing the coordinates of each cluster centroid after napari correction, the number of spots in the cluster and the cluster id. The number of spots is NaN if the cluster is new.
+    cluster_size : size of cluster in nanometers, passed to DBSCAN.
+
+    Returns
+    -------
+    corrected_spots : array with updated cluster ids.
+    corrected_clusters : array with updated numbers of spots.
+
+    """
+
+    spots_weights = np.ones(len(new_spots), dtype=float)
+
+    if talks :
+        print("\nTALKS IN napari_visualiser._update_clusters")
+        print('new_spots_shape : ', new_spots.shape)
+        print('old_clusters : ', old_clusters.shape)
+        print('new_clusters : ', new_clusters.shape)
+
+    #Finding new and deleted clusters
+    deleted_cluster = old_clusters[~(np.isin(old_clusters[:,-1], new_clusters[:,-1]))]
+    added_cluster = new_clusters[new_clusters[:,-1] == null_value]
+
+    if talks :
+        print('deleted_cluster : ', deleted_cluster.shape)
+        print('added_cluster : ', added_cluster.shape)
+
+
+
+    #Removing cluster_id from points clustered in deleted clusters
+    spots_0_weights = old_spots[np.isin(spot_cluster_id, deleted_cluster[:,-1])]
+    spots_weights[np.isin(new_spots, spots_0_weights).all(axis=1)] = 0 #Setting weight to 0 for spots in deleted clusters.
+
+    if talks :
+        print("deleted cluster ids : ", deleted_cluster[:,-1])
+        print("spots in deleted cluster : \n", spots_0_weights)
+
+    #Finding spots in range of new clusters
+    if len(added_cluster) > 0 :
+        points_neighbors = NearestNeighbors(radius= cluster_size)
+        points_neighbors.fit(new_spots*voxel_size)
+        neighbor_query = points_neighbors.radius_neighbors(added_cluster[:,:-2]*voxel_size, return_distance=False)
+
+        for cluster_neighbor in neighbor_query :
+            neighboring_spot_number = len(cluster_neighbor)
+            if neighboring_spot_number == 0 : continue # will not add a cluster if there is not even one spot nearby.
+            weight = min_number_spot / neighboring_spot_number # >1
+            if weight <= 1 : print("napari._update_clusters warning : weight <= 1; this should not happen, some clusters might be missed during post-napari computation.")
+            if any(spots_weights[cluster_neighbor] > weight) : # Not replacing a weight with a smaller weight, to ensure all new clusters will be added.
+                mask = spots_weights[cluster_neighbor] > weight
+                cluster_neighbor = np.delete(cluster_neighbor, mask)
+            if len(cluster_neighbor) > 0 : spots_weights[cluster_neighbor] = weight
+
+    #Initiating new DBSCAN model
+    dbscan_model = DBSCAN(cluster_size, min_samples=min_number_spot)
+    dbscan_model.fit(new_spots*voxel_size, sample_weight=spots_weights)
+
+    #Constructing corrected arrays
+    spots_labels = dbscan_model.labels_.reshape(len(new_spots), 1)
+    corrected_spots = np.concatenate([new_spots, spots_labels], axis=1).astype(int)
+    corrected_cluster = _extract_information(corrected_spots)
+
+    if talks :
+        print("spots with weight 0 :", len(spots_weights[spots_weights == 0]))
+        print("spots with weight > 1 :", len(spots_weights[spots_weights > 1]))
+        print("spots with weight == 1 :", len(spots_weights[spots_weights == 1]))
+        print("spots with weight < 1 :", len(spots_weights[np.logical_and(spots_weights < 1, spots_weights > 0)]))
+
+        print('corrected_spots : ', corrected_spots.shape)
+        print('corrected_cluster : ', corrected_cluster.shape)
+        print("END TALK\n")
+
+
+    return corrected_spots, corrected_cluster
+
+
+def __update_clusters(new_clusters: np.ndarray, spots: np.ndarray, voxel_size, cluster_size, shape) :
+    """
+    Outdated. Previous behaviour.
+    """
     if len(new_clusters) == 0 : return new_clusters
     if len(spots) == 0 : return np.empty(shape=(0,2+len(voxel_size)))
 
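The rewritten `_update_clusters` keeps DBSCAN as the clustering engine but steers it with per-spot sample weights: spots that sat inside a focus the user deleted get weight 0, while spots near a focus the user drew by hand share a weight large enough to reach `min_samples` on their own. A minimal stand-alone sketch of that scikit-learn mechanism, with invented coordinates and parameters (this is an illustration, not code from the package):

```python
# Illustration of DBSCAN's sample_weight as used by the new _update_clusters:
# a point is a cluster core when the weights inside eps sum to min_samples,
# so weight 0 suppresses a cluster and a weight >= min_samples forces one.
import numpy as np
from sklearn.cluster import DBSCAN

spots = np.array([
    [0, 0], [0, 40], [40, 0],   # a tight group of three spots
    [500, 500],                 # one isolated spot
], dtype=float)
eps, min_samples = 100, 3       # analogous to cluster_size and min_number_spot

# Uniform weights: the tight group forms cluster 0, the isolated spot stays noise (-1).
print(DBSCAN(eps, min_samples=min_samples).fit(spots).labels_)                              # e.g. [ 0  0  0 -1]
# Weight 0 on the group (the user deleted that focus): no cluster survives.
print(DBSCAN(eps, min_samples=min_samples).fit(spots, sample_weight=[0, 0, 0, 1]).labels_)  # e.g. [-1 -1 -1 -1]
# Weight >= min_samples on the isolated spot (the user drew a focus there): it clusters alone.
print(DBSCAN(eps, min_samples=min_samples).fit(spots, sample_weight=[1, 1, 1, 3]).labels_)  # e.g. [ 0  0  0  1]
```

The weights computed in `_update_clusters` play these same two roles: 0 for spots of deleted foci, and `min_number_spot / n_neighbours` shared among the spots surrounding an added focus.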
@@ -40,7 +142,18 @@ def _update_clusters(new_clusters: np.ndarray, spots: np.ndarray, voxel_size, cl
 
     return new_clusters
 
-def correct_spots(
+def correct_spots(
+        image,
+        spots,
+        voxel_size= (1,1,1),
+        clusters= None,
+        spot_cluster_id= None,
+        cluster_size=None,
+        min_spot_number=0,
+        cell_label= None,
+        nucleus_label= None,
+        other_images =[]
+        ):
     """
     Open Napari viewer for user to visualize and corrects spots, clusters.
 
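For orientation, here is roughly how the extended signature might be called once the module is imported under its new name. Everything below is a placeholder sketch: the arrays, voxel size and cluster layout (centroid, spot count, id, as described in the `_update_clusters` docstring) are invented, and only the keyword names come from the diff.

```python
# Hypothetical call site, not taken from the package; it only exercises the keywords
# added in 1.9.0 (spot_cluster_id, min_spot_number).
import numpy as np
from small_fish_gui.gui.napari_visualiser import correct_spots

rna = np.random.randint(0, 65535, size=(10, 256, 256), dtype=np.uint16)  # placeholder (z, y, x) stack
spots = np.array([[5, 100, 100], [5, 102, 101], [6, 101, 99]])           # zyx spot coordinates
clusters = np.array([[5, 101, 100, 3, 0]])                               # centroid zyx, spot count, cluster id
spot_cluster_id = np.array([0, 0, 0])                                    # cluster id per spot, -1 for free spots

new_spots, new_clusters = correct_spots(
    image=rna,
    spots=spots,
    voxel_size=(300, 103, 103),   # nanometers per axis (placeholder)
    clusters=clusters,
    spot_cluster_id=spot_cluster_id,
    cluster_size=400,             # DBSCAN radius in nanometers (placeholder)
    min_spot_number=4,
)
```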
@@ -68,9 +181,8 @@ def correct_spots(image, spots, voxel_size= (1,1,1), clusters= None, cluster_siz
     Viewer = napari.Viewer(ndisplay=2, title= 'Spot correction', axis_labels=['z','y','x'], show= False)
     Viewer.add_image(image, scale=scale, name= "rna signal", blending= 'additive', colormap='red', contrast_limits=[image.min(), image.max()])
     other_colors = ['green', 'blue', 'gray', 'cyan', 'bop orange', 'bop purple'] * ((len(other_images)-1 // 7) + 1)
-    for im, color in zip(other_images, other_colors) :
+    for im, color in zip(other_images, other_colors) :
         Viewer.add_image(im, scale=scale, blending='additive', visible=False, colormap=color, contrast_limits=[im.min(), im.max()])
-    layer_offset = len(other_images)
 
     Viewer.add_points( # single molecule spots; this layer can be update by user.
         spots,
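Every layer above is added with the same `scale`, derived from `voxel_size`, so anisotropic z-stacks display in proportion. A hedged sketch of the idea, assuming the common convention of dividing each voxel dimension by the smallest one; the package's own `compute_anisotropy_coef` may compute it differently:

```python
# Illustration only: derive per-axis display scaling for napari from an anisotropic voxel size.
# The formula below is an assumption, not the package's implementation.
import numpy as np
import napari

voxel_size = (300, 103, 103)                            # (z, y, x) in nanometers, placeholder
scale = tuple(np.array(voxel_size) / min(voxel_size))   # roughly (2.91, 1.0, 1.0)

viewer = napari.Viewer(ndisplay=2, axis_labels=['z', 'y', 'x'], show=False)
stack = np.zeros((10, 256, 256), dtype=np.uint16)       # placeholder image
viewer.add_image(stack, scale=scale, name='rna signal', blending='additive', colormap='red')
viewer.add_points(np.array([[5, 100, 100]]), scale=scale, name='single spots')
```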
@@ -82,16 +194,21 @@
         name= 'single spots'
     )
 
-    if type(clusters) != type(None) :
-        clusters
-
-
-
-
-
-
-
-
+    if type(clusters) != type(None) :
+        if len(clusters) > 0 :
+            clusters_coordinates = clusters[:, :dim]
+        else :
+            clusters_coordinates = np.empty(shape=(0,3))
+        Viewer.add_points( # cluster; this layer can be update by user.
+            clusters_coordinates,
+            size = 10,
+            scale=scale,
+            face_color= 'blue',
+            opacity= 0.7,
+            symbol= 'diamond',
+            name= 'foci',
+            features= {"spot_number" : clusters[:,dim], "id" : clusters[:,dim+1]},
+            feature_defaults= {"spot_number" : 0, "id" : -2} # napari features default will not work with np.NaN passing -2 instead.
         )
 
     if type(cell_label) != type(None) and not np.array_equal(nucleus_label, cell_label) : Viewer.add_labels(cell_label, scale=scale, opacity= 0.2, blending= 'additive')
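The new 'foci' layer stores the per-cluster spot count and id in napari's features table, and `feature_defaults` stamps any focus the user adds by hand with `id = -2`, which is how `_update_clusters` later recognises user-created clusters (its `null_value`). A stand-alone illustration of that mechanism with made-up coordinates; it assumes a napari version recent enough to accept `feature_defaults`, as the code above does:

```python
# Illustration of napari Points features / feature_defaults as used for the 'foci' layer.
import numpy as np
import napari

foci = np.array([[5, 100, 100], [7, 50, 60]])            # zyx centroids of two detected clusters
viewer = napari.Viewer(show=False)
layer = viewer.add_points(
    foci,
    name='foci',
    symbol='diamond',
    features={"spot_number": np.array([4, 6]), "id": np.array([0, 1])},
    feature_defaults={"spot_number": 0, "id": -2},       # applied to points the user adds
)

layer.add(np.array([[9, 10, 10]]))                       # simulate a user-drawn focus
print(layer.features)
#    spot_number  id
# 0            4   0
# 1            6   1
# 2            0  -2   <- defaults mark it as a new, not-yet-counted cluster
```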
@@ -103,16 +220,37 @@ def correct_spots(image, spots, voxel_size= (1,1,1), clusters= None, cluster_siz
     new_spots = np.array(Viewer.layers['single spots'].data, dtype= int)
 
     if type(clusters) != type(None) :
-        new_clusters = np.
-
+        new_clusters = np.round(Viewer.layers['foci'].data).astype(int)
+        if len(new_clusters) == 0 :
+            new_clusters = np.empty(shape=(0,5))
+            new_cluster_id = -1 * np.ones(len(new_spots))
+            new_spots = np.concatenate([new_spots, new_cluster_id], axis=1)
+        else :
+            new_cluster_id = Viewer.layers['foci'].features.to_numpy()
+            new_clusters = np.concatenate([new_clusters, new_cluster_id], axis=1)
+
+        print("After concatenate new clusters shape = {0}".format(new_clusters.shape))
+
+        new_spots, new_clusters = _update_clusters(
+            old_spots =spots,
+            spot_cluster_id = spot_cluster_id,
+            new_spots=new_spots,
+            old_clusters=clusters,
+            new_clusters=new_clusters,
+            cluster_size=cluster_size,
+            min_number_spot=min_spot_number,
+            voxel_size=voxel_size,
+            null_value= -2
+        )
+
+        print("After _update_cluster\nnew_clusters shape = {0}\nnew_spots shape = {1}".format(new_clusters.shape, new_spots.shape))
+
     else : new_clusters = None
 
     return new_spots, new_clusters
 
-# Segmentation
-
-
 
+# Segmentation
 def show_segmentation(
     nuc_image : np.ndarray,
     nuc_label : np.ndarray,
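Once the corrected spots carry their new DBSCAN labels, bigfish's private `_extract_information` turns them back into a per-cluster table (centroid coordinates, spot count, cluster id). For readers without bigfish at hand, a hedged numpy approximation of that summary step; the real helper may differ in details such as ordering or centroid rounding:

```python
# Hedged sketch (not bigfish code): summarise (z, y, x, cluster_id) spots into
# (z, y, x, nb_spots, cluster_id) rows, one per cluster, ignoring free spots (id == -1).
import numpy as np

def summarize_clusters(clustered_spots: np.ndarray) -> np.ndarray:
    coords, ids = clustered_spots[:, :-1], clustered_spots[:, -1]
    rows = []
    for cluster_id in np.unique(ids):
        if cluster_id == -1:                              # -1 marks spots outside any cluster
            continue
        members = coords[ids == cluster_id]
        centroid = np.round(members.mean(axis=0)).astype(int)
        rows.append([*centroid, len(members), int(cluster_id)])
    if not rows:
        return np.empty((0, clustered_spots.shape[1] + 1), dtype=int)
    return np.array(rows, dtype=int)

spots = np.array([
    [5, 100, 100, 0],
    [5, 102, 101, 0],
    [6, 101,  99, 0],
    [9,  10,  10, -1],   # free spot
])
print(summarize_clusters(spots))   # [[  5 101 100   3   0]]
```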
small_fish_gui/gui/prompts.py
CHANGED
@@ -272,7 +272,7 @@ def _sumup_df(results: pd.DataFrame) :
 
     return res
 
-def hub_prompt(fov_results, do_segmentation=False) -> 'Union[Literal["Add detection", "Compute colocalisation", "Batch detection", "Rename acquisition", "Save results", "Delete acquisitions", "Reset segmentation", "Reset results", "Segment cells"], dict[Literal["result_table", ""]]]':
+def hub_prompt(fov_results : pd.DataFrame, do_segmentation=False) -> 'Union[Literal["Add detection", "Compute colocalisation", "Batch detection", "Rename acquisition", "Save results", "Delete acquisitions", "Reset segmentation", "Reset results", "Segment cells"], dict[Literal["result_table", ""]]]':
 
     sumup_df = _sumup_df(fov_results)
 