small-fish-gui 1.9.4__py3-none-any.whl → 1.10.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
small_fish_gui/README.md CHANGED
@@ -23,7 +23,7 @@ If you don't have a python installation yet I would recommend the [miniconda dis
23
23
  It is highly recommended to create a specific [conda](https://docs.conda.io/projects/conda/en/latest/user-guide/tasks/manage-environments.html) or [virtual](https://docs.python.org/3.6/library/venv.html) environment to install small fish.
24
24
 
25
25
  ```bash
26
- conda create -n small_fish python=3.8
26
+ conda create -n small_fish python=3.9
27
27
  conda activate small_fish
28
28
  ```
29
29
  Then download the small_fish package :
@@ -102,3 +102,4 @@ Optional features to include in future versions :
102
102
  * allows npz files with multiple masks in load segmentation by asking user which one to select
103
103
  * fix parquet format or replace to another compressed format
104
104
  * In Napari viewer, add an extra spot layer to visualize spots that are in foci, or color spots that are in clusters in a specific color.
105
+ * Foci merge tool in Napari
@@ -37,4 +37,8 @@ ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
37
37
  (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
38
38
  SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
39
39
  """
40
- __version__ = "1.9.4"
40
+ __version__ = "1.10.1"
41
+ __wiki__ = "https://github.com/2Echoes/small_fish_gui/wiki"
42
+
43
+ import os
44
+ os.environ["QT_QPA_PLATFORM"] = "xcb"
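The new package `__init__` forces Qt onto the xcb backend at import time, which assumes an X11 session. As a purely illustrative sketch (not what the package ships), a more defensive variant would only set the variable on Linux and only when no Qt platform has already been chosen:

```python
import os
import sys

# Illustrative guard (assumption, not the released code): prefer xcb only on
# Linux, and only if no Qt platform was already selected by the session.
if sys.platform.startswith("linux") and "QT_QPA_PLATFORM" not in os.environ:
    os.environ["QT_QPA_PLATFORM"] = "xcb"
```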
@@ -1,18 +1,19 @@
1
- import sys, subprocess, traceback, os, re
1
+ import sys, subprocess, re
2
2
  from small_fish_gui import __version__
3
3
 
4
- def main():
5
- import small_fish_gui.pipeline.main
6
-
7
- def _get_version() :
8
- return __version__
9
-
10
4
  AVAILABLE_ARGUMENTS = {
11
5
  ('-v','--v','--version') : "Prompt the software version.",
12
6
  ('--launch', '-l') : "Launch small fish gui, equivalent to no arguments.",
13
7
  ('-h', '--help', '--h') : "Prompt this help menu."
14
8
  }
15
9
 
10
+
11
+ def main():
12
+ import small_fish_gui.pipeline.main
13
+
14
+ def _get_version() :
15
+ return __version__
16
+
16
17
  def is_last_version() :
17
18
 
18
19
  query = subprocess.run([sys.executable, '-m', 'pip', 'index', 'versions', 'small_fish_gui'], capture_output=True, text=True)
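The hunk cuts off right after the subprocess call, so the comparison logic of `is_last_version` is not shown here. For context, a minimal sketch of how such a check could parse the `pip index versions` output and compare it against `__version__` (the output format and the helper name are assumptions, not the package's code):

```python
import re
import subprocess
import sys

from small_fish_gui import __version__


def _is_last_version_sketch() -> bool:
    # 'pip index versions <pkg>' typically prints a first line such as
    # "small_fish_gui (1.10.1)" followed by "Available versions: ...".
    query = subprocess.run(
        [sys.executable, "-m", "pip", "index", "versions", "small_fish_gui"],
        capture_output=True, text=True,
    )
    match = re.search(r"\(([\d.]+)\)", query.stdout)
    if match is None:
        # Offline, or pip without the 'index' subcommand: assume up to date.
        return True
    return match.group(1) == __version__
```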
@@ -33,30 +34,19 @@ if __name__ == "__main__":
33
34
  if not is_last_version() :
34
35
  print("A new version of Small Fish is available. To update close small fish and type :\npip install --upgrade small_fish_gui")
35
36
 
36
- try :
37
- arguments = sys.argv
38
-
39
- if len(arguments) > 1 :
40
- if arguments[1] in ['-v','--v','--version'] :
41
- print(_get_version())
42
- quit()
43
- elif arguments[1] in ['--launch', '-l'] :
44
- pass
45
- elif arguments[1] in ['-h', '--help', '--h'] :
46
- for key, help in AVAILABLE_ARGUMENTS.items() :
47
- print(f"{key} : {help}")
48
- quit()
49
- else :
50
- print(f"Incorrect argument : {arguments}, to launch small fish don't pass any argument or pick amongst {AVAILABLE_ARGUMENTS.keys()}")
51
-
52
- sys.exit(main())
53
-
54
- except Exception as error :
55
- with open("error_log.txt",'a') as error_log :
56
- error_log.writelines([
57
- f"version {_get_version()}",
58
- f"error : {error}",
59
- f"traceback :\n{traceback.format_exc()}",
60
- ])
61
-
62
- print(f"error_log saved at {os.getcwd()}/error_log.txt. Please consider reporting this by opening an issue on github.")
37
+ arguments = sys.argv
38
+
39
+ if len(arguments) > 1 :
40
+ if arguments[1] in ['-v','--v','--version'] :
41
+ print(_get_version())
42
+ quit()
43
+ elif arguments[1] in ['--launch', '-l'] :
44
+ pass
45
+ elif arguments[1] in ['-h', '--help', '--h'] :
46
+ for key, help in AVAILABLE_ARGUMENTS.items() :
47
+ print(f"{key} : {help}")
48
+ quit()
49
+ else :
50
+ print(f"Incorrect argument : {arguments}, to launch small fish don't pass any argument or pick amongst {AVAILABLE_ARGUMENTS.keys()}")
51
+
52
+ sys.exit(main())
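Because `AVAILABLE_ARGUMENTS` is keyed by tuples of aliases, the error branch above prints a raw `dict_keys` view of tuples. A small illustrative sketch (the helper names are hypothetical, not part of the package) of flattening the aliases for lookups and friendlier messages:

```python
AVAILABLE_ARGUMENTS = {
    ('-v', '--v', '--version'): "Prompt the software version.",
    ('--launch', '-l'): "Launch small fish gui, equivalent to no arguments.",
    ('-h', '--help', '--h'): "Prompt this help menu.",
}

# Flatten the tuple keys so messages can list plain flags.
ALL_FLAGS = [flag for aliases in AVAILABLE_ARGUMENTS for flag in aliases]


def describe(flag: str) -> str:
    # Find the help text of a single flag through its tuple key.
    for aliases, help_text in AVAILABLE_ARGUMENTS.items():
        if flag in aliases:
            return help_text
    raise KeyError(f"Unknown flag {flag!r}; pick amongst {ALL_FLAGS}")
```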
@@ -212,7 +212,7 @@ def batch_promp(
212
212
  #Hiding options for non batch mode
213
213
  window= window.finalize()
214
214
  napari_correction_elmt.update(disabled=True)
215
- get_elmt_from_key(tab_dict['Input'], key= 'image path').update(disabled=True)
215
+ get_elmt_from_key(tab_dict['Input'], key= 'image_path').update(disabled=True)
216
216
  for key in seg_keys_to_hide : get_elmt_from_key(tab_dict['Segmentation'], key=key).update(disabled=True)
217
217
  for key in detection_keys_to_hide : get_elmt_from_key(tab_dict['Detection'], key=key).update(disabled=True)
218
218
 
@@ -51,8 +51,8 @@ def update_detection_tab(
51
51
  deconvolution_kernel_z = get_elmt_from_key(tab_elmt, key= 'deconvolution_kernel_z')
52
52
 
53
53
  #Clustering
54
- cluster_size = get_elmt_from_key(tab_elmt, key= 'cluster size')
55
- min_number_of_spot = get_elmt_from_key(tab_elmt, key= 'min number of spots')
54
+ cluster_size = get_elmt_from_key(tab_elmt, key= 'cluster_size')
55
+ min_number_of_spot = get_elmt_from_key(tab_elmt, key= 'min_number_of_spots')
56
56
 
57
57
  #segmentation and multichannel
58
58
  nucleus_channel_signal = get_elmt_from_key(tab_elmt, key= 'nucleus channel signal')
@@ -2,7 +2,7 @@ List of keys for batch 'values' dict instance :
2
2
 
3
3
  Batch_folder
4
4
  0
5
- image path
5
+ image_path
6
6
  3D stack
7
7
  multichannel
8
8
  Dense regions deconvolution
@@ -5,7 +5,6 @@ This subpackge contains code related to graphical interface
5
5
  from .prompts import _error_popup
6
6
  from .prompts import _warning_popup
7
7
  from .prompts import prompt
8
- from .prompts import prompt_with_help
9
8
  from .prompts import input_image_prompt
10
9
  from .prompts import hub_prompt
11
10
  from .prompts import detection_parameters_promt
@@ -13,7 +12,6 @@ from .prompts import coloc_prompt
13
12
  from .prompts import output_image_prompt
14
13
  from .prompts import ask_cancel_detection
15
14
  from .prompts import ask_cancel_segmentation
16
- from .prompts import ask_help
17
15
  from .prompts import ask_detection_confirmation
18
16
  from .prompts import prompt_restore_main_menu
19
17
 
@@ -2,17 +2,85 @@
2
2
  Submodule containing custom class for napari widgets
3
3
  """
4
4
  import numpy as np
5
- from napari.layers import Labels
5
+ import pandas as pd
6
+ import bigfish.detection as detection
7
+ from napari.layers import Labels, Points
6
8
  from magicgui import magicgui
7
9
 
8
- class cell_label_eraser :
10
+ from abc import ABC, abstractmethod
11
+ from typing import Tuple
12
+
13
+ class NapariWidget(ABC) :
14
+ """
15
+ Common superclass for custom widgets added to the napari interface during a run.
16
+ Each subclass has a specific purpose, but its widget can be accessed through the .widget attribute.
17
+ """
18
+ def __init__(self):
19
+ self.widget = self._create_widget()
20
+
21
+ @abstractmethod
22
+ def _create_widget(self) :
23
+ """
24
+ This should return a widget (a QWidget) that can be added to the napari viewer.
25
+ """
26
+ pass
27
+
28
+ class ClusterWidget(NapariWidget) :
29
+ """
30
+ Widgets for cluster interaction; all are initialised with a cluster_layer and a single_layer.
31
+ """
32
+ def __init__(self, cluster_layer : Points, single_layer : Points):
33
+ self.cluster_layer = cluster_layer
34
+ self.single_layer = single_layer
35
+ super().__init__()
36
+
37
+ class ClusterWizard(ABC) :
38
+ """
39
+ Common superclass for all classes that interact with the single layer and the cluster layer to synchronise them or modify their display.
40
+ Their action is started through the 'start_listening' method.
41
+ To be registered in CLUSTER_WIZARDS they should only take single_layer and cluster_layer as arguments.
42
+ """
43
+
44
+ def __init__(self, single_layer : Points, cluster_layer : Points):
45
+ self.single_layer = single_layer
46
+ self.cluster_layer = cluster_layer
47
+ self.start_listening()
48
+
49
+ def start_listening(self) :
50
+ """
51
+ This activates the class's behaviour. Returns None.
52
+ """
53
+ pass
54
+
55
+
56
+ CLUSTER_WIZARDS = []
57
+ def register_cluster_wizard(cls):
58
+ """
59
+ Helper decorator to register all cluster-related classes.
60
+ """
61
+ CLUSTER_WIZARDS.append(cls)
62
+ return cls
63
+
64
+ def initialize_all_cluster_wizards(single_layer: Points, cluster_layer: Points):
65
+ """
66
+ Initialize all wizards for cluster interaction in Napari
67
+ """
68
+ return [
69
+ cls(single_layer, cluster_layer)
70
+ for cls in CLUSTER_WIZARDS
71
+ ]
72
+
73
+
74
+ class CellLabelEraser(NapariWidget) :
9
75
  """
10
- Must be instanced within Napari Viewer definition range for update connection to work, cell deletion works fine anyway.
76
+ Widget for deleting cells from multiple label layers in a Napari viewer.
11
77
  """
12
78
  def __init__(self, label_list: 'list[Labels]'):
13
- self.widget = self._create_eraser(label_list)
14
- for label_layer in label_list :
79
+ self.label_list = label_list
80
+ if len(self.label_list) == 0 : raise ValueError("Empty label list")
81
+ for label_layer in self.label_list :
15
82
  label_layer.events.selected_label.connect((self, 'update'))
83
+ super().__init__()
16
84
 
17
85
  def update(self, event) :
18
86
  layer : Labels = event.source
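`register_cluster_wizard` and `initialize_all_cluster_wizards` form a small registry: decorating a `ClusterWizard` subclass stores it in `CLUSTER_WIZARDS`, and the initializer later instantiates every registered class against the two Points layers. A minimal usage sketch (DemoWizard is hypothetical, and the layers are created standalone purely for illustration):

```python
import numpy as np
from napari.layers import Points


@register_cluster_wizard
class DemoWizard(ClusterWizard):
    """Hypothetical wizard: report how many clusters currently exist."""

    def start_listening(self):
        print(f"Watching {len(self.cluster_layer.data)} clusters")


# Standalone layers just for the sketch; in the application they come from the viewer.
single_layer = Points(np.zeros((0, 3)), name="single spots")
cluster_layer = Points(np.zeros((0, 3)), name="clusters")

# Instantiates DemoWizard (and any other registered wizard) on the two layers.
wizards = initialize_all_cluster_wizards(single_layer, cluster_layer)
```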
@@ -20,74 +88,414 @@ class cell_label_eraser :
20
88
  self.widget.label_number.value = new_label
21
89
  self.widget.update()
22
90
 
23
- def _create_eraser(self, label_list: 'list[Labels]') :
91
+ def _create_widget(self) :
24
92
  @magicgui(
25
93
  call_button="Delete cell",
26
94
  auto_call=False
27
95
  )
28
96
  def label_eraser(label_number: int) -> None :
29
97
 
30
- for i, label in enumerate(label_list) :
31
- label_list[i].data[label.data == label_number] = 0
98
+ for i, label in enumerate(self.label_list) :
99
+ self.label_list[i].data[label.data == label_number] = 0
32
100
  label.refresh()
33
101
 
34
102
  return label_eraser
35
103
 
36
104
 
37
-
38
- class free_label_picker :
39
- def __init__(self, label_list):
40
- self.widget = self._create_free_label_picker(label_list)
105
+ class FreeLabelPicker(NapariWidget) :
106
+ """
107
+ This widget gives the user a free label number
108
+ """
109
+ def __init__(self, label_list : 'list[Labels]'):
110
+ self.label_list = label_list
111
+ if len(self.label_list) == 0 : raise ValueError("Empty label list")
112
+ super().__init__()
41
113
 
42
- def _create_free_label_picker(self, label_list : 'list[Labels]') :
114
+ def _create_widget(self) :
43
115
  @magicgui(
44
116
  call_button="Pick free label",
45
117
  auto_call=False
46
118
  )
47
119
  def label_pick()->None :
48
- max_list = [label_layer.data.max() for label_layer in label_list]
120
+ max_list = [label_layer.data.max() for label_layer in self.label_list]
49
121
  new_label = max(max_list) + 1
50
- for label_layer in label_list :
122
+ for label_layer in self.label_list :
51
123
  label_layer.selected_label = new_label
52
124
  label_layer.refresh()
53
125
 
54
126
  return label_pick
55
127
 
56
128
 
57
- class segmentation_reseter :
58
- def __init__(self, label_list):
59
- self.save = self._get_save(label_list)
60
- self.widget = self._create_widget(label_list)
129
+ class SegmentationReseter(NapariWidget) :
130
+ """
131
+ This widget resets the segmentation mask to the state it had when the instance was initiated.
132
+ """
133
+ def __init__(self, label_list: 'list[Labels]'):
134
+ self.label_list = label_list
135
+ if len(self.label_list) == 0 : raise ValueError("Empty label list")
136
+ self.save = self._get_save(self.label_list)
137
+ super().__init__()
61
138
 
62
139
 
63
140
  def _get_save(self, label_list : 'list[Labels]') :
64
141
  return [label.data.copy() for label in label_list]
65
142
 
66
- def _create_widget(self, label_list: 'list[Labels]') :
143
+ def _create_widget(self) :
67
144
  @magicgui(
68
145
  call_button= 'Reset segmentation',
69
146
  auto_call=False,
70
147
  )
71
148
  def reset_segmentation() -> None:
72
- for save_data, layer in zip(self.save, label_list) :
149
+ for save_data, layer in zip(self.save, self.label_list) :
73
150
  layer.data = save_data.copy()
74
151
  layer.refresh()
75
152
 
76
153
  return reset_segmentation
77
154
 
78
- class changes_propagater :
155
+ class ChangesPropagater(NapariWidget) :
156
+ """
157
+ Apply the changes across the vertical direction (Z stack); if conflicting values are found for a pixel, the max label is kept.
158
+ """
79
159
  def __init__(self, label_list):
80
- self.widget = self._create_widget(label_list)
160
+ self.label_list = label_list
161
+ super().__init__()
81
162
 
82
- def _create_widget(self, label_list: 'list[Labels]') :
163
+ def _create_widget(self) :
83
164
  @magicgui(
84
165
  call_button='Apply changes',
85
166
  auto_call=False,
86
167
  )
87
168
  def apply_changes() -> None:
88
- for layer in label_list :
169
+ for layer in self.label_list :
89
170
  slices = layer.data.shape[0]
90
171
  layer_2D = np.max(layer.data, axis=0)
91
172
  layer.data = np.repeat(layer_2D[np.newaxis], slices, axis=0)
92
173
  layer.refresh()
93
174
  return apply_changes
175
+
176
+ class ClusterIDSetter(ClusterWidget) :
177
+ """
178
+ Allow the user to assign selected single spots to a chosen cluster_id.
179
+ """
180
+ def __init__(self, single_layer : Points, cluster_layer : Points):
181
+ super().__init__(cluster_layer, single_layer)
182
+
183
+ def _create_widget(self):
184
+
185
+ @magicgui(
186
+ call_button= "Set cluster ID",
187
+ auto_call= False,
188
+ cluster_id= {'min' : -1},
189
+ )
190
+ def set_cluster_id(cluster_id : int) :
191
+ if cluster_id == -1 or cluster_id in self.cluster_layer.features['cluster_id'] :
192
+ spots_selection = list(self.single_layer.selected_data)
193
+ cluster_id_in_selection = list(self.single_layer.features.loc[spots_selection,["cluster_id"]].to_numpy().flatten()) + [cluster_id]
194
+ self.single_layer.features.loc[spots_selection,["cluster_id"]] = cluster_id
195
+
196
+ for cluster_id in np.unique(cluster_id_in_selection): # Then update number of spots in cluster
197
+ if cluster_id == -1 : continue
198
+ new_spot_number = len(self.single_layer.features.loc[self.single_layer.features['cluster_id'] == cluster_id])
199
+ self.cluster_layer.features.loc[self.cluster_layer.features['cluster_id'] == cluster_id, ["spot_number"]] = new_spot_number
200
+ self.cluster_layer.events.features()
201
+
202
+ self.cluster_layer.selected_data.clear()
203
+
204
+ return set_cluster_id
205
+
206
+ class ClusterMerger(ClusterWidget) :
207
+ """
208
+ Merge all selected clusters by replacing the cluster ids of the selected clusters and their belonging points with the minimum cluster id.
209
+ """
210
+ def __init__(self, cluster_layer, single_layer):
211
+ super().__init__(cluster_layer, single_layer)
212
+
213
+
214
+ def _create_widget(self):
215
+
216
+ @magicgui(
217
+ call_button="Merge Clusters",
218
+ auto_call=False
219
+ )
220
+ def merge_cluster()-> None :
221
+ selected_clusters = list(self.cluster_layer.selected_data)
222
+ if len(selected_clusters) == 0 : return None
223
+
224
+ selected_cluster_ids = self.cluster_layer.features.loc[selected_clusters,['cluster_id']].to_numpy().flatten()
225
+ new_cluster_id = selected_cluster_ids.min()
226
+
227
+ #Dropping selected clusters
228
+ self.cluster_layer.data = np.delete(self.cluster_layer.data, selected_clusters, axis=0)
229
+
230
+ #Updating spots
231
+ belonging_spots = self.single_layer.features.loc[self.single_layer.features['cluster_id'].isin(selected_cluster_ids)].index
232
+ self.single_layer.features.loc[belonging_spots, ["cluster_id"]] = new_cluster_id
233
+
234
+ #Creating new cluster
235
+ centroid = list(self.single_layer.data[belonging_spots].mean(axis=0).round().astype(int))
236
+ spot_number = len(belonging_spots)
237
+ self.cluster_layer.data = np.append(
238
+ self.cluster_layer.data,
239
+ [centroid],
240
+ axis=0
241
+ )
242
+
243
+ last_index = len(self.cluster_layer.data) - 1
244
+ self.cluster_layer.features.loc[last_index, ['cluster_id']] = new_cluster_id
245
+ self.cluster_layer.features.loc[last_index, ['spot_number']] = spot_number
246
+
247
+ self.cluster_layer.selected_data.clear()
248
+ self.cluster_layer.refresh()
249
+
250
+ return merge_cluster
251
+
252
+
253
+
254
+
255
+ class ClusterUpdater(NapariWidget) :
256
+ """
257
+ Relaunch the clustering algorithm, taking into account new spots, new clusters and deleted clusters.
258
+ """
259
+ def __init__(
260
+ self,
261
+ single_layer : Points,
262
+ cluster_layer : Points,
263
+ default_cluster_radius : int,
264
+ default_min_spot : int,
265
+ voxel_size : 'tuple[int]'
266
+ ):
267
+ self.single_layer = single_layer
268
+ self.cluster_layer = cluster_layer
269
+ self.cluster_radius = default_cluster_radius
270
+ self.min_spot = default_min_spot
271
+ self.voxel_size = voxel_size
272
+ super().__init__()
273
+
274
+ def _compute_clusters(
275
+ self,
276
+ cluster_radius : int,
277
+ min_spot : int
278
+ ) -> Tuple[np.ndarray, np.ndarray, dict, dict] :
279
+ """
280
+ Compute clusters using bigfish detection.detect_clusters and separate coordinates from features.
281
+ """
282
+
283
+ clustered_spots, clusters = detection.detect_clusters(
284
+ voxel_size=self.voxel_size,
285
+ spots= self.single_layer.data,
286
+ radius=cluster_radius,
287
+ nb_min_spots= min_spot
288
+ )
289
+
290
+ clusters_coordinates = clusters[:,:-2]
291
+ clusters_features = {
292
+ "spot_number" : clusters[:,-2],
293
+ "cluster_id" : clusters[:,-1],
294
+ }
295
+
296
+ spots_coordinates = clustered_spots[:,:-1]
297
+ spots_features = {
298
+ "cluster_id" : clustered_spots[:,-1]
299
+ }
300
+
301
+ return clusters_coordinates, spots_coordinates, clusters_features, spots_features
302
+
303
+ def _update_layers(
304
+ self,
305
+ clusters_coordinates : np.ndarray,
306
+ spots_coordinates : np.ndarray,
307
+ clusters_features : dict,
308
+ spots_features : dict
309
+ ) -> None :
310
+ """
311
+ Update Points layers inside napari viewer.
312
+ """
313
+
314
+ #Modify layers
315
+ self.single_layer.data = spots_coordinates
316
+ self.cluster_layer.data = clusters_coordinates
317
+ self.single_layer.features.loc[:,["cluster_id"]] = spots_features['cluster_id']
318
+ self.cluster_layer.features.loc[:,["cluster_id"]] = clusters_features['cluster_id']
319
+ self.cluster_layer.features.loc[:,["spot_number"]] = clusters_features['spot_number']
320
+
321
+ self.cluster_layer.selected_data.clear()
322
+ self.single_layer.refresh()
323
+ self.cluster_layer.refresh()
324
+
325
+
326
+
327
+ def _create_widget(self):
328
+
329
+ @magicgui(
330
+ call_button= "Relaunch Clustering",
331
+ auto_call= False
332
+ )
333
+ def relaunch_clustering(
334
+ cluster_radius : int = self.cluster_radius,
335
+ min_spot : int = self.min_spot,
336
+ ) :
337
+ clusters_coordinates, spots_coordinates, clusters_features, spots_features = self._compute_clusters(cluster_radius=cluster_radius, min_spot=min_spot)
338
+ self._update_layers(clusters_coordinates, spots_coordinates, clusters_features, spots_features )
339
+ self.cluster_radius = cluster_radius
340
+ self.min_spot = min_spot
341
+
342
+ return relaunch_clustering
343
+
344
+ class ClusterCreator(ClusterWidget) :
345
+ """
346
+ Create a cluster containing exactly the selected spots, located at the centroid of the selected points.
347
+ """
348
+ def __init__(self, cluster_layer, single_layer):
349
+ super().__init__(cluster_layer, single_layer)
350
+
351
+ def _create_widget(self):
352
+
353
+ @magicgui(
354
+ call_button= "Create Cluster",
355
+ auto_call=False
356
+ )
357
+ def create_foci() -> None :
358
+ selected_spots_idx = pd.Index(list(self.single_layer.selected_data))
359
+ free_spots_idx : pd.Index = self.single_layer.features.loc[self.single_layer.features['cluster_id'] == -1].index
360
+ selected_spots_idx = selected_spots_idx[selected_spots_idx.isin(free_spots_idx)]
361
+
362
+ spot_number = len(selected_spots_idx)
363
+ if spot_number == 0 :
364
+ print("To create a cluster please select at least 1 spot")
365
+ else :
366
+
367
+ #Foci creation
368
+ spots_coordinates = self.single_layer.data[selected_spots_idx]
369
+ new_cluster_id = self.cluster_layer.features['cluster_id'].max() + 1
370
+ centroid = list(spots_coordinates.mean(axis=0).round().astype(int))
371
+
372
+ self.cluster_layer.data = np.concatenate([
373
+ self.cluster_layer.data,
374
+ [centroid]
375
+ ], axis=0)
376
+
377
+ last_index = len(self.cluster_layer.data) - 1
378
+ self.cluster_layer.features.loc[last_index, ['cluster_id']] = new_cluster_id
379
+ self.cluster_layer.features.loc[last_index, ['spot_number']] = spot_number
380
+
381
+ #Update spots cluster_id
382
+ self.single_layer.features.loc[selected_spots_idx,["cluster_id"]] = new_cluster_id
383
+
384
+ return create_foci
385
+
386
+ @register_cluster_wizard
387
+ class ClusterInspector :
388
+ """
389
+ Listen to events on the cluster layer to color spots belonging to the selected clusters in green.
390
+ """
391
+ def __init__(self, single_layer : Points, cluster_layer : Points):
392
+ self.single_layer = single_layer
393
+ self.cluster_layer = cluster_layer
394
+ self.start_listening()
395
+
396
+ def reset_single_colors(self) -> None:
397
+ self.single_layer.face_color = [0,0,0,0] #transparent
398
+ self.single_layer.refresh()
399
+
400
+ def start_listening(self) :
401
+
402
+ def color_single_molecule_in_foci() -> None:
403
+ self.reset_single_colors()
404
+ selected_cluster_indices = self.cluster_layer.selected_data
405
+ for idx in selected_cluster_indices :
406
+ selected_cluster = self.cluster_layer.features.at[idx,"cluster_id"]
407
+ belonging_single_idex = self.single_layer.features.loc[self.single_layer.features['cluster_id'] == selected_cluster].index.to_numpy()
408
+ self.single_layer.face_color[belonging_single_idex] = [0,1,0,1] #Green
409
+ self.single_layer.refresh()
410
+
411
+ self.cluster_layer.selected_data.events.items_changed.connect(color_single_molecule_in_foci)
412
+
413
+ @register_cluster_wizard
414
+ class ClusterEraser(ClusterWizard) :
415
+ """
416
+ When a focus (cluster) is deleted, update the spots feature table accordingly.
417
+ """
418
+
419
+ def __init__(self, single_layer, cluster_layer):
420
+ super().__init__(single_layer, cluster_layer)
421
+
422
+ def start_listening(self):
423
+ self.original_remove_selected = self.cluster_layer.remove_selected
424
+
425
+ def remove_selected_cluster() :
426
+ selected_cluster = self.cluster_layer.selected_data
427
+ for cluster_idx in selected_cluster : #First we update spots data
428
+ cluster_id = self.cluster_layer.features.at[cluster_idx, "cluster_id"]
429
+ self.single_layer.features.loc[self.single_layer.features['cluster_id'] == cluster_id, ['cluster_id']] = -1
430
+
431
+ self.original_remove_selected() # Then we launch the usual napari method
432
+
433
+ self.cluster_layer.remove_selected = remove_selected_cluster
434
+
435
+ @register_cluster_wizard
436
+ class ClusterAdditionDisabler(ClusterWizard) :
437
+ """
438
+ Disable napari's point-addition tool on the cluster layer, forcing the user to go through the cluster creation tool (ClusterCreator) to add a new cluster.
439
+ """
440
+
441
+ def __init__(self, single_layer, cluster_layer):
442
+ super().__init__(single_layer, cluster_layer)
443
+
444
+ def start_listening(self):
445
+
446
+ def print_excuse(*args, **kwargs):
447
+ print("Spot addition is disabled for cluster layer. Use the foci creation tool below after selecting spots you want to cluster")
448
+
449
+ self.cluster_layer.add = print_excuse
450
+
451
+ @register_cluster_wizard
452
+ class SingleEraser(ClusterWizard) :
453
+ """
454
+ When a single spot is deleted, update the clusters feature table accordingly.
455
+ """
456
+
457
+ def __init__(self, single_layer, cluster_layer):
458
+ super().__init__(single_layer, cluster_layer)
459
+
460
+ def start_listening(self):
461
+ self._origin_remove_single = self.single_layer.remove_selected
462
+
463
+ def delete_single(*args, **kwargs) :
464
+ selected_single_idx = list(self.single_layer.selected_data)
465
+ modified_cluster_ids = self.single_layer.features.loc[selected_single_idx, ["cluster_id"]].to_numpy().flatten()
466
+
467
+ print(np.unique(modified_cluster_ids, return_counts=True))
468
+ for cluster_id, count in zip(*np.unique(modified_cluster_ids, return_counts=True)): # Then update number of spots in cluster
469
+ if cluster_id == -1 : continue
470
+ new_spot_number = len(self.single_layer.features.loc[self.single_layer.features['cluster_id'] == cluster_id]) - count #minus number of spot with this cluster id we remove
471
+ print("new spot number : ", new_spot_number)
472
+ print('target cluster id : ', cluster_id)
473
+ self.cluster_layer.features.loc[self.cluster_layer.features['cluster_id'] == cluster_id, ["spot_number"]] = new_spot_number
474
+ self._origin_remove_single()
475
+ self.cluster_layer.events.features()
476
+
477
+ self.single_layer.remove_selected = delete_single
478
+
479
+
480
+ @register_cluster_wizard
481
+ class ClusterCleaner(ClusterWizard) :
482
+ """
483
+ Deletes clusters if they drop to 0 single molecules.
484
+ """
485
+
486
+ def __init__(self, single_layer, cluster_layer):
487
+ super().__init__(single_layer, cluster_layer)
488
+
489
+ def start_listening(self):
490
+
491
+ def delete_empty_cluster() :
492
+ drop_idx = self.cluster_layer.features[self.cluster_layer.features['spot_number'] == 0].index
493
+ print("drop_idx : ",drop_idx)
494
+
495
+ if len(drop_idx) > 0 :
496
+ print("Removing {} empty cluster(s)".format(len(drop_idx)))
497
+ self.cluster_layer.data = np.delete(self.cluster_layer.data, drop_idx, axis=0)
498
+ self.cluster_layer.refresh()
499
+
500
+ self.cluster_layer.events.features.connect(delete_empty_cluster)
501
+
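All of the NapariWidget subclasses above expose their magicgui callable through the .widget attribute, so wiring them into a session amounts to docking that attribute and starting the registered wizards. A minimal sketch (layer names and feature columns are assumptions about how the pipeline sets things up, not code from the package):

```python
import napari
import numpy as np
import pandas as pd

viewer = napari.Viewer()

# Assumed layer setup: spots carry a 'cluster_id' feature; clusters also carry 'spot_number'.
single_layer = viewer.add_points(
    np.zeros((0, 3)),
    name="single spots",
    features=pd.DataFrame({"cluster_id": pd.Series(dtype=int)}),
)
cluster_layer = viewer.add_points(
    np.zeros((0, 3)),
    name="clusters",
    features=pd.DataFrame({"cluster_id": pd.Series(dtype=int), "spot_number": pd.Series(dtype=int)}),
)

# Dock one of the widgets defined above and start the layer-synchronisation wizards.
merger = ClusterMerger(cluster_layer, single_layer)
viewer.window.add_dock_widget(merger.widget, name="Merge Clusters")
initialize_all_cluster_wizards(single_layer, cluster_layer)

napari.run()
```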