small-fish-gui 1.6.0-py3-none-any.whl → 1.7.1-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -38,4 +38,4 @@ ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
  SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
  """
- __version__ = "1.6.0"
+ __version__ = "1.7.1"
@@ -0,0 +1,93 @@
+ """
+ Submodule containing custom class for napari widgets
+ """
+ import numpy as np
+ from napari.layers import Labels
+ from magicgui import magicgui
+
+ class cell_label_eraser :
+     """
+     Must be instanced within Napari Viewer definition range for update connection to work, cell deletion works fine anyway.
+     """
+     def __init__(self, label_list: 'list[Labels]'):
+         self.widget = self._create_eraser(label_list)
+         for label_layer in label_list :
+             label_layer.events.selected_label.connect((self, 'update'))
+
+     def update(self, event) :
+         layer : Labels = event.source
+         new_label = layer.selected_label
+         self.widget.label_number.value = new_label
+         self.widget.update()
+
+     def _create_eraser(self, label_list: 'list[Labels]') :
+         @magicgui(
+             call_button="Delete cell",
+             auto_call=False
+         )
+         def label_eraser(label_number: int) -> None :
+
+             for i, label in enumerate(label_list) :
+                 label_list[i].data[label.data == label_number] = 0
+                 label.refresh()
+
+         return label_eraser
+
+
+
+ class free_label_picker :
+     def __init__(self, label_list):
+         self.widget = self._create_free_label_picker(label_list)
+
+     def _create_free_label_picker(self, label_list : 'list[Labels]') :
+         @magicgui(
+             call_button="Pick free label",
+             auto_call=False
+         )
+         def label_pick()->None :
+             max_list = [label_layer.data.max() for label_layer in label_list]
+             new_label = max(max_list) + 1
+             for label_layer in label_list :
+                 label_layer.selected_label = new_label
+                 label_layer.refresh()
+
+         return label_pick
+
+
+ class segmentation_reseter :
+     def __init__(self, label_list):
+         self.save = self._get_save(label_list)
+         self.widget = self._create_widget(label_list)
+
+
+     def _get_save(self, label_list : 'list[Labels]') :
+         return [label.data.copy() for label in label_list]
+
+     def _create_widget(self, label_list: 'list[Labels]') :
+         @magicgui(
+             call_button= 'Reset segmentation',
+             auto_call=False,
+         )
+         def reset_segmentation() -> None:
+             for save_data, layer in zip(self.save, label_list) :
+                 layer.data = save_data.copy()
+                 layer.refresh()
+
+         return reset_segmentation
+
+ class changes_propagater :
+     def __init__(self, label_list):
+         self.widget = self._create_widget(label_list)
+
+     def _create_widget(self, label_list: 'list[Labels]') :
+         @magicgui(
+             call_button='Apply changes',
+             auto_call=False,
+         )
+         def apply_changes() -> None:
+             for layer in label_list :
+                 slices = layer.data.shape[0]
+                 layer_2D = np.max(layer.data, axis=0)
+                 layer.data = np.repeat(layer_2D[np.newaxis], slices, axis=0)
+                 layer.refresh()
+         return apply_changes
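The module added above is consumed later in this diff by show_segmentation, which instantiates each class on a shared list of Labels layers and docks the resulting magicgui widgets. A rough standalone sketch of that wiring (the dummy labels array is illustrative; the import path follows the RECORD entry small_fish_gui/gui/_napari_widgets.py):

    import numpy as np
    import napari
    from magicgui import widgets
    from small_fish_gui.gui._napari_widgets import (
        cell_label_eraser, free_label_picker, segmentation_reseter, changes_propagater,
    )

    viewer = napari.Viewer()
    # Placeholder 3D label stack; in the package this comes from the segmentation step.
    labels = viewer.add_labels(np.zeros((5, 256, 256), dtype=int), name='nucleus_label')
    label_list = [labels]

    # One wrapper instance per tool; each exposes its magicgui widget on .widget.
    eraser = cell_label_eraser(label_list)
    picker = free_label_picker(label_list)
    reseter = segmentation_reseter(label_list)
    propagater = changes_propagater(label_list)

    container = widgets.Container(
        widgets=[picker.widget, propagater.widget, reseter.widget, eraser.widget],
        labels=False,
    )
    viewer.window.add_dock_widget(container, name='SmallFish', area='left')
    napari.run()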
@@ -7,11 +7,19 @@ import napari.types
  import numpy as np
  import napari
 
+ from napari.layers import Labels
+
+ from magicgui import widgets
+ from magicgui import magicgui
+
  from bigfish.stack import check_parameter
+ from ._napari_widgets import cell_label_eraser, segmentation_reseter, changes_propagater, free_label_picker
  from ..utils import compute_anisotropy_coef
- from ._colocalisation import spots_multicolocalisation
+ from ..pipeline._colocalisation import spots_multicolocalisation
+
+ #Post detection
 
- def _update_clusters(new_clusters: np.ndarray, spots: np.ndarray, voxel_size, cluster_size, min_spot_number, shape) :
+ def _update_clusters(new_clusters: np.ndarray, spots: np.ndarray, voxel_size, cluster_size, shape) :
      if len(new_clusters) == 0 : return new_clusters
      if len(spots) == 0 : return np.empty(shape=(0,2+len(voxel_size)))
 
@@ -28,7 +36,7 @@ def _update_clusters(new_clusters: np.ndarray, spots: np.ndarray, voxel_size, cl
      new_clusters[:,-2] = spots_multicolocalisation(new_clusters[:,:-2], spots, radius_nm= cluster_size, voxel_size=voxel_size, image_shape=shape)
 
      # delete too small clusters
-     new_clusters = np.delete(new_clusters, new_clusters[:,-2] < min_spot_number, 0)
+     new_clusters = np.delete(new_clusters, new_clusters[:,-2] == 0, 0)
 
      return new_clusters
 
@@ -96,11 +104,15 @@ def correct_spots(image, spots, voxel_size= (1,1,1), clusters= None, cluster_siz
 
      if type(clusters) != type(None) :
          new_clusters = np.array(Viewer.layers['foci'].data, dtype= int)
-         new_clusters = _update_clusters(new_clusters, new_spots, voxel_size=voxel_size, cluster_size=cluster_size, min_spot_number=min_spot_number, shape=image.shape)
+         new_clusters = _update_clusters(new_clusters, new_spots, voxel_size=voxel_size, cluster_size=cluster_size, shape=image.shape)
      else : new_clusters = None
 
      return new_spots, new_clusters
 
+ # Segmentation
+
+
+
  def show_segmentation(
          nuc_image : np.ndarray,
          nuc_label : np.ndarray,
@@ -131,21 +143,35 @@ def show_segmentation(
      )
 
      #Init Napari viewer
-     Viewer = napari.Viewer(ndisplay=2, title= 'Show segmentation', axis_labels=['z','y','x'] if dim == 3 else ['y', 'x'], show= False)
+     Viewer = napari.Viewer(ndisplay=2, title= 'Show segmentation', axis_labels=['z','y','x'] if dim == 3 else ['y', 'x'])
 
-     # Adding channels
+     # Adding nuclei
      nuc_signal_layer = Viewer.add_image(nuc_image, name= "nucleus signal", blending= 'additive', colormap='blue', contrast_limits=[nuc_image.min(), nuc_image.max()])
-     nuc_label_layer = Viewer.add_labels(nuc_label, opacity= 0.5, blending= 'additive', name= 'nucleus_label',)
+     nuc_label_layer = Viewer.add_labels(nuc_label, opacity= 0.6, name= 'nucleus_label',)
      nuc_label_layer.preserve_labels = True
+     labels_layer_list = [nuc_label_layer]
 
-     #Adding labels
-     if type(cyto_image) != type(None) : Viewer.add_image(cyto_image, name= "cytoplasm signal", blending= 'additive', colormap='red', contrast_limits=[cyto_image.min(), cyto_image.max()])
+     #Adding cytoplasm
      if (type(cyto_label) != type(None) and not np.array_equal(cyto_label, nuc_label) ) or (type(cyto_label) != type(None) and cyto_label.max() == 0):
-         cyto_label_layer = Viewer.add_labels(cyto_label, opacity= 0.4, blending= 'additive', name= 'cytoplasm_label')
+         Viewer.add_image(cyto_image, name= "cytoplasm signal", blending= 'additive', colormap='red', contrast_limits=[cyto_image.min(), cyto_image.max()])
+         cyto_label_layer = Viewer.add_labels(cyto_label, opacity= 0.6, name= 'cytoplasm_label')
          cyto_label_layer.preserve_labels = True
-
+         labels_layer_list += [cyto_label_layer]
+
+     #Adding widget
+     label_eraser = cell_label_eraser(labels_layer_list)
+     label_picker = free_label_picker(labels_layer_list)
+     label_reseter = segmentation_reseter(labels_layer_list)
+     changes_applier = changes_propagater(labels_layer_list)
+
+     buttons_container = widgets.Container(widgets=[label_picker.widget, changes_applier.widget, label_reseter.widget], labels=False, layout='horizontal')
+     tools_container = widgets.Container(
+         widgets = [buttons_container, label_eraser.widget],
+         labels=False,
+     )
+     Viewer.window.add_dock_widget(tools_container, name='SmallFish', area='left')
+
      #Launch Napari
-     Viewer.show(block=False)
      napari.run()
 
      new_nuc_label = Viewer.layers['nucleus_label'].data
@@ -136,8 +136,8 @@ def output_image_prompt(filename) :
      excel_filename = values['filename'] + ".xlsx"
      feather_filename = values['filename'] + ".feather"
 
-     if not values['Excel'] and not values['Feather'] :
-         sg.popup("Please check at least one box : Excel/Feather")
+     if not values['Excel'] and not values['Feather'] and not values['csv'] :
+         sg.popup("Please check at least one box : Excel/Feather/csv")
          relaunch = True
      elif not os.path.isdir(values['folder']) :
          sg.popup("Incorrect folder")
@@ -10,11 +10,9 @@ def _cast_spot_to_tuple(spot) :
  def _cast_spots_to_tuple(spots) :
      return tuple(list(map(_cast_spot_to_tuple, spots)))
 
- def write_results(dataframe: pd.DataFrame, path:str, filename:str, do_excel= True, do_feather= False, do_csv=False, overwrite=False) :
+ def write_results(dataframe: pd.DataFrame, path:str, filename:str, do_excel= True, do_feather= False, do_csv=False, overwrite=False, reset_index=True) :
      check_parameter(dataframe= pd.DataFrame, path= str, filename = str, do_excel = bool, do_feather = bool)
 
-     dataframe.columns = dataframe.columns.astype(str) # assert columns header are string for feather
-
      if len(dataframe) == 0 : return True
      if not do_excel and not do_feather and not do_csv :
          return False
@@ -22,32 +20,38 @@ def write_results(dataframe: pd.DataFrame, path:str, filename:str, do_excel= Tru
      if not path.endswith('/') : path +='/'
      assert os.path.isdir(path)
 
+     #Casting cols name to str for feather format
+     index_dim = dataframe.columns.nlevels
+     if index_dim == 1 :
+         dataframe.columns = dataframe.columns.astype(str)
+     else :
+         casted_cols = [dataframe.columns.get_level_values(level).astype(str) for level in range(index_dim)]
+         casted_cols = zip(*casted_cols)
+         dataframe.columns = pd.MultiIndex.from_tuples(casted_cols)
 
      new_filename = filename
      i= 1
 
      if not overwrite :
-         while new_filename + '.xlsx' in os.listdir(path) or new_filename + '.feather' in os.listdir(path) or new_filename + '.csv' in os.listdir(path) :
+         while new_filename + '.xlsx' in os.listdir(path) or new_filename + '.parquet' in os.listdir(path) or new_filename + '.csv' in os.listdir(path) :
              new_filename = filename + '_{0}'.format(i)
              i+=1
 
-     if 'image' in dataframe.columns :
-         dataframe = dataframe.drop(['image'], axis=1)
+     COLUMNS_TO_DROP = ['image', 'spots', 'clusters', 'rna_coords', 'cluster_coords']
+     for col in COLUMNS_TO_DROP :
+         if col in dataframe.columns : dataframe = dataframe.drop(columns=col)
 
-     if 'spots' in dataframe.columns :
-         dataframe = dataframe.drop(['spots'], axis= 1)
-
-     if 'clusters' in dataframe.columns :
-         dataframe = dataframe.drop(['clusters'], axis= 1)
+     if reset_index : dataframe = dataframe.reset_index(drop=True)
 
-     if do_feather : dataframe.reset_index(drop=True).to_feather(path + new_filename + '.feather')
-     if do_csv : dataframe.reset_index(drop=True).to_csv(path + new_filename + '.csv', sep=";")
+     if do_csv : dataframe.to_csv(path + new_filename + '.csv', sep=";")
      if do_excel :
          if len(dataframe) < MAX_LEN_EXCEL :
-             dataframe.reset_index(drop=True).to_excel(path + new_filename + '.xlsx')
+             dataframe.to_excel(path + new_filename + '.xlsx')
          else :
              print("Error : Table too big to be saved in excel format.")
              return False
 
+     if do_feather :
+         dataframe.to_parquet(path + new_filename + '.parquet')
 
-     return True
+     return True
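write_results now casts every level of the column index to str before writing, since feather/parquet writers reject non-string column names. A minimal pandas-only sketch of that casting step, on a made-up two-level frame:

    import pandas as pd

    df = pd.DataFrame(
        [[1, 2], [3, 4]],
        columns=pd.MultiIndex.from_tuples([('count', 1), ('count', 2)]),
    )

    # Cast each level to str, then rebuild the MultiIndex from the zipped tuples.
    index_dim = df.columns.nlevels
    if index_dim == 1:
        df.columns = df.columns.astype(str)
    else:
        casted_cols = [df.columns.get_level_values(level).astype(str) for level in range(index_dim)]
        df.columns = pd.MultiIndex.from_tuples(zip(*casted_cols))

    print(df.columns.tolist())  # [('count', '1'), ('count', '2')]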
@@ -79,7 +79,7 @@ def spots_multicolocalisation(spots_list, anchor_list, radius_nm, image_shape, v
      Example in 2D
      --------
 
-     >>> Anchors Spots Radius (2px) Count
+     >>> Anchors spots Radius (2px) Count
      >>> 0 0 0 0 0 0 0 X 0 0 X 0 1 0 1 0 0 0 0
      >>> 0 X 0 0 0 0 X 0 0 X 0 0 1 1 1 1 0 0 0 0 0
      >>> 0 X 0 0 0 0 X X 0 0 0 0 1 1 2 0 0 0 0 --> 5
@@ -116,7 +116,7 @@ def spots_multicolocalisation(spots_list, anchor_list, radius_nm, image_shape, v
 
      return res
 
- def spots_colocalisation(image_shape, spot_list1:list, spot_list2:list, distance: float, voxel_size)-> int :
+ def spots_colocalisation(spot_list1:list, spot_list2:list, distance: float, voxel_size)-> int :
      """
      Return number of spots from spot_list1 located closer(large) than distance to at least one spot of spot_list2.
 
@@ -134,10 +134,9 @@ def spots_colocalisation(image_shape, spot_list1:list, spot_list2:list, distance
      if len(spot_list1[0]) != len(spot_list2[0]) :
          raise MissMatchError("dimensionalities of spots 1 and spots 2 don't match.")
 
-     if len(voxel_size) == 3 :
-         image_shape = image_shape[-3:]
-     else :
-         image_shape = image_shape[-2:]
+     shape1 = np.max(spot_list1,axis=0)
+     shape2 = np.max(spot_list2,axis=0)
+     image_shape = np.max([shape1, shape2],axis=0) + 1
 
      signal2 = reconstruct_boolean_signal(image_shape, spot_list2)
      mask = np.logical_not(signal2)
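spots_colocalisation no longer takes image_shape; it infers the smallest grid that contains every spot from both lists. A small numpy sketch of that inference with made-up (z, y, x) coordinates:

    import numpy as np

    spot_list1 = np.array([[0, 10, 12], [3, 40, 7]])
    spot_list2 = np.array([[1, 25, 30], [2, 5, 50]])

    # Per-axis maximum over both lists, plus one, gives a shape that holds every coordinate.
    shape1 = np.max(spot_list1, axis=0)
    shape2 = np.max(spot_list2, axis=0)
    image_shape = np.max([shape1, shape2], axis=0) + 1

    print(image_shape)  # [ 4 41 51]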
@@ -173,8 +172,7 @@ def initiate_colocalisation(result_tables) :
              break
      return colocalisation_distance
 
- @add_default_loading
- def launch_colocalisation(result_tables, result_dataframe, colocalisation_distance) :
+ def _global_coloc(acquisition_id1,acquisition_id2, result_dataframe, colocalisation_distance) :
      """
 
      Target :
@@ -190,8 +188,10 @@ def launch_colocalisation(result_tables, result_dataframe, colocalisation_distan
 
      """
 
-     acquisition1 = result_dataframe.iloc[result_tables[0]]
-     acquisition2 = result_dataframe.iloc[result_tables[1]]
+     acquisition1 = result_dataframe.loc[result_dataframe['acquisition_id'] == acquisition_id1]
+     acquisition2 = result_dataframe.loc[result_dataframe['acquisition_id'] == acquisition_id2]
+
+     acquisition_couple = (acquisition_id1,acquisition_id2)
 
      voxel_size1 = acquisition1.at['voxel_size']
      voxel_size2 = acquisition2.at['voxel_size']
@@ -208,7 +208,6 @@ def launch_colocalisation(result_tables, result_dataframe, colocalisation_distan
      else :
          shape = shape1
 
-     acquisition_couple = (acquisition1.at['acquisition_id'], acquisition2.at['acquisition_id'])
 
      spots1 = acquisition1['spots']
      spots2 = acquisition2['spots']
@@ -217,8 +216,8 @@ def launch_colocalisation(result_tables, result_dataframe, colocalisation_distan
      spot2_total = len(spots2)
 
      try :
-         fraction_spots1_coloc_spots2 = spots_colocalisation(image_shape=shape, spot_list1=spots1, spot_list2=spots2, distance= colocalisation_distance, voxel_size=voxel_size) / spot1_total
-         fraction_spots2_coloc_spots1 = spots_colocalisation(image_shape=shape, spot_list1=spots2, spot_list2=spots1, distance= colocalisation_distance, voxel_size=voxel_size) / spot2_total
+         fraction_spots1_coloc_spots2 = spots_colocalisation(spot_list1=spots1, spot_list2=spots2, distance= colocalisation_distance, voxel_size=voxel_size) / spot1_total
+         fraction_spots2_coloc_spots1 = spots_colocalisation(spot_list1=spots2, spot_list2=spots1, distance= colocalisation_distance, voxel_size=voxel_size) / spot2_total
      except MissMatchError as e :
          sg.popup(str(e))
          fraction_spots1_coloc_spots2 = np.NaN
@@ -227,11 +226,11 @@ def launch_colocalisation(result_tables, result_dataframe, colocalisation_distan
      if 'clusters' in acquisition1.index :
          try :
              clusters1 = acquisition1['clusters'][:,:len(voxel_size)]
-             fraction_spots2_coloc_cluster1 = spots_colocalisation(image_shape=shape, spot_list1=spots2, spot_list2=clusters1, distance= colocalisation_distance, voxel_size=voxel_size) / spot2_total
+             fraction_spots2_coloc_cluster1 = spots_colocalisation(spot_list1=spots2, spot_list2=clusters1, distance= colocalisation_distance, voxel_size=voxel_size) / spot2_total
          except MissMatchError as e :
              sg.popup(str(e))
              fraction_spots2_coloc_cluster1 = np.NaN
-         except TypeError : # Clusters not computed
+         except TypeError : # clusters not computed
              fraction_spots2_coloc_cluster1 = np.NaN
 
 
@@ -240,8 +239,8 @@ def launch_colocalisation(result_tables, result_dataframe, colocalisation_distan
      if 'clusters' in acquisition2.index :
          try :
              clusters2 = acquisition2['clusters'][:,:len(voxel_size)]
-             fraction_spots1_coloc_cluster2 = spots_colocalisation(image_shape=shape, spot_list1=spots1, spot_list2=clusters2, distance= colocalisation_distance, voxel_size=voxel_size) / spot1_total
-         except MissMatchError as e :# Clusters not computed
+             fraction_spots1_coloc_cluster2 = spots_colocalisation(spot_list1=spots1, spot_list2=clusters2, distance= colocalisation_distance, voxel_size=voxel_size) / spot1_total
+         except MissMatchError as e :# clusters not computed
              sg.popup(str(e))
              fraction_spots1_coloc_cluster2 = np.NaN
          except TypeError :
@@ -272,4 +271,159 @@ def launch_colocalisation(result_tables, result_dataframe, colocalisation_distan
      coloc_df['name2'] = acquisition2.at['name']
      coloc_df = coloc_df.loc[:,['name1','name2'] + coloc_df_col]
 
-     return coloc_df
+     return coloc_df
+
+ def _cell_coloc(
+         acquisition_id1: int,
+         acquisition_id2: int,
+         result_dataframe : pd.DataFrame,
+         cell_dataframe : pd.DataFrame,
+         colocalisation_distance : float,
+         ) :
+
+     acquisition1 = result_dataframe.loc[result_dataframe['acquisition_id'] == acquisition_id1]
+     acquisition2 = result_dataframe.loc[result_dataframe['acquisition_id'] == acquisition_id2]
+
+     acquisition_name_id1 = acquisition1['name'].iat[0]
+     acquisition_name_id2 = acquisition2['name'].iat[0]
+     cluster_radius1 = acquisition1['cluster size'].iat[0]
+     cluster_radius2 = acquisition2['cluster size'].iat[0]
+     result_dataframe = result_dataframe.set_index('acquisition_id', drop=False)
+     coloc_name = '{0}nm_{1}{2}_{3}{4}'.format(colocalisation_distance, acquisition_id1,acquisition_name_id1, acquisition_id2,acquisition_name_id2)
+
+     #Getting shape
+     if not result_dataframe.at[acquisition_id1, 'reordered_shape'] == result_dataframe.at[acquisition_id2, 'reordered_shape'] :
+         raise ValueError("Selected acquisitions have different shapes. Most likely they don't belong to the same fov.")
+
+     #Getting voxel_size
+     if not result_dataframe.at[acquisition_id1, 'voxel_size'] == result_dataframe.at[acquisition_id2, 'voxel_size'] :
+         raise ValueError("Selected acquisitions have different voxel_size. Most likely they don't belong to the same fov.")
+     voxel_size = result_dataframe.at[acquisition_id1, 'voxel_size']
+
+     #Selecting relevant cells in Cell table
+     cell_dataframe = cell_dataframe.loc[(cell_dataframe['acquisition_id'] == acquisition_id1)|(cell_dataframe['acquisition_id'] == acquisition_id2)]
+
+     #Putting spots lists in 2 cols for corresponding cells
+     pivot_values_columns = ['rna_coords', 'total_rna_number']
+     if 'clusters' in acquisition2.columns or 'clusters' in acquisition1.columns :
+         pivot_values_columns.extend(['cluster_coords','foci_number'])
+     colocalisation_df = cell_dataframe.pivot(
+         columns=['name', 'acquisition_id'],
+         values= pivot_values_columns,
+         index= 'cell_id'
+     )
+     #spots _vs spots
+     colocalisation_df[("spots_to_spots_count",coloc_name,"forward")] = colocalisation_df['rna_coords'].apply(
+         lambda x: spots_colocalisation(
+             spot_list1= x[(acquisition_name_id1,acquisition_id1)],
+             spot_list2= x[(acquisition_name_id2,acquisition_id2)],
+             distance=colocalisation_distance,
+             voxel_size=voxel_size
+         ),axis=1
+     )
+     colocalisation_df[("spots_to_spots_fraction",coloc_name,"forward")] = colocalisation_df[("spots_to_spots_count",coloc_name,"forward")].astype(float) / colocalisation_df[('total_rna_number',acquisition_name_id1,acquisition_id1)].astype(float)
+
+     colocalisation_df[("spots_to_spots_count",coloc_name,"backward")] = colocalisation_df['rna_coords'].apply(
+         lambda x: spots_colocalisation(
+             spot_list1= x[(acquisition_name_id2,acquisition_id2)],
+             spot_list2= x[(acquisition_name_id1,acquisition_id1)],
+             distance=colocalisation_distance,
+             voxel_size=voxel_size
+         ),axis=1
+     )
+     colocalisation_df[("spots_to_spots_fraction",coloc_name,"backward")] = colocalisation_df[("spots_to_spots_count",coloc_name,"backward")].astype(float) / colocalisation_df[('total_rna_number',acquisition_name_id2,acquisition_id2)].astype(float)
+
+     if acquisition2['Cluster computation'].iat[0] :
+         if len(acquisition2['clusters'].iat[0]) > 0 :
+
+             #spots to clusters
+             colocalisation_df[("spots_to_clusters_count",coloc_name,"forward")] = colocalisation_df.apply(
+                 lambda x: spots_colocalisation(
+                     spot_list1= x[('rna_coords',acquisition_name_id1,acquisition_id1)],
+                     spot_list2= x[('cluster_coords',acquisition_name_id2,acquisition_id2)][:,:len(voxel_size)],
+                     distance=colocalisation_distance + cluster_radius2,
+                     voxel_size=voxel_size
+                 ),axis=1
+             )
+             colocalisation_df[("spots_to_clusters_fraction",coloc_name,"forward")] = colocalisation_df[("spots_to_clusters_count",coloc_name,"forward")].astype(float) / colocalisation_df[('total_rna_number',acquisition_name_id1,acquisition_id1)].astype(float)
+
+     if acquisition1['Cluster computation'].iat[0] :
+         if len(acquisition1['clusters'].iat[0]) > 0 :
+             colocalisation_df[("spots_to_clusters_count",coloc_name,"backward")] = colocalisation_df.apply(
+                 lambda x: spots_colocalisation(
+                     spot_list1= x[('rna_coords',acquisition_name_id2,acquisition_id2)],
+                     spot_list2= x[('cluster_coords',acquisition_name_id1,acquisition_id1)][:,:len(voxel_size)],
+                     distance=colocalisation_distance + cluster_radius1,
+                     voxel_size=voxel_size
+                 ),axis=1
+             )
+             colocalisation_df[("spots_to_clusters_fraction",coloc_name,"backward")] = colocalisation_df[("spots_to_clusters_count",coloc_name,"backward")].astype(float) / colocalisation_df[('total_rna_number',acquisition_name_id2,acquisition_id2)].astype(float)
+
+     if acquisition2['Cluster computation'].iat[0] and acquisition1['Cluster computation'].iat[0] :
+         if len(acquisition1['clusters'].iat[0]) > 0 and len(acquisition2['clusters'].iat[0]) > 0 :
+             #clusters to clusters
+             colocalisation_df[("clusters_to_clusters_count",coloc_name,"forward")] = colocalisation_df.apply(
+                 lambda x: spots_colocalisation(
+                     spot_list1= x[('cluster_coords',acquisition_name_id1,acquisition_id1)][:,:len(voxel_size)],
+                     spot_list2= x[('cluster_coords',acquisition_name_id2,acquisition_id2)][:,:len(voxel_size)],
+                     distance=colocalisation_distance + cluster_radius1 + cluster_radius2,
+                     voxel_size=voxel_size
+                 ),axis=1
+             )
+             colocalisation_df[("clusters_to_clusters_fraction",coloc_name,"forward")] = colocalisation_df[("clusters_to_clusters_count",coloc_name,"forward")].astype(float) / colocalisation_df[('foci_number',acquisition_name_id1,acquisition_id1)].astype(float)
+
+             colocalisation_df[("clusters_to_clusters_count",coloc_name,"backward")] = colocalisation_df.apply(
+                 lambda x: spots_colocalisation(
+                     spot_list1= x[('cluster_coords',acquisition_name_id2,acquisition_id2)][:,:len(voxel_size)],
+                     spot_list2= x[('cluster_coords',acquisition_name_id1,acquisition_id1)][:,:len(voxel_size)],
+                     distance=colocalisation_distance + cluster_radius1 + cluster_radius2,
+                     voxel_size=voxel_size
+                 ),axis=1
+             )
+             colocalisation_df[("clusters_to_clusters_fraction",coloc_name,"backward")] = colocalisation_df[("clusters_to_clusters_count",coloc_name,"backward")].astype(float) / colocalisation_df[('foci_number',acquisition_name_id2,acquisition_id2)].astype(float)
+
+     colocalisation_df = colocalisation_df.sort_index(axis=0).sort_index(axis=1, level=0)
+
+     if 'cluster_coords' in cell_dataframe.columns : colocalisation_df = colocalisation_df.drop('cluster_coords', axis=1)
+     colocalisation_df = colocalisation_df.drop('rna_coords', axis=1)
+
+     return colocalisation_df
+
+ @add_default_loading
+ def launch_colocalisation(result_tables, result_dataframe, cell_result_dataframe, colocalisation_distance, global_coloc_df, cell_coloc_df: pd.DataFrame) :
+
+     acquisition1 = result_dataframe.iloc[result_tables[0]]
+     acquisition2 = result_dataframe.iloc[result_tables[1]]
+
+     acquisition_id1, acquisition_id2 = (acquisition1.at['acquisition_id'], acquisition2.at['acquisition_id'])
+
+     if acquisition_id1 in cell_result_dataframe['acquisition_id'] and acquisition_id2 in cell_result_dataframe['acquisition_id'] :
+         print("Launching cell to cell colocalisation.")
+         new_coloc = _cell_coloc(
+             acquisition_id1 = acquisition_id1,
+             acquisition_id2 = acquisition_id2,
+             result_dataframe = result_dataframe,
+             cell_dataframe=cell_result_dataframe,
+             colocalisation_distance=colocalisation_distance
+         )
+         cell_coloc_df = pd.concat([
+             cell_coloc_df,
+             new_coloc,
+         ], axis=1).sort_index(axis=1, level=0)
+
+
+     else :
+         print("Launching global colocalisation.")
+         new_coloc = _global_coloc(
+             acquisition_id1=acquisition_id1,
+             acquisition_id2=acquisition_id2,
+             result_dataframe=result_dataframe,
+             colocalisation_distance=colocalisation_distance,
+         )
+         global_coloc_df = pd.concat([
+             global_coloc_df,
+             new_coloc,
+         ], axis=0).reset_index(drop=True)
+
+
+     return global_coloc_df, cell_coloc_df
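_cell_coloc builds its per-cell table by pivoting the cell results into a three-level column index of (value, name, acquisition_id); the rename and delete helpers further down manipulate levels 1 and 2 of exactly that index. A toy sketch of the pivot shape, with made-up cell ids and acquisition names (assumes a pandas version where DataFrame.pivot accepts list arguments, 1.1 or later):

    import pandas as pd

    cells = pd.DataFrame({
        'cell_id':          [1, 2, 1, 2],
        'name':             ['rna_A', 'rna_A', 'rna_B', 'rna_B'],
        'acquisition_id':   [0, 0, 1, 1],
        'total_rna_number': [12, 7, 30, 22],
    })

    # Cells become rows; columns become (value, name, acquisition_id) tuples.
    pivoted = cells.pivot(
        columns=['name', 'acquisition_id'],
        values=['total_rna_number'],
        index='cell_id',
    )
    print(pivoted.columns.nlevels)  # 3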
@@ -7,7 +7,7 @@ from skimage.measure import label
  from ..gui.layout import _segmentation_layout
  from ..gui import prompt, prompt_with_help, ask_cancel_segmentation
  from ..interface import open_image
- from ._napari_wrapper import show_segmentation as napari_show_segmentation
+ from ..gui.napari import show_segmentation as napari_show_segmentation
  from .utils import from_label_get_centeroidscoords
  from matplotlib.colors import ListedColormap
 
@@ -149,7 +149,7 @@ def launch_segmentation(image: np.ndarray, user_parameters: dict) :
          relaunch=True
          values['other_nucleus_image'] = user_parameters.setdefault('other_nucleus_image', None)
 
-     elif nucleus_image.shape != image[cytoplasm_channel] :
+     elif nucleus_image.shape != image[cytoplasm_channel].shape :
          sg.popup("Nucleus image shape missmatched. Expected same shape as cytoplasm_image \ncytoplasm shape : {0}, nucleus shape : {1}".format(image[cytoplasm_channel].shape, nucleus_image.shape))
          nucleus_image = None
          relaunch=True
@@ -118,7 +118,7 @@ def add_detection(user_parameters, segmentation_done, acquisition_id, cytoplasm_
      )
      return new_results_df, new_cell_results_df, acquisition_id, user_parameters, segmentation_done, cytoplasm_label, nucleus_label
 
- def save_results(result_df, cell_result_df, coloc_df) :
+ def save_results(result_df, cell_result_df, global_coloc_df, cell_coloc_df) :
      if len(result_df) != 0 :
          dic = output_image_prompt(filename=result_df.iloc[0].at['filename'])
 
@@ -128,34 +128,46 @@ def save_results(result_df, cell_result_df, coloc_df) :
          do_excel = dic['Excel']
          do_feather = dic['Feather']
          do_csv = dic['csv']
+
+         if 'rna_coords' in cell_result_df.columns : cell_result_df = cell_result_df.drop(columns='rna_coords')
+
          sucess1 = write_results(result_df, path= path, filename=filename, do_excel= do_excel, do_feather= do_feather, do_csv=do_csv)
          sucess2 = write_results(cell_result_df, path= path, filename=filename + '_cell_result', do_excel= do_excel, do_feather= do_feather, do_csv=do_csv)
-         sucess3 = write_results(coloc_df, path= path, filename=filename + '_coloc_result', do_excel= do_excel, do_feather= do_feather, do_csv=do_csv)
-         if sucess1 and sucess2 and sucess3 : sg.popup("Sucessfully saved at {0}.".format(path))
+         sucess3 = write_results(global_coloc_df, path= path, filename=filename + 'global_coloc_result', do_excel= do_excel, do_feather= do_feather, do_csv=do_csv)
+         sucess4 = write_results(cell_coloc_df, path= path, filename=filename + 'cell2cell_coloc_result', do_excel= do_excel, do_feather= do_feather, do_csv=do_csv, reset_index=False)
+         if all([sucess1,sucess2, sucess3, sucess4,]) : sg.popup("Sucessfully saved at {0}.".format(path))
 
      else :
          dic = None
         sg.popup('No results to save.')
 
- def compute_colocalisation(result_tables, result_dataframe) :
+ def compute_colocalisation(result_tables, result_dataframe, cell_result_dataframe, global_coloc_df, cell_coloc_df) :
      colocalisation_distance = initiate_colocalisation(result_tables)
 
      if colocalisation_distance == False :
-         res_coloc = pd.DataFrame() # popup handled in initiate_colocalisation
+         pass
      else :
-         res_coloc = launch_colocalisation(result_tables, result_dataframe=result_dataframe, colocalisation_distance=colocalisation_distance)
+         global_coloc_df, cell_coloc_df = launch_colocalisation(
+             result_tables,
+             result_dataframe=result_dataframe,
+             cell_result_dataframe=cell_result_dataframe,
+             colocalisation_distance=colocalisation_distance,
+             global_coloc_df=global_coloc_df,
+             cell_coloc_df=cell_coloc_df,
+         )
 
-     return res_coloc
+     return global_coloc_df, cell_coloc_df
 
  def delete_acquisitions(selected_acquisitions : pd.DataFrame,
          result_df : pd.DataFrame,
          cell_result_df : pd.DataFrame,
-         coloc_df : pd.DataFrame
+         global_coloc_df : pd.DataFrame,
+         cell_coloc_df : pd.DataFrame,
          ) :
 
      if len(result_df) == 0 :
          sg.popup("No acquisition to delete.")
-         return result_df, cell_result_df, coloc_df
+         return result_df, cell_result_df, global_coloc_df
 
      if len(selected_acquisitions) == 0 :
          sg.popup("Please select the acquisitions you would like to delete.")
@@ -169,40 +181,62 @@ def delete_acquisitions(selected_acquisitions : pd.DataFrame,
          print("{0} cells deleted.".format(len(cell_result_df_drop_idx)))
          cell_result_df = cell_result_df.drop(cell_result_df_drop_idx, axis=0)
 
-     if len(coloc_df) > 0 :
-         coloc_df_drop_idx = coloc_df[(coloc_df["acquisition_id_1"].isin(acquisition_ids)) | (coloc_df['acquisition_id_2'].isin(acquisition_ids))].index
+     if len(global_coloc_df) > 0 :
+         coloc_df_drop_idx = global_coloc_df[(global_coloc_df["acquisition_id_1"].isin(acquisition_ids)) | (global_coloc_df['acquisition_id_2'].isin(acquisition_ids))].index
          print("{0} coloc measurement deleted.".format(len(coloc_df_drop_idx)))
-         coloc_df = coloc_df.drop(coloc_df_drop_idx, axis=0)
+         global_coloc_df = global_coloc_df.drop(coloc_df_drop_idx, axis=0)
+
+     if len(cell_coloc_df) > 0 :
+         for acquisition_id in acquisition_ids :
+             cell_coloc_df = cell_coloc_df.drop(acquisition_id, axis=1, level=2) #Delete spot number and foci number
+             coloc_columns = cell_coloc_df.columns.get_level_values(1)
+             coloc_columns = coloc_columns[coloc_columns.str.contains(str(acquisition_id))]
+             cell_coloc_df = cell_coloc_df.drop(labels=coloc_columns, axis=1, level=1)
 
      result_df = result_df.drop(result_drop_idx, axis=0)
 
-     return result_df, cell_result_df, coloc_df
+     return result_df, cell_result_df, global_coloc_df, cell_coloc_df
 
  def rename_acquisitions(
          selected_acquisitions : pd.DataFrame,
         result_df : pd.DataFrame,
          cell_result_df : pd.DataFrame,
-         coloc_df : pd.DataFrame
+         global_coloc_df : pd.DataFrame,
+         cell_coloc_df : pd.DataFrame,
          ) :
 
      if len(result_df) == 0 :
          sg.popup("No acquisition to rename.")
-         return result_df, cell_result_df, coloc_df
+         return result_df, cell_result_df, global_coloc_df
 
      if len(selected_acquisitions) == 0 :
          sg.popup("Please select the acquisitions you would like to rename.")
 
      else :
          name = rename_prompt()
-         print("entered : ",name)
-         if not name : return result_df, cell_result_df, coloc_df #User didn't put a name or canceled
+         if not name : return result_df, cell_result_df, global_coloc_df #User didn't put a name or canceled
          name : str = name.replace(' ','_')
          acquisition_ids = list(result_df.iloc[list(selected_acquisitions)]['acquisition_id'])
+         old_names = list(result_df.loc[result_df['acquisition_id'].isin(acquisition_ids)]['name'])
+         old_names.sort(key=len) #We order this list by elmt length
+         old_names.reverse() #From longer to smaller
 
          result_df.loc[result_df['acquisition_id'].isin(acquisition_ids),['name']] = name
          if len(cell_result_df) > 0 : cell_result_df.loc[cell_result_df['acquisition_id'].isin(acquisition_ids),['name']] = name
-         if len(coloc_df) > 0 :
-             coloc_df.loc[coloc_df['acquisition_id_1'].isin(acquisition_ids), ['name1']] = name
-             coloc_df.loc[coloc_df['acquisition_id_2'].isin(acquisition_ids), ['name2']] = name
+         if len(global_coloc_df) > 0 :
+             global_coloc_df.loc[global_coloc_df['acquisition_id_1'].isin(acquisition_ids), ['name1']] = name
+             global_coloc_df.loc[global_coloc_df['acquisition_id_2'].isin(acquisition_ids), ['name2']] = name
+         if len(cell_coloc_df) > 0 :
+             target_columns = cell_coloc_df.columns.get_level_values(1)
+             for old_name in old_names : #Note list was ordered by elmt len (decs) to avoid conflict when one name is contained by another one. if the shorter is processed first then the longer will not be able to be properly renamed.
+                 target_columns = target_columns.str.replace(old_name, name)
+
+             new_columns = zip(
+                 cell_coloc_df.columns.get_level_values(0),
+                 target_columns,
+                 cell_coloc_df.columns.get_level_values(2),
+             )
+
+             cell_coloc_df.columns = pd.MultiIndex.from_tuples(new_columns)
 
-     return result_df, cell_result_df, coloc_df
+     return result_df, cell_result_df, global_coloc_df, cell_coloc_df
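rename_acquisitions rewrites level 1 of that cell-to-cell column index with str.replace and rebuilds the MultiIndex from zipped tuples. A toy sketch of the same pattern on made-up column tuples:

    import pandas as pd

    columns = pd.MultiIndex.from_tuples([
        ('spots_to_spots_count', '300nm_0old_1other', 'forward'),
        ('spots_to_spots_fraction', '300nm_0old_1other', 'forward'),
    ])
    df = pd.DataFrame([[4, 0.5]], columns=columns)

    # Substitute the old acquisition name inside level 1; leave levels 0 and 2 untouched.
    renamed = df.columns.get_level_values(1).str.replace('old', 'new')
    df.columns = pd.MultiIndex.from_tuples(zip(
        df.columns.get_level_values(0),
        renamed,
        df.columns.get_level_values(2),
    ))
    print(df.columns.get_level_values(1).tolist())  # ['300nm_0new_1other', '300nm_0new_1other']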
@@ -5,7 +5,7 @@ Contains code to handle detection as well as bigfish wrappers related to spot de
  from ._preprocess import ParameterInputError
  from ._preprocess import check_integrity, convert_parameters_types
  from ._signaltonoise import compute_snr_spots
- from ._napari_wrapper import correct_spots, _update_clusters, threshold_selection
+ from ..gui.napari import correct_spots, _update_clusters, threshold_selection
  from ..gui import add_default_loading
  from ..gui import detection_parameters_promt, input_image_prompt
  from ..utils import compute_anisotropy_coef
@@ -487,6 +487,7 @@ def launch_cell_extraction(acquisition_id, spots, clusters, image, nucleus_signa
      features_names += ['nucleus_mean_signal', 'nucleus_median_signal', 'nucleus_max_signal', 'nucleus_min_signal']
      features_names += ['snr_mean', 'snr_median', 'snr_std']
      features_names += ['cell_center_coord','foci_number','foci_in_nuc_number']
+     features_names += ['rna_coords','cluster_coords']
 
      result_frame = pd.DataFrame()
 
@@ -566,6 +567,7 @@ def launch_cell_extraction(acquisition_id, spots, clusters, image, nucleus_signa
          features += [cell_center, foci_number, foci_in_nuc_number]
 
          features = [acquisition_id, cell_id, cell_bbox] + features
+         features += [rna_coords, foci_coords]
 
          result_frame = pd.concat([
              result_frame,
@@ -632,7 +634,7 @@ def launch_detection(
 
      if do_clustering :
          clusters = launch_clustering(spots, user_parameters, hide_loading = hide_loading) #012 are coordinates #3 is number of spots per cluster, #4 is cluster index
-         clusters = _update_clusters(clusters, spots, voxel_size=user_parameters['voxel_size'], cluster_size=user_parameters['cluster size'], min_spot_number= user_parameters['min number of spots'], shape=image.shape)
+         clusters = _update_clusters(clusters, spots, voxel_size=user_parameters['voxel_size'], cluster_size=user_parameters['cluster size'], shape=image.shape)
 
      else : clusters = None
 
@@ -702,6 +704,7 @@ def launch_features_computation(acquisition_id, image, nucleus_signal, spots, cl
      cell_result_dframe['name'] = name
      frame_results = frame_results.loc[:,['name'] + result_col]
      cell_result_dframe = cell_result_dframe.loc[:,['name'] + cell_result_col]
+     cell_result_dframe['total_rna_number'] = cell_result_dframe['nb_rna_in_nuc'] + cell_result_dframe['nb_rna_out_nuc']
 
      return frame_results, cell_result_dframe
 
@@ -14,15 +14,31 @@ user_parameters = dict() # Very important instance containg all choice from user
  acquisition_id = -1
  result_df = pd.DataFrame()
  cell_result_df = pd.DataFrame()
- coloc_df = pd.DataFrame()
+ global_coloc_df = pd.DataFrame()
+ cell_coloc_df = pd.DataFrame()
  segmentation_done = False
  cytoplasm_label = None
  nucleus_label = None
 
+ #Use for dev purpose
+ MAKE_NEW_SAVE = False
+ PATH = "/home/floricslimani/Documents/small_fish_workshop/save"
+ LOAD_SAVE = False
+
  while True : #Break this loop to close small_fish
-     result_df = result_df.reset_index(drop=True)
-     cell_result_df = cell_result_df.reset_index(drop=True)
-     coloc_df = coloc_df.reset_index(drop=True)
+
+     if LOAD_SAVE :
+         result_df = pd.read_csv(PATH + "/result.csv", sep='|')
+         cell_result_df = pd.read_csv(PATH + "/cell_result_df.csv", sep='|')
+         global_coloc_df = pd.read_csv(PATH + "/global_coloc_df.csv", sep='|')
+         cell_coloc_df = pd.read_csv(PATH + "/cell_coloc_df.csv", sep='|')
+
+
+     else :
+         result_df = result_df.reset_index(drop=True)
+         cell_result_df = cell_result_df.reset_index(drop=True)
+         global_coloc_df = global_coloc_df.reset_index(drop=True)
+         cell_coloc_df = cell_coloc_df.reset_index(drop=True)
      try :
          event, values = hub_prompt(result_df, segmentation_done)
 
@@ -43,25 +59,26 @@ while True : #Break this loop to close small_fish
              save_results(
                  result_df=result_df,
                  cell_result_df=cell_result_df,
-                 coloc_df=coloc_df
+                 global_coloc_df=global_coloc_df,
+                 cell_coloc_df = cell_coloc_df,
              )
 
          elif event == 'Compute colocalisation' :
              result_tables = values.setdefault('result_table', []) #Contains the lines selected by the user on the sum-up array.
 
-             res_coloc= compute_colocalisation(
+             global_coloc_df, cell_coloc_df = compute_colocalisation(
                  result_tables,
-                 result_dataframe=result_df
+                 result_dataframe=result_df,
+                 cell_result_dataframe=cell_result_df,
+                 global_coloc_df=global_coloc_df,
+                 cell_coloc_df=cell_coloc_df,
              )
 
-             coloc_df = pd.concat(
-                 [coloc_df,res_coloc],
-                 axis= 0)
-
          elif event == "Reset results" :
              result_df = pd.DataFrame()
              cell_result_df = pd.DataFrame()
-             coloc_df = pd.DataFrame()
+             global_coloc_df = pd.DataFrame()
+             cell_coloc_df = pd.DataFrame()
              acquisition_id = -1
              segmentation_done = False
              cytoplasm_label = None
@@ -74,7 +91,7 @@ while True : #Break this loop to close small_fish
 
          elif event == "Delete acquisitions" :
              selected_acquisitions = values.setdefault('result_table', []) #Contains the lines selected by the user on the sum-up array.
-             result_df, cell_result_df, coloc_df = delete_acquisitions(selected_acquisitions, result_df, cell_result_df, coloc_df)
+             result_df, cell_result_df, global_coloc_df, cell_coloc_df = delete_acquisitions(selected_acquisitions, result_df, cell_result_df, global_coloc_df, cell_coloc_df)
 
          elif event == "Batch detection" :
              result_df, cell_result_df, acquisition_id, user_parameters, segmentation_done, cytoplasm_label,nucleus_label = batch_promp(
@@ -86,10 +103,16 @@ while True : #Break this loop to close small_fish
 
          elif event == "Rename acquisition" :
              selected_acquisitions = values.setdefault('result_table', []) #Contains the lines selected by the user on the sum-up array.
-             result_df, cell_result_df, coloc_df = rename_acquisitions(selected_acquisitions, result_df, cell_result_df, coloc_df)
+             result_df, cell_result_df, global_coloc_df, cell_coloc_df = rename_acquisitions(selected_acquisitions, result_df, cell_result_df, global_coloc_df, cell_coloc_df)
 
          else :
              break
+
+         if MAKE_NEW_SAVE :
+             result_df.reset_index(drop=True).to_csv(PATH + "/result.csv", sep='|')
+             cell_result_df.reset_index(drop=True).to_csv(PATH + "/cell_result_df.csv", sep='|')
+             cell_coloc_df.reset_index(drop=True).to_csv(PATH + "/cell_coloc_df.csv", sep='|')
+             global_coloc_df.reset_index(drop=True).to_csv(PATH + "/global_coloc_df.csv", sep='|')
 
      except Exception as error :
          sg.popup(str(error))
@@ -1,6 +1,6 @@
  Metadata-Version: 2.3
  Name: small_fish_gui
- Version: 1.6.0
+ Version: 1.7.1
  Summary: Small Fish is a python application for the analysis of smFish images. It provides a ready to use graphical interface to combine famous python packages for cell analysis without any need for coding.
  Project-URL: Homepage, https://github.com/2Echoes/small_fish
  Project-URL: Issues, https://github.com/2Echoes/small_fish/issues
@@ -2,7 +2,7 @@ small_fish_gui/.readthedocs.yaml,sha256=r2T0e_In8X8l0_ZwgPvuoWQ9c0PE9bSpFzV2W6Ez
  small_fish_gui/LICENSE,sha256=-iFy8VGBYs5VsHglKpk4D-hxqQ2jMJaqmfq_ulIzDks,1303
  small_fish_gui/README.md,sha256=4RpEXKZW5vH6sUWeZb88yr1TLLPi20PqOk7KdA9O9Hk,4234
  small_fish_gui/Segmentation example.jpg,sha256=opfiSbjmfF6z8kBs08sg_FNR2Om0AcMPU5sSwSLHdoQ,215038
- small_fish_gui/__init__.py,sha256=5p-W0XXzvadf_q5TZCbYLXaSLIBLOBlhwt7rTtg760w,1941
+ small_fish_gui/__init__.py,sha256=a5xjNERakIQns7eLzivIHcpSK_YbhUvicolmTosN_ic,1941
  small_fish_gui/__main__.py,sha256=jjFNnf-l4jCJI16epq2KOaKmgtUAe9lSNdPj5fpxrDk,1143
  small_fish_gui/napari_detection_example.png,sha256=l5EZlrbXemLiGqb5inSVsD6Kko1Opz528-go-fBfrw8,977350
  small_fish_gui/requirements.txt,sha256=9OMfUAnLdHevq6w_fVoDmVmkSMJeFofkOK_86_fu9C0,321
@@ -21,32 +21,33 @@ small_fish_gui/batch/values.py,sha256=C1hRlCpTIDsg89DMKIIW5NUxeK876ODRUuJ2D-mJv6
  small_fish_gui/batch/values.txt,sha256=PVxzIaaF6DGFRx_CMaStXZI6OrbjNub1-jR3pklXVjc,991
  small_fish_gui/docs/conf.py,sha256=6YU8UEpTenKGMiz7H4aG42Of72_n4uLadDfHJvziqRk,16
  small_fish_gui/gui/__init__.py,sha256=xQ_BfYcnQmKZtx_0leO4OmbkLNLv49ZPqEu_UXMgmDc,867
+ small_fish_gui/gui/_napari_widgets.py,sha256=8IMppaPZU37ANdZwTZOhwqCEly0hokzYL7UIVIixGns,3022
  small_fish_gui/gui/animation.py,sha256=rnNP5FPp06Hu-R33c4AVTCknALBbxT2YlsKFCXHAp9k,981
  small_fish_gui/gui/general_help_screenshot.png,sha256=X4E6Td5f04K-pBUPDaBJRAE3D5b8fuEdiAUKhkIDr-0,54210
  small_fish_gui/gui/help_module.py,sha256=PmgkkDs7bZ2-po83A_PK9uldQcHjehYmqre21nYb6DQ,9600
  small_fish_gui/gui/layout.py,sha256=oB8Kg6s0rCA8yB4WM8JQY8BpjoPiBqTGb6YoOKDqEA8,13855
  small_fish_gui/gui/mapping_help_screenshot.png,sha256=HcuRh5TYciUogUasza5vZ_QSshaiHsskQK23mh9vQS8,34735
- small_fish_gui/gui/prompts.py,sha256=rdO2X_whpBgMzCrysFoJEUWfZwbC7mZOp_DYctftNT0,13634
+ small_fish_gui/gui/napari.py,sha256=XiahTyq7QEQAuF6EK3-e--3-A8yBPVn0oaVZZyJo0qo,8607
+ small_fish_gui/gui/prompts.py,sha256=CONXMmSa0a-l93fyXAPz7h1skql0BEZtLzWJMVepPQ0,13660
  small_fish_gui/gui/segmentation_help_screenshot.png,sha256=rbSgIydT0gZtfMh1qk4mdMbEIyCaakvHmxa2eOrLwO0,118944
  small_fish_gui/interface/__init__.py,sha256=PB86R4Y9kV80aGZ-vP0ZW2KeaCwGbBbCtFCmbN2yl28,275
  small_fish_gui/interface/image.py,sha256=X1L7S5svxUwdoDcI3QM1PbN-c4Nz5w30hixq3IgqSn8,1130
- small_fish_gui/interface/output.py,sha256=5jC37tobgXgsiVJYx3RWaES09I-YFmbXKk65lHflTHc,1867
+ small_fish_gui/interface/output.py,sha256=eMz9QXBf6LIWuNGvd6Z3Yswuz-Jg9pezUN3OJSb_MIg,2090
  small_fish_gui/interface/parameters.py,sha256=lUugD-4W2TZyJF3TH1q70TlktEYhhPtcPCrvxm5Dk50,36
  small_fish_gui/interface/testing.py,sha256=MY5-GcPOUHagcrwR8A7QOjAmjZIDVC8Wz3NibLe3KQw,321
  small_fish_gui/pipeline/__init__.py,sha256=_Ey20GG8fJtqZvixbXNNYX6wTWMnCUArmARPqsNEhuQ,743
- small_fish_gui/pipeline/_colocalisation.py,sha256=pWyObNoACWNW04OYzKf7bgq2OezWpEWh7Vl5DShTI1A,10118
+ small_fish_gui/pipeline/_colocalisation.py,sha256=vVHDOvAfqaRFUuX-8HBtDLVrXgoSeUOxa19hmm7lllo,18978
  small_fish_gui/pipeline/_custom_errors.py,sha256=tQ-AUhgzIFpK30AZiQQrtHCHyGVRDdAoIjzL0Fk-1pA,43
- small_fish_gui/pipeline/_napari_wrapper.py,sha256=Yjpo-uXQxLfLESsWr4kIBZgQNVXJtcTFcrsvS9sk4No,7832
  small_fish_gui/pipeline/_preprocess.py,sha256=ddocTXwc0vYq2VGUbWYaN9eUiHPyfiCuBpYQ2p6rQ8g,13084
- small_fish_gui/pipeline/_segmentation.py,sha256=gcanidUOC9nULF6UffWLFmfIup-EOMxeckBz7Xldp3I,18852
+ small_fish_gui/pipeline/_segmentation.py,sha256=bB7U_EhebFAssyZcGimnz706aNLbajVMOUj6nbVflwA,18854
  small_fish_gui/pipeline/_signaltonoise.py,sha256=7A9t7xu7zghI6cr201Ldm-LjJ5NOuP56VSeJ8KIzcUo,8497
- small_fish_gui/pipeline/actions.py,sha256=YGW7IFwtRksf0nktl5C2uVLP-Z-7vVznIBxIdJGZJZw,9447
- small_fish_gui/pipeline/detection.py,sha256=rnA0qMqr0dNUAUPd1ANYZ7dyfllsDQrihO_gjEpenvA,34817
- small_fish_gui/pipeline/main.py,sha256=7iBHO8xNEvdONaUo78jkShBkHFuwgGryMoI4YwP4coI,3817
+ small_fish_gui/pipeline/actions.py,sha256=JqcEYtVf3rr-YB_C8SF9U0dpoBktjUhm_Ko0FxZbxy4,11636
+ small_fish_gui/pipeline/detection.py,sha256=ORs3OR7MYIz4l1GX3Ayjzpxp2poRnTHhoicJdF7XL_E,34976
+ small_fish_gui/pipeline/main.py,sha256=0DrN9dXZJTqLOD0tZaHTVFE1oolzLPU1w5LNgWC3iuU,5072
  small_fish_gui/pipeline/spots.py,sha256=yHvqf1eD25UltELpzcouYXhLkxiXI_mOL1ANSzXK5pw,1907
  small_fish_gui/pipeline/test.py,sha256=w4ZMGDmUDXxVgWTlZ2TKw19W8q5gcE9gLMKe0SWnRrw,2827
  small_fish_gui/pipeline/utils.py,sha256=run6qtqCAe_mFnE3o1CnmF1xBBmK3ydgc8-jOV9P-_w,448
- small_fish_gui-1.6.0.dist-info/METADATA,sha256=lNNHwO008d4nmzHaYpeNKi9DFptJ56O0GxfmxWiRxI4,2567
- small_fish_gui-1.6.0.dist-info/WHEEL,sha256=1yFddiXMmvYK7QYTqtRNtX66WJ0Mz8PYEiEUoOUUxRY,87
- small_fish_gui-1.6.0.dist-info/licenses/LICENSE,sha256=-iFy8VGBYs5VsHglKpk4D-hxqQ2jMJaqmfq_ulIzDks,1303
- small_fish_gui-1.6.0.dist-info/RECORD,,
+ small_fish_gui-1.7.1.dist-info/METADATA,sha256=6RAZQN04nvJvaJkLSx3Hquu7YVuozCna6Qg60fmSb4w,2567
+ small_fish_gui-1.7.1.dist-info/WHEEL,sha256=1yFddiXMmvYK7QYTqtRNtX66WJ0Mz8PYEiEUoOUUxRY,87
+ small_fish_gui-1.7.1.dist-info/licenses/LICENSE,sha256=-iFy8VGBYs5VsHglKpk4D-hxqQ2jMJaqmfq_ulIzDks,1303
+ small_fish_gui-1.7.1.dist-info/RECORD,,