celldetective 1.4.2__py3-none-any.whl → 1.5.0b0__py3-none-any.whl

Files changed (151)
  1. celldetective/__init__.py +25 -0
  2. celldetective/__main__.py +62 -43
  3. celldetective/_version.py +1 -1
  4. celldetective/extra_properties.py +477 -399
  5. celldetective/filters.py +192 -97
  6. celldetective/gui/InitWindow.py +541 -411
  7. celldetective/gui/__init__.py +0 -15
  8. celldetective/gui/about.py +44 -39
  9. celldetective/gui/analyze_block.py +120 -84
  10. celldetective/gui/base/__init__.py +0 -0
  11. celldetective/gui/base/channel_norm_generator.py +335 -0
  12. celldetective/gui/base/components.py +249 -0
  13. celldetective/gui/base/feature_choice.py +92 -0
  14. celldetective/gui/base/figure_canvas.py +52 -0
  15. celldetective/gui/base/list_widget.py +133 -0
  16. celldetective/gui/{styles.py → base/styles.py} +92 -36
  17. celldetective/gui/base/utils.py +33 -0
  18. celldetective/gui/base_annotator.py +900 -767
  19. celldetective/gui/classifier_widget.py +6 -22
  20. celldetective/gui/configure_new_exp.py +777 -671
  21. celldetective/gui/control_panel.py +635 -524
  22. celldetective/gui/dynamic_progress.py +449 -0
  23. celldetective/gui/event_annotator.py +2023 -1662
  24. celldetective/gui/generic_signal_plot.py +1292 -944
  25. celldetective/gui/gui_utils.py +899 -1289
  26. celldetective/gui/interactions_block.py +658 -0
  27. celldetective/gui/interactive_timeseries_viewer.py +447 -0
  28. celldetective/gui/json_readers.py +48 -15
  29. celldetective/gui/layouts/__init__.py +5 -0
  30. celldetective/gui/layouts/background_model_free_layout.py +537 -0
  31. celldetective/gui/layouts/channel_offset_layout.py +134 -0
  32. celldetective/gui/layouts/local_correction_layout.py +91 -0
  33. celldetective/gui/layouts/model_fit_layout.py +372 -0
  34. celldetective/gui/layouts/operation_layout.py +68 -0
  35. celldetective/gui/layouts/protocol_designer_layout.py +96 -0
  36. celldetective/gui/pair_event_annotator.py +3130 -2435
  37. celldetective/gui/plot_measurements.py +586 -267
  38. celldetective/gui/plot_signals_ui.py +724 -506
  39. celldetective/gui/preprocessing_block.py +395 -0
  40. celldetective/gui/process_block.py +1678 -1831
  41. celldetective/gui/seg_model_loader.py +580 -473
  42. celldetective/gui/settings/__init__.py +0 -7
  43. celldetective/gui/settings/_cellpose_model_params.py +181 -0
  44. celldetective/gui/settings/_event_detection_model_params.py +95 -0
  45. celldetective/gui/settings/_segmentation_model_params.py +159 -0
  46. celldetective/gui/settings/_settings_base.py +77 -65
  47. celldetective/gui/settings/_settings_event_model_training.py +752 -526
  48. celldetective/gui/settings/_settings_measurements.py +1133 -964
  49. celldetective/gui/settings/_settings_neighborhood.py +574 -488
  50. celldetective/gui/settings/_settings_segmentation_model_training.py +779 -564
  51. celldetective/gui/settings/_settings_signal_annotator.py +329 -305
  52. celldetective/gui/settings/_settings_tracking.py +1304 -1094
  53. celldetective/gui/settings/_stardist_model_params.py +98 -0
  54. celldetective/gui/survival_ui.py +422 -312
  55. celldetective/gui/tableUI.py +1665 -1701
  56. celldetective/gui/table_ops/_maths.py +295 -0
  57. celldetective/gui/table_ops/_merge_groups.py +140 -0
  58. celldetective/gui/table_ops/_merge_one_hot.py +95 -0
  59. celldetective/gui/table_ops/_query_table.py +43 -0
  60. celldetective/gui/table_ops/_rename_col.py +44 -0
  61. celldetective/gui/thresholds_gui.py +382 -179
  62. celldetective/gui/viewers/__init__.py +0 -0
  63. celldetective/gui/viewers/base_viewer.py +700 -0
  64. celldetective/gui/viewers/channel_offset_viewer.py +331 -0
  65. celldetective/gui/viewers/contour_viewer.py +394 -0
  66. celldetective/gui/viewers/size_viewer.py +153 -0
  67. celldetective/gui/viewers/spot_detection_viewer.py +341 -0
  68. celldetective/gui/viewers/threshold_viewer.py +309 -0
  69. celldetective/gui/workers.py +304 -126
  70. celldetective/log_manager.py +92 -0
  71. celldetective/measure.py +1895 -1478
  72. celldetective/napari/__init__.py +0 -0
  73. celldetective/napari/utils.py +1025 -0
  74. celldetective/neighborhood.py +1914 -1448
  75. celldetective/preprocessing.py +1620 -1220
  76. celldetective/processes/__init__.py +0 -0
  77. celldetective/processes/background_correction.py +271 -0
  78. celldetective/processes/compute_neighborhood.py +894 -0
  79. celldetective/processes/detect_events.py +246 -0
  80. celldetective/processes/measure_cells.py +565 -0
  81. celldetective/processes/segment_cells.py +760 -0
  82. celldetective/processes/track_cells.py +435 -0
  83. celldetective/processes/train_segmentation_model.py +694 -0
  84. celldetective/processes/train_signal_model.py +265 -0
  85. celldetective/processes/unified_process.py +292 -0
  86. celldetective/regionprops/_regionprops.py +358 -317
  87. celldetective/relative_measurements.py +987 -710
  88. celldetective/scripts/measure_cells.py +313 -212
  89. celldetective/scripts/measure_relative.py +90 -46
  90. celldetective/scripts/segment_cells.py +165 -104
  91. celldetective/scripts/segment_cells_thresholds.py +96 -68
  92. celldetective/scripts/track_cells.py +198 -149
  93. celldetective/scripts/train_segmentation_model.py +324 -201
  94. celldetective/scripts/train_signal_model.py +87 -45
  95. celldetective/segmentation.py +844 -749
  96. celldetective/signals.py +3514 -2861
  97. celldetective/tracking.py +30 -15
  98. celldetective/utils/__init__.py +0 -0
  99. celldetective/utils/cellpose_utils/__init__.py +133 -0
  100. celldetective/utils/color_mappings.py +42 -0
  101. celldetective/utils/data_cleaning.py +630 -0
  102. celldetective/utils/data_loaders.py +450 -0
  103. celldetective/utils/dataset_helpers.py +207 -0
  104. celldetective/utils/downloaders.py +197 -0
  105. celldetective/utils/event_detection/__init__.py +8 -0
  106. celldetective/utils/experiment.py +1782 -0
  107. celldetective/utils/image_augmenters.py +308 -0
  108. celldetective/utils/image_cleaning.py +74 -0
  109. celldetective/utils/image_loaders.py +926 -0
  110. celldetective/utils/image_transforms.py +335 -0
  111. celldetective/utils/io.py +62 -0
  112. celldetective/utils/mask_cleaning.py +348 -0
  113. celldetective/utils/mask_transforms.py +5 -0
  114. celldetective/utils/masks.py +184 -0
  115. celldetective/utils/maths.py +351 -0
  116. celldetective/utils/model_getters.py +325 -0
  117. celldetective/utils/model_loaders.py +296 -0
  118. celldetective/utils/normalization.py +380 -0
  119. celldetective/utils/parsing.py +465 -0
  120. celldetective/utils/plots/__init__.py +0 -0
  121. celldetective/utils/plots/regression.py +53 -0
  122. celldetective/utils/resources.py +34 -0
  123. celldetective/utils/stardist_utils/__init__.py +104 -0
  124. celldetective/utils/stats.py +90 -0
  125. celldetective/utils/types.py +21 -0
  126. {celldetective-1.4.2.dist-info → celldetective-1.5.0b0.dist-info}/METADATA +1 -1
  127. celldetective-1.5.0b0.dist-info/RECORD +187 -0
  128. {celldetective-1.4.2.dist-info → celldetective-1.5.0b0.dist-info}/WHEEL +1 -1
  129. tests/gui/test_new_project.py +129 -117
  130. tests/gui/test_project.py +127 -79
  131. tests/test_filters.py +39 -15
  132. tests/test_notebooks.py +8 -0
  133. tests/test_tracking.py +232 -13
  134. tests/test_utils.py +123 -77
  135. celldetective/gui/base_components.py +0 -23
  136. celldetective/gui/layouts.py +0 -1602
  137. celldetective/gui/processes/compute_neighborhood.py +0 -594
  138. celldetective/gui/processes/measure_cells.py +0 -360
  139. celldetective/gui/processes/segment_cells.py +0 -499
  140. celldetective/gui/processes/track_cells.py +0 -303
  141. celldetective/gui/processes/train_segmentation_model.py +0 -270
  142. celldetective/gui/processes/train_signal_model.py +0 -108
  143. celldetective/gui/table_ops/merge_groups.py +0 -118
  144. celldetective/gui/viewers.py +0 -1354
  145. celldetective/io.py +0 -3663
  146. celldetective/utils.py +0 -3108
  147. celldetective-1.4.2.dist-info/RECORD +0 -123
  148. /celldetective/{gui/processes → processes}/downloader.py +0 -0
  149. {celldetective-1.4.2.dist-info → celldetective-1.5.0b0.dist-info}/entry_points.txt +0 -0
  150. {celldetective-1.4.2.dist-info → celldetective-1.5.0b0.dist-info}/licenses/LICENSE +0 -0
  151. {celldetective-1.4.2.dist-info → celldetective-1.5.0b0.dist-info}/top_level.txt +0 -0
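
Beyond the version bump, the file list shows a broad reorganization: the monolithic celldetective/io.py and celldetective/utils.py modules are removed in favor of a celldetective/utils/ package, and the GUI-embedded workers under celldetective/gui/processes/ move to a top-level celldetective/processes/ package. As a hypothetical illustration only (based solely on the file moves listed above, not on any documented public API), code that imported from the old locations would presumably need updates along these lines:

    # Hypothetical import updates implied by the file moves above (old names not verified here).

    # 1.4.2 layout (modules now removed):
    # from celldetective.gui.processes.compute_neighborhood import ...   # gui/processes/ is gone
    # from celldetective.utils import ...                                # utils.py split into a package

    # 1.5.0b0 layout (modules added in this release, as used in the hunk below):
    from celldetective.processes.compute_neighborhood import NeighborhoodProcess
    from celldetective.utils.data_loaders import get_position_table, get_position_pickle
    from celldetective.utils.image_loaders import locate_labels

The single 894-line hunk reproduced below appears to correspond to the new celldetective/processes/compute_neighborhood.py module (file 78, +894 -0), which supersedes the removed celldetective/gui/processes/compute_neighborhood.py worker (file 137).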
@@ -0,0 +1,894 @@
+from multiprocessing import Process
+import time
+import os
+
+from celldetective.utils.image_loaders import locate_labels
+from celldetective.utils.data_loaders import get_position_table, get_position_pickle
+
+from tqdm import tqdm
+import numpy as np
+import pandas as pd
+from art import tprint
+
+from celldetective.neighborhood import (
+    _fill_distance_neighborhood_at_t,
+    set_live_status,
+    compute_attention_weight,
+    compute_neighborhood_metrics,
+    mean_neighborhood_after_event,
+    mean_neighborhood_before_event,
+    _compute_mask_contact_dist_map,
+    _fill_contact_neighborhood_at_t,
+)
+from celldetective.utils.data_cleaning import extract_identity_col
+from scipy.spatial.distance import cdist
+from celldetective.relative_measurements import measure_pair_signals_at_position
+
+
+class NeighborhoodProcess(Process):
+
+    def __init__(self, queue=None, process_args=None):
+
+        super().__init__()
+
+        self.queue = queue
+
+        if process_args is not None:
+            for key, value in process_args.items():
+                setattr(self, key, value)
+
+        self.column_labels = {
+            "track": "TRACK_ID",
+            "time": "FRAME",
+            "x": "POSITION_X",
+            "y": "POSITION_Y",
+        }
+
+        tprint("Neighborhood")
+
+        if not hasattr(self, "well_progress"):
+            self.well_progress = 0
+        if not hasattr(self, "pos_progress"):
+            self.pos_progress = 0
+        if not hasattr(self, "measure_pairs"):
+            self.measure_pairs = False
+
+        self.sum_done = 0
+        self.t0 = time.time()
+
+    def mask_contact_neighborhood(
+        self,
+        setA,
+        setB,
+        labelsA,
+        labelsB,
+        distance,
+        mode="two-pop",
+        status=None,
+        not_status_option=None,
+        compute_cum_sum=True,
+        attention_weight=True,
+        symmetrize=True,
+        include_dead_weight=True,
+        column_labels={
+            "track": "TRACK_ID",
+            "time": "FRAME",
+            "x": "POSITION_X",
+            "y": "POSITION_Y",
+            "mask_id": "class_id",
+        },
+    ):
+
+        if setA is not None and setB is not None:
+            setA, setB, status = set_live_status(setA, setB, status, not_status_option)
+        else:
+            return None, None
+
+        # Check distance option
+        if not isinstance(distance, list):
+            distance = [distance]
+
+        cl = []
+        for s in [setA, setB]:
+
+            # Check whether data can be tracked
+            temp_column_labels = column_labels.copy()
+
+            id_col = extract_identity_col(s)
+            temp_column_labels.update({"track": id_col})
+            if id_col == "ID":
+                compute_cum_sum = False
+
+            cl.append(temp_column_labels)
+
+        setA = setA.loc[~setA[cl[0]["track"]].isnull(), :].copy()
+        setB = setB.loc[~setB[cl[1]["track"]].isnull(), :].copy()
+
+        if labelsB is None:
+            labelsB = [None] * len(labelsA)
+
+        for d in distance:
+            # loop over each provided distance
+            if mode == "two-pop":
+                neigh_col = f"neighborhood_2_contact_{d}_px"
+            elif mode == "self":
+                neigh_col = f"neighborhood_self_contact_{d}_px"
+            else:
+                print("Please provide a valid mode between `two-pop` and `self`...")
+                return None
+
+            setA[neigh_col] = np.nan
+            setA[neigh_col] = setA[neigh_col].astype(object)
+
+            setB[neigh_col] = np.nan
+            setB[neigh_col] = setB[neigh_col].astype(object)
+
+            # Loop over each available timestep
+            timeline = np.unique(
+                np.concatenate(
+                    [setA[cl[0]["time"]].to_numpy(), setB[cl[1]["time"]].to_numpy()]
+                )
+            ).astype(int)
+
+            self.sum_done = 0
+            self.t0 = time.time()
+
+            for t in tqdm(timeline):
+
+                setA_t = setA.loc[setA[cl[0]["time"]] == t, :].copy()
+                setB_t = setB.loc[setB[cl[1]["time"]] == t, :].copy()
+
+                if len(setA_t) > 0 and len(setB_t) > 0:
+                    dist_map, intersection_map = _compute_mask_contact_dist_map(
+                        setA_t,
+                        setB_t,
+                        labelsA[t],
+                        labelsB[t],
+                        distance=d,
+                        mode=mode,
+                        column_labelsA=cl[0],
+                        column_labelsB=cl[1],
+                    )
+
+                    d_filter = 1.0e05
+                    if attention_weight:
+                        status_A = setA_t[status[0]].to_numpy()
+                        ids_A = setA_t[cl[0]["track"]].to_numpy()
+                        weights, closest_A = compute_attention_weight(
+                            dist_map,
+                            d_filter,
+                            status_A,
+                            ids_A,
+                            axis=1,
+                            include_dead_weight=include_dead_weight,
+                        )
+                    else:
+                        weights = None
+                        closest_A = None
+
+                    _fill_contact_neighborhood_at_t(
+                        t,
+                        setA,
+                        setB,
+                        dist_map,
+                        intersection_map=intersection_map,
+                        attention_weight=attention_weight,
+                        include_dead_weight=include_dead_weight,
+                        symmetrize=symmetrize,
+                        compute_cum_sum=compute_cum_sum,
+                        weights=weights,
+                        closest_A=closest_A,
+                        neigh_col=neigh_col,
+                        column_labelsA=cl[0],
+                        column_labelsB=cl[1],
+                        statusA=status[0],
+                        statusB=status[1],
+                        d_filter=d_filter,
+                    )
+
+                self.sum_done += 1 / len(timeline) * 100
+                mean_exec_per_step = (time.time() - self.t0) / (
+                    self.sum_done * len(timeline) / 100 + 1
+                )
+                pred_time = (
+                    len(timeline) - (self.sum_done * len(timeline) / 100 + 1)
+                ) * mean_exec_per_step
+                self.queue.put(
+                    {
+                        "frame_progress": self.sum_done,
+                        "frame_time": f"Time left: {round(pred_time, 1)}s",
+                        "well_progress": self.well_progress,
+                        "pos_progress": self.pos_progress,
+                    }
+                )
+
+        return setA, setB
+
+    def distance_cut_neighborhood(
+        self,
+        setA,
+        setB,
+        distance,
+        mode="two-pop",
+        status=None,
+        not_status_option=None,
+        compute_cum_sum=True,
+        attention_weight=True,
+        symmetrize=True,
+        include_dead_weight=True,
+        column_labels={
+            "track": "TRACK_ID",
+            "time": "FRAME",
+            "x": "POSITION_X",
+            "y": "POSITION_Y",
+        },
+    ):
+        # Check live_status option
+        if setA is not None and setB is not None:
+            setA, setB, status = set_live_status(setA, setB, status, not_status_option)
+        else:
+            return None, None
+
+        # Check distance option
+        if not isinstance(distance, list):
+            distance = [distance]
+
+        for d in distance:
+            # loop over each provided distance
+
+            if mode == "two-pop":
+                neigh_col = f"neighborhood_2_circle_{d}_px"
+            elif mode == "self":
+                neigh_col = f"neighborhood_self_circle_{d}_px"
+
+            cl = []
+            for s in [setA, setB]:
+
+                # Check whether data can be tracked
+                temp_column_labels = column_labels.copy()
+
+                id_col = extract_identity_col(s)
+                temp_column_labels.update({"track": id_col})
+                if id_col == "ID":
+                    compute_cum_sum = (
+                        False  # if no tracking data then cum_sum is not relevant
+                    )
+                cl.append(temp_column_labels)
+
+                # Remove nan tracks (cells that do not belong to a track)
+                s[neigh_col] = np.nan
+                s[neigh_col] = s[neigh_col].astype(object)
+                s.dropna(subset=[cl[-1]["track"]], inplace=True)
+
+            # Loop over each available timestep
+            timeline = np.unique(
+                np.concatenate(
+                    [setA[cl[0]["time"]].to_numpy(), setB[cl[1]["time"]].to_numpy()]
+                )
+            ).astype(int)
+
+            self.sum_done = 0
+            self.t0 = time.time()
+
+            for t in tqdm(timeline):
+
+                coordinates_A = setA.loc[
+                    setA[cl[0]["time"]] == t, [cl[0]["x"], cl[0]["y"]]
+                ].to_numpy()
+                ids_A = setA.loc[setA[cl[0]["time"]] == t, cl[0]["track"]].to_numpy()
+                status_A = setA.loc[setA[cl[0]["time"]] == t, status[0]].to_numpy()
+
+                coordinates_B = setB.loc[
+                    setB[cl[1]["time"]] == t, [cl[1]["x"], cl[1]["y"]]
+                ].to_numpy()
+                ids_B = setB.loc[setB[cl[1]["time"]] == t, cl[1]["track"]].to_numpy()
+
+                if len(ids_A) > 0 and len(ids_B) > 0:
+
+                    # compute distance matrix
+                    dist_map = cdist(coordinates_A, coordinates_B, metric="euclidean")
+
+                    if attention_weight:
+                        weights, closest_A = compute_attention_weight(
+                            dist_map,
+                            d,
+                            status_A,
+                            ids_A,
+                            axis=1,
+                            include_dead_weight=include_dead_weight,
+                        )
+
+                    _fill_distance_neighborhood_at_t(
+                        t,
+                        setA,
+                        setB,
+                        dist_map,
+                        attention_weight=attention_weight,
+                        include_dead_weight=include_dead_weight,
+                        symmetrize=symmetrize,
+                        compute_cum_sum=compute_cum_sum,
+                        weights=weights,
+                        closest_A=closest_A,
+                        neigh_col=neigh_col,
+                        column_labelsA=cl[0],
+                        column_labelsB=cl[1],
+                        statusA=status[0],
+                        statusB=status[1],
+                        distance=d,
+                    )
+
+                self.sum_done += 1 / len(timeline) * 100
+                mean_exec_per_step = (time.time() - self.t0) / (
+                    self.sum_done * len(timeline) / 100 + 1
+                )
+                pred_time = (
+                    len(timeline) - (self.sum_done * len(timeline) / 100 + 1)
+                ) * mean_exec_per_step
+                self.queue.put(
+                    {
+                        "frame_progress": self.sum_done,
+                        "frame_time": f"Time left: {round(pred_time, 1)}s",
+                        "well_progress": self.well_progress,
+                        "pos_progress": self.pos_progress,
+                    }
+                )
+
+        return setA, setB
+
+    def compute_neighborhood_at_position(
+        self,
+        pos,
+        distance,
+        population=["targets", "effectors"],
+        theta_dist=None,
+        img_shape=(2048, 2048),
+        return_tables=False,
+        clear_neigh=False,
+        event_time_col=None,
+        neighborhood_kwargs={
+            "mode": "two-pop",
+            "status": None,
+            "not_status_option": None,
+            "include_dead_weight": True,
+            "compute_cum_sum": False,
+            "attention_weight": True,
+            "symmetrize": True,
+        },
+    ):
+
+        pos = pos.replace("\\", "/")
+        pos = rf"{pos}"
+        assert os.path.exists(pos), f"Position {pos} is not a valid path."
+
+        if isinstance(population, str):
+            population = [population, population]
+
+        if not isinstance(distance, list):
+            distance = [distance]
+        if not theta_dist is None and not isinstance(theta_dist, list):
+            theta_dist = [theta_dist]
+
+        if theta_dist is None:
+            theta_dist = [0.9 * d for d in distance]
+        assert len(theta_dist) == len(
+            distance
+        ), "Incompatible number of distances and number of edge thresholds."
+
+        if population[0] == population[1]:
+            neighborhood_kwargs.update({"mode": "self"})
+        if population[1] != population[0]:
+            neighborhood_kwargs.update({"mode": "two-pop"})
+
+        df_A, path_A = get_position_table(
+            pos, population=population[0], return_path=True
+        )
+        df_B, path_B = get_position_table(
+            pos, population=population[1], return_path=True
+        )
+        if df_A is None or df_B is None:
+            return None
+
+        if clear_neigh:
+            if os.path.exists(path_A.replace(".csv", ".pkl")):
+                os.remove(path_A.replace(".csv", ".pkl"))
+            if os.path.exists(path_B.replace(".csv", ".pkl")):
+                os.remove(path_B.replace(".csv", ".pkl"))
+            df_pair, pair_path = get_position_table(
+                pos, population="pairs", return_path=True
+            )
+            if df_pair is not None:
+                os.remove(pair_path)
+
+        df_A_pkl = get_position_pickle(pos, population=population[0], return_path=False)
+        df_B_pkl = get_position_pickle(pos, population=population[1], return_path=False)
+
+        if df_A_pkl is not None:
+            pkl_columns = np.array(df_A_pkl.columns)
+            neigh_columns = np.array(
+                [c.startswith("neighborhood") for c in pkl_columns]
+            )
+            cols = list(pkl_columns[neigh_columns]) + ["FRAME"]
+
+            id_col = extract_identity_col(df_A_pkl)
+            cols.append(id_col)
+            on_cols = [id_col, "FRAME"]
+
+            print(f"Recover {cols} from the pickle file...")
+            try:
+                df_A = pd.merge(df_A, df_A_pkl.loc[:, cols], how="outer", on=on_cols)
+                print(df_A.columns)
+            except Exception as e:
+                print(f"Failure to merge pickle and csv files: {e}")
+
+        if df_B_pkl is not None and df_B is not None:
+            pkl_columns = np.array(df_B_pkl.columns)
+            neigh_columns = np.array(
+                [c.startswith("neighborhood") for c in pkl_columns]
+            )
+            cols = list(pkl_columns[neigh_columns]) + ["FRAME"]
+
+            id_col = extract_identity_col(df_B_pkl)
+            cols.append(id_col)
+            on_cols = [id_col, "FRAME"]
+
+            print(f"Recover {cols} from the pickle file...")
+            try:
+                df_B = pd.merge(df_B, df_B_pkl.loc[:, cols], how="outer", on=on_cols)
+            except Exception as e:
+                print(f"Failure to merge pickle and csv files: {e}")
+
+        if clear_neigh:
+            unwanted = df_A.columns[df_A.columns.str.contains("neighborhood")]
+            df_A = df_A.drop(columns=unwanted)
+            unwanted = df_B.columns[df_B.columns.str.contains("neighborhood")]
+            df_B = df_B.drop(columns=unwanted)
+
+        df_A, df_B = self.distance_cut_neighborhood(
+            df_A, df_B, distance, **neighborhood_kwargs
+        )
+
+        if df_A is None or df_B is None or len(df_A) == 0:
+            return None
+
+        for td, d in zip(theta_dist, distance):
+
+            if neighborhood_kwargs["mode"] == "two-pop":
+                neigh_col = f"neighborhood_2_circle_{d}_px"
+
+            elif neighborhood_kwargs["mode"] == "self":
+                neigh_col = f"neighborhood_self_circle_{d}_px"
+
+            # edge_filter_A = (df_A['POSITION_X'] > td)&(df_A['POSITION_Y'] > td)&(df_A['POSITION_Y'] < (img_shape[0] - td))&(df_A['POSITION_X'] < (img_shape[1] - td))
+            # edge_filter_B = (df_B['POSITION_X'] > td)&(df_B['POSITION_Y'] > td)&(df_B['POSITION_Y'] < (img_shape[0] - td))&(df_B['POSITION_X'] < (img_shape[1] - td))
+            # df_A.loc[~edge_filter_A, neigh_col] = np.nan
+            # df_B.loc[~edge_filter_B, neigh_col] = np.nan
+
+            print("Count neighborhood...")
+            df_A = compute_neighborhood_metrics(
+                df_A,
+                neigh_col,
+                metrics=["inclusive", "exclusive", "intermediate"],
+                decompose_by_status=True,
+            )
+            # if neighborhood_kwargs['symmetrize']:
+            #     df_B = compute_neighborhood_metrics(df_B, neigh_col, metrics=['inclusive','exclusive','intermediate'], decompose_by_status=True)
+            print("Done...")
+
+            if "TRACK_ID" in list(df_A.columns):
+                if not np.all(df_A["TRACK_ID"].isnull()):
+                    print("Estimate average neighborhood before/after event...")
+                    df_A = mean_neighborhood_before_event(
+                        df_A, neigh_col, event_time_col
+                    )
+                    if event_time_col is not None:
+                        df_A = mean_neighborhood_after_event(
+                            df_A, neigh_col, event_time_col
+                        )
+                    print("Done...")
+
+        if not population[0] == population[1]:
+            # Remove neighborhood column from neighbor table, rename with actual population name
+            for td, d in zip(theta_dist, distance):
+                if neighborhood_kwargs["mode"] == "two-pop":
+                    neigh_col = f"neighborhood_2_circle_{d}_px"
+                    new_neigh_col = neigh_col.replace(
+                        "_2_", f"_({population[0]}-{population[1]})_"
+                    )
+                    df_A = df_A.rename(columns={neigh_col: new_neigh_col})
+                elif neighborhood_kwargs["mode"] == "self":
+                    neigh_col = f"neighborhood_self_circle_{d}_px"
+                df_B = df_B.drop(columns=[neigh_col])
+            df_B.to_pickle(path_B.replace(".csv", ".pkl"))
+
+            cols_to_rename = [
+                c
+                for c in list(df_A.columns)
+                if c.startswith("intermediate_count_")
+                or c.startswith("inclusive_count_")
+                or c.startswith("exclusive_count_")
+                or c.startswith("mean_count_")
+            ]
+            new_col_names = [
+                c.replace("_2_", f"_({population[0]}-{population[1]})_")
+                for c in cols_to_rename
+            ]
+            new_name_map = {}
+            for k, c in enumerate(cols_to_rename):
+                new_name_map.update({c: new_col_names[k]})
+            df_A = df_A.rename(columns=new_name_map)
+
+        df_A.to_pickle(path_A.replace(".csv", ".pkl"))
+
+        unwanted = df_A.columns[df_A.columns.str.startswith("neighborhood_")]
+        df_A2 = df_A.drop(columns=unwanted)
+        df_A2.to_csv(path_A, index=False)
+
+        if not population[0] == population[1]:
+            unwanted = df_B.columns[df_B.columns.str.startswith("neighborhood_")]
+            df_B_csv = df_B.drop(unwanted, axis=1, inplace=False)
+            df_B_csv.to_csv(path_B, index=False)
+
+        if return_tables:
+            return df_A, df_B
+
+    def compute_contact_neighborhood_at_position(
+        self,
+        pos,
+        distance,
+        population=["targets", "effectors"],
+        theta_dist=None,
+        img_shape=(2048, 2048),
+        return_tables=False,
+        clear_neigh=False,
+        event_time_col=None,
+        neighborhood_kwargs={
+            "mode": "two-pop",
+            "status": None,
+            "not_status_option": None,
+            "include_dead_weight": True,
+            "compute_cum_sum": False,
+            "attention_weight": True,
+            "symmetrize": True,
+        },
+    ):
+
+        pos = pos.replace("\\", "/")
+        pos = rf"{pos}"
+        assert os.path.exists(pos), f"Position {pos} is not a valid path."
+
+        if isinstance(population, str):
+            population = [population, population]
+
+        if not isinstance(distance, list):
+            distance = [distance]
+        if not theta_dist is None and not isinstance(theta_dist, list):
+            theta_dist = [theta_dist]
+
+        if theta_dist is None:
+            theta_dist = [0 for d in distance]  # 0.9*d
+        assert len(theta_dist) == len(
+            distance
+        ), "Incompatible number of distances and number of edge thresholds."
+
+        if population[0] == population[1]:
+            neighborhood_kwargs.update({"mode": "self"})
+        if population[1] != population[0]:
+            neighborhood_kwargs.update({"mode": "two-pop"})
+
+        df_A, path_A = get_position_table(
+            pos, population=population[0], return_path=True
+        )
+        df_B, path_B = get_position_table(
+            pos, population=population[1], return_path=True
+        )
+        if df_A is None or df_B is None:
+            return None
+
+        if clear_neigh:
+            if os.path.exists(path_A.replace(".csv", ".pkl")):
+                os.remove(path_A.replace(".csv", ".pkl"))
+            if os.path.exists(path_B.replace(".csv", ".pkl")):
+                os.remove(path_B.replace(".csv", ".pkl"))
+            df_pair, pair_path = get_position_table(
+                pos, population="pairs", return_path=True
+            )
+            if df_pair is not None:
+                os.remove(pair_path)
+
+        df_A_pkl = get_position_pickle(pos, population=population[0], return_path=False)
+        df_B_pkl = get_position_pickle(pos, population=population[1], return_path=False)
+
+        if df_A_pkl is not None:
+            pkl_columns = np.array(df_A_pkl.columns)
+            neigh_columns = np.array(
+                [c.startswith("neighborhood") for c in pkl_columns]
+            )
+            cols = list(pkl_columns[neigh_columns]) + ["FRAME"]
+
+            id_col = extract_identity_col(df_A_pkl)
+            cols.append(id_col)
+            on_cols = [id_col, "FRAME"]
+
+            print(f"Recover {cols} from the pickle file...")
+            try:
+                df_A = pd.merge(df_A, df_A_pkl.loc[:, cols], how="outer", on=on_cols)
+                print(df_A.columns)
+            except Exception as e:
+                print(f"Failure to merge pickle and csv files: {e}")
+
+        if df_B_pkl is not None and df_B is not None:
+            pkl_columns = np.array(df_B_pkl.columns)
+            neigh_columns = np.array(
+                [c.startswith("neighborhood") for c in pkl_columns]
+            )
+            cols = list(pkl_columns[neigh_columns]) + ["FRAME"]
+
+            id_col = extract_identity_col(df_B_pkl)
+            cols.append(id_col)
+            on_cols = [id_col, "FRAME"]
+
+            print(f"Recover {cols} from the pickle file...")
+            try:
+                df_B = pd.merge(df_B, df_B_pkl.loc[:, cols], how="outer", on=on_cols)
+            except Exception as e:
+                print(f"Failure to merge pickle and csv files: {e}")
+
+        labelsA = locate_labels(pos, population=population[0])
+        if population[1] == population[0]:
+            labelsB = None
+        else:
+            labelsB = locate_labels(pos, population=population[1])
+
+        if clear_neigh:
+            unwanted = df_A.columns[df_A.columns.str.contains("neighborhood")]
+            df_A = df_A.drop(columns=unwanted)
+            unwanted = df_B.columns[df_B.columns.str.contains("neighborhood")]
+            df_B = df_B.drop(columns=unwanted)
+
+        print(f"Distance: {distance} for mask contact")
+        df_A, df_B = self.mask_contact_neighborhood(
+            df_A, df_B, labelsA, labelsB, distance, **neighborhood_kwargs
+        )
+        if df_A is None or df_B is None or len(df_A) == 0:
+            return None
+
+        for td, d in zip(theta_dist, distance):
+
+            if neighborhood_kwargs["mode"] == "two-pop":
+                neigh_col = f"neighborhood_2_contact_{d}_px"
+            elif neighborhood_kwargs["mode"] == "self":
+                neigh_col = f"neighborhood_self_contact_{d}_px"
+            else:
+                print("Invalid mode...")
+                return None
+
+            df_A.loc[df_A["class_id"].isnull(), neigh_col] = np.nan
+
+            # edge_filter_A = (df_A['POSITION_X'] > td)&(df_A['POSITION_Y'] > td)&(df_A['POSITION_Y'] < (img_shape[0] - td))&(df_A['POSITION_X'] < (img_shape[1] - td))
+            # edge_filter_B = (df_B['POSITION_X'] > td)&(df_B['POSITION_Y'] > td)&(df_B['POSITION_Y'] < (img_shape[0] - td))&(df_B['POSITION_X'] < (img_shape[1] - td))
+            # df_A.loc[~edge_filter_A, neigh_col] = np.nan
+            # df_B.loc[~edge_filter_B, neigh_col] = np.nan
+
+            df_A = compute_neighborhood_metrics(
+                df_A,
+                neigh_col,
+                metrics=["inclusive", "intermediate"],
+                decompose_by_status=True,
+            )
+            if "TRACK_ID" in list(df_A.columns):
+                if not np.all(df_A["TRACK_ID"].isnull()):
+                    df_A = mean_neighborhood_before_event(
+                        df_A,
+                        neigh_col,
+                        event_time_col,
+                        metrics=["inclusive", "intermediate"],
+                    )
+                    if event_time_col is not None:
+                        df_A = mean_neighborhood_after_event(
+                            df_A,
+                            neigh_col,
+                            event_time_col,
+                            metrics=["inclusive", "intermediate"],
+                        )
+                    print("Done...")
+
+        if not population[0] == population[1]:
+            # Remove neighborhood column from neighbor table, rename with actual population name
+            for td, d in zip(theta_dist, distance):
+                if neighborhood_kwargs["mode"] == "two-pop":
+                    neigh_col = f"neighborhood_2_contact_{d}_px"
+                    new_neigh_col = neigh_col.replace(
+                        "_2_", f"_({population[0]}-{population[1]})_"
+                    )
+                    df_A = df_A.rename(columns={neigh_col: new_neigh_col})
+                elif neighborhood_kwargs["mode"] == "self":
+                    neigh_col = f"neighborhood_self_contact_{d}_px"
+                else:
+                    print("Invalid mode...")
+                    return None
+                df_B = df_B.drop(columns=[neigh_col])
+            df_B.to_pickle(path_B.replace(".csv", ".pkl"))
+
+            cols_to_rename = [
+                c
+                for c in list(df_A.columns)
+                if c.startswith("intermediate_count_")
+                or c.startswith("inclusive_count_")
+                or c.startswith("exclusive_count_")
+                or c.startswith("mean_count_")
+            ]
+            new_col_names = [
+                c.replace("_2_", f"_({population[0]}-{population[1]})_")
+                for c in cols_to_rename
+            ]
+            new_name_map = {}
+            for k, c in enumerate(cols_to_rename):
+                new_name_map.update({c: new_col_names[k]})
+            df_A = df_A.rename(columns=new_name_map)
+
+        print(f"{df_A.columns=}")
+        df_A.to_pickle(path_A.replace(".csv", ".pkl"))
+
+        unwanted = df_A.columns[df_A.columns.str.startswith("neighborhood_")]
+        df_A2 = df_A.drop(columns=unwanted)
+        df_A2.to_csv(path_A, index=False)
+
+        if not population[0] == population[1]:
+            unwanted = df_B.columns[df_B.columns.str.startswith("neighborhood_")]
+            df_B_csv = df_B.drop(unwanted, axis=1, inplace=False)
+            df_B_csv.to_csv(path_B, index=False)
+
+        if return_tables:
+            return df_A, df_B
+
+    def run(self):
+        self.queue.put({"status": "Computing neighborhood..."})
+        print(f"Launching the neighborhood computation...")
+        if self.protocol["neighborhood_type"] == "distance_threshold":
+            self.compute_neighborhood_at_position(
+                self.pos,
+                self.protocol["distance"],
+                population=self.protocol["population"],
+                theta_dist=None,
+                img_shape=self.img_shape,
+                return_tables=False,
+                clear_neigh=self.protocol["clear_neigh"],
+                event_time_col=self.protocol["event_time_col"],
+                neighborhood_kwargs=self.protocol["neighborhood_kwargs"],
+            )
+            print(f"Computation done!")
+        elif self.protocol["neighborhood_type"] == "mask_contact":
+            print(f"Compute contact neigh!!")
+            self.compute_contact_neighborhood_at_position(
+                self.pos,
+                self.protocol["distance"],
+                population=self.protocol["population"],
+                theta_dist=None,
+                img_shape=self.img_shape,
+                return_tables=False,
+                clear_neigh=self.protocol["clear_neigh"],
+                event_time_col=self.protocol["event_time_col"],
+                neighborhood_kwargs=self.protocol["neighborhood_kwargs"],
+            )
+            print(f"Computation done!")
+
+        if self.measure_pairs:
+            self.queue.put({"status": "Measuring pairs..."})
+            print(f"Measuring pairs...")
+
+            distances = self.protocol["distance"]
+            if not isinstance(distances, list):
+                distances = [distances]
+
+            for d in distances:
+                # Construct the protocol dictionary expected by measure_pair_signals_at_position
+                if self.protocol["population"][0] == self.protocol["population"][1]:
+                    mode = "self"
+                else:
+                    mode = "two-pop"
+
+                if self.protocol["neighborhood_type"] == "distance_threshold":
+                    neigh_type = "circle"
+                    if mode == "two-pop":
+                        neigh_col = f"neighborhood_2_circle_{d}_px"
+                    elif mode == "self":
+                        neigh_col = f"neighborhood_self_circle_{d}_px"
+                elif self.protocol["neighborhood_type"] == "mask_contact":
+                    neigh_type = "contact"
+                    if mode == "two-pop":
+                        neigh_col = f"neighborhood_2_contact_{d}_px"
+                    elif mode == "self":
+                        neigh_col = f"neighborhood_self_contact_{d}_px"
+
+                pair_protocol = {
+                    "reference": self.protocol["population"][0],
+                    "neighbor": self.protocol["population"][1],
+                    "type": neigh_type,
+                    "distance": d,
+                    "description": neigh_col,
+                }
+
+                print(f"Processing pairs for {neigh_col}...")
+                df_pairs = measure_pair_signals_at_position(self.pos, pair_protocol)
+
+                if df_pairs is not None:
+                    if "REFERENCE_ID" in list(df_pairs.columns):
+                        previous_pair_table_path = self.pos + os.sep.join(
+                            ["output", "tables", "trajectories_pairs.csv"]
+                        )
+
+                        if os.path.exists(previous_pair_table_path):
+                            df_prev = pd.read_csv(previous_pair_table_path)
+                            cols = [
+                                c
+                                for c in list(df_prev.columns)
+                                if c in list(df_pairs.columns)
+                            ]
+                            df_pairs = pd.merge(df_prev, df_pairs, how="outer", on=cols)
+
+                        try:
+                            df_pairs = df_pairs.sort_values(
+                                by=[
+                                    "reference_population",
+                                    "neighbor_population",
+                                    "REFERENCE_ID",
+                                    "NEIGHBOR_ID",
+                                    "FRAME",
+                                ]
+                            )
+                        except KeyError:
+                            pass
+
+                        df_pairs.to_csv(previous_pair_table_path, index=False)
+                        print(f"Pair measurements saved to {previous_pair_table_path}")
+
+        # self.indices = list(range(self.img_num_channels.shape[1]))
+        # chunks = np.array_split(self.indices, self.n_threads)
+        #
+        # self.timestep_dataframes = []
+        # with concurrent.futures.ThreadPoolExecutor(max_workers=self.n_threads) as executor:
+        #     results = executor.map(self.parallel_job,
+        #                            chunks)  # list(map(lambda x: executor.submit(self.parallel_job, x), chunks))
+        #     try:
+        #         for i, return_value in enumerate(results):
+        #             print(f'Thread {i} completed...')
+        #             self.timestep_dataframes.extend(return_value)
+        #     except Exception as e:
+        #         print("Exception: ", e)
+        #
+        # print('Measurements successfully performed...')
+        #
+        # if len(self.timestep_dataframes) > 0:
+        #
+        #     df = pd.concat(self.timestep_dataframes)
+        #
+        #     if self.trajectories is not None:
+        #         df = df.sort_values(by=[self.column_labels['track'], self.column_labels['time']])
+        #         df = df.dropna(subset=[self.column_labels['track']])
+        #     else:
+        #         df['ID'] = np.arange(len(df))
+        #         df = df.sort_values(by=[self.column_labels['time'], 'ID'])
+        #
+        #     df = df.reset_index(drop=True)
+        #     df = _remove_invalid_cols(df)
+        #
+        #     df.to_csv(self.pos + os.sep.join(["output", "tables", self.table_name]), index=False)
+        #     print(f'Measurement table successfully exported in {os.sep.join(["output", "tables"])}...')
+        #     print('Done.')
+        # else:
+        #     print('No measurement could be performed. Check your inputs.')
+        #     print('Done.')
+
+        # Send end signal
+        self.queue.put("finished")
+        self.queue.close()
+
+    def end_process(self):
+
+        self.terminate()
+        self.queue.put("finished")
+
+    def abort_process(self):
+
+        self.terminate()
+        self.queue.put("error")
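
For context on how this worker is driven: NeighborhoodProcess subclasses multiprocessing.Process, copies every entry of process_args onto self (run() reads at least pos, img_shape, protocol and optionally measure_pairs), and reports progress by putting dictionaries on self.queue until it finally puts the string "finished" (or "error" on abort). A minimal driver sketch follows; the concrete protocol values and the position path are placeholders, not values taken from this diff.

    # Minimal sketch of launching NeighborhoodProcess from a parent process.
    # The attribute names (pos, img_shape, protocol, measure_pairs) mirror what run() reads;
    # the values below are illustrative placeholders.
    from multiprocessing import Queue

    from celldetective.processes.compute_neighborhood import NeighborhoodProcess

    queue = Queue()
    process_args = {
        "pos": "/path/to/experiment/W1/100/",   # placeholder position folder
        "img_shape": (2048, 2048),
        "measure_pairs": False,
        "protocol": {
            "neighborhood_type": "distance_threshold",  # or "mask_contact"
            "distance": [60],                           # in pixels
            "population": ["targets", "effectors"],
            "clear_neigh": False,
            "event_time_col": None,
            "neighborhood_kwargs": {
                "mode": "two-pop", "status": None, "not_status_option": None,
                "include_dead_weight": True, "compute_cum_sum": False,
                "attention_weight": True, "symmetrize": True,
            },
        },
    }

    # On spawn-based platforms (Windows/macOS), guard this with `if __name__ == "__main__":`.
    worker = NeighborhoodProcess(queue=queue, process_args=process_args)
    worker.start()

    # Drain progress messages until the end signal arrives.
    while True:
        msg = queue.get()
        if msg in ("finished", "error"):
            break
        print(msg)  # dicts carrying status / frame_progress / frame_time / well_progress / pos_progress

    worker.join()

The same queue messages are what the GUI progress widgets consume; a headless caller can simply discard them, as sketched above.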