celldetective 1.3.1__py3-none-any.whl → 1.3.3.post1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (34)
  1. celldetective/_version.py +1 -1
  2. celldetective/events.py +2 -0
  3. celldetective/gui/classifier_widget.py +51 -3
  4. celldetective/gui/control_panel.py +9 -3
  5. celldetective/gui/generic_signal_plot.py +161 -2
  6. celldetective/gui/gui_utils.py +90 -1
  7. celldetective/gui/measurement_options.py +35 -32
  8. celldetective/gui/plot_signals_ui.py +8 -3
  9. celldetective/gui/process_block.py +36 -114
  10. celldetective/gui/retrain_segmentation_model_options.py +3 -1
  11. celldetective/gui/signal_annotator.py +53 -26
  12. celldetective/gui/signal_annotator2.py +17 -30
  13. celldetective/gui/survival_ui.py +7 -3
  14. celldetective/gui/tableUI.py +300 -183
  15. celldetective/gui/thresholds_gui.py +195 -199
  16. celldetective/gui/viewers.py +267 -13
  17. celldetective/io.py +110 -10
  18. celldetective/measure.py +128 -88
  19. celldetective/models/segmentation_effectors/ricm_bf_all_last/config_input.json +79 -0
  20. celldetective/models/segmentation_effectors/ricm_bf_all_last/ricm_bf_all_last +0 -0
  21. celldetective/models/segmentation_effectors/ricm_bf_all_last/training_instructions.json +37 -0
  22. celldetective/models/segmentation_effectors/test-transfer/config_input.json +39 -0
  23. celldetective/models/segmentation_effectors/test-transfer/test-transfer +0 -0
  24. celldetective/neighborhood.py +154 -69
  25. celldetective/relative_measurements.py +128 -4
  26. celldetective/scripts/measure_cells.py +3 -3
  27. celldetective/signals.py +207 -213
  28. celldetective/utils.py +16 -0
  29. {celldetective-1.3.1.dist-info → celldetective-1.3.3.post1.dist-info}/METADATA +11 -10
  30. {celldetective-1.3.1.dist-info → celldetective-1.3.3.post1.dist-info}/RECORD +34 -29
  31. {celldetective-1.3.1.dist-info → celldetective-1.3.3.post1.dist-info}/WHEEL +1 -1
  32. {celldetective-1.3.1.dist-info → celldetective-1.3.3.post1.dist-info}/LICENSE +0 -0
  33. {celldetective-1.3.1.dist-info → celldetective-1.3.3.post1.dist-info}/entry_points.txt +0 -0
  34. {celldetective-1.3.1.dist-info → celldetective-1.3.3.post1.dist-info}/top_level.txt +0 -0
@@ -7,6 +7,8 @@ from celldetective.utils import contour_of_instance_segmentation, extract_identi
 from scipy.spatial.distance import cdist
 from celldetective.io import locate_labels, get_position_pickle, get_position_table

+import matplotlib.pyplot as plt
+
 abs_path = os.sep.join([os.path.split(os.path.dirname(os.path.realpath(__file__)))[0], 'celldetective'])


@@ -818,10 +820,7 @@ def contact_neighborhood(labelsA, labelsB=None, border=3, connectivity=2):
 if labelsB is not None:
 labelsB = labelsB.astype(float)

-print(f"Border = {border}")
-
 if border > 0:
-print(labelsA.shape, border * (-1))
 labelsA_edge = contour_of_instance_segmentation(label=labelsA, distance=border * (-1)).astype(float)
 labelsA[np.where(labelsA_edge > 0)] = labelsA_edge[np.where(labelsA_edge > 0)]
 if labelsB is not None:
@@ -851,6 +850,7 @@ def contact_neighborhood(labelsA, labelsB=None, border=3, connectivity=2):
 return neighs

 def merge_labels(labelsA, labelsB):
+
 labelsA = labelsA.astype(float)
 labelsB = labelsB.astype(float)

@@ -861,6 +861,7 @@ def merge_labels(labelsA, labelsB):


 def find_contact_neighbors(labels, connectivity=2):
+
 assert labels.ndim == 2, "Wrong dimension for labels..."
 g, nodes = pixel_graph(labels, mask=labels.astype(bool), connectivity=connectivity)
 g.eliminate_zeros()
@@ -906,6 +907,15 @@ def mask_contact_neighborhood(setA, setB, labelsA, labelsB, distance, mode='two-
 """

 # Check live_status option
+# if setA is not None:
+# setA_id = extract_identity_col(setA)
+# if setA_id=="TRACK_ID":
+# setA = setA.loc[~setA['TRACK_ID'].isnull(),:].copy()
+# if setB is not None:
+# setB_id = extract_identity_col(setB)
+# if setB_id=="TRACK_ID":
+# setB = setB.loc[~setB['TRACK_ID'].isnull(),:].copy()
+
 if setA is not None and setB is not None:
 setA, setB, status = set_live_status(setA, setB, status, not_status_option)
 else:
@@ -915,6 +925,25 @@ def mask_contact_neighborhood(setA, setB, labelsA, labelsB, distance, mode='two-
 if not isinstance(distance, list):
 distance = [distance]

+cl = []
+for s in [setA, setB]:
+
+# Check whether data can be tracked
+temp_column_labels = column_labels.copy()
+
+id_col = extract_identity_col(s)
+temp_column_labels.update({'track': id_col})
+if id_col=='ID':
+compute_cum_sum = False
+
+cl.append(temp_column_labels)
+
+setA = setA.loc[~setA[cl[0]['track']].isnull(),:].copy()
+setB = setB.loc[~setB[cl[1]['track']].isnull(),:].copy()
+
+if labelsB is None:
+labelsB = [None] * len(labelsA)
+
 for d in distance:
 # loop over each provided distance

@@ -923,22 +952,11 @@ def mask_contact_neighborhood(setA, setB, labelsA, labelsB, distance, mode='two-
 elif mode == 'self':
 neigh_col = f'neighborhood_self_contact_{d}_px'

-cl = []
-for s in [setA, setB]:
+setA[neigh_col] = np.nan
+setA[neigh_col] = setA[neigh_col].astype(object)

-# Check whether data can be tracked
-temp_column_labels = column_labels.copy()
-
-id_col = extract_identity_col(s)
-temp_column_labels.update({'track': id_col})
-if id_col=='ID':
-compute_cum_sum = False # if no tracking data then cum_sum is not relevant
-cl.append(temp_column_labels)
-
-# Remove nan tracks (cells that do not belong to a track)
-s[neigh_col] = np.nan
-s[neigh_col] = s[neigh_col].astype(object)
-s.dropna(subset=[cl[-1]['track']], inplace=True)
+setB[neigh_col] = np.nan
+setB[neigh_col] = setB[neigh_col].astype(object)

 # Loop over each available timestep
 timeline = np.unique(np.concatenate([setA[cl[0]['time']].to_numpy(), setB[cl[1]['time']].to_numpy()])).astype(
@@ -946,38 +964,34 @@ def mask_contact_neighborhood(setA, setB, labelsA, labelsB, distance, mode='two-
 for t in tqdm(timeline):

 index_A = list(setA.loc[setA[cl[0]['time']] == t].index)
-coordinates_A = setA.loc[setA[cl[0]['time']] == t, [cl[0]['x'], cl[0]['y']]].to_numpy()
-ids_A = setA.loc[setA[cl[0]['time']] == t, cl[0]['track']].to_numpy()
-mask_ids_A = setA.loc[setA[cl[0]['time']] == t, cl[0]['mask_id']].to_numpy()
-status_A = setA.loc[setA[cl[0]['time']] == t, status[0]].to_numpy()
+dataA = setA.loc[setA[cl[0]['time']] == t, [cl[0]['x'], cl[0]['y'], cl[0]['track'], cl[0]['mask_id'], status[0]]].to_numpy()
+coordinates_A = dataA[:,[0,1]]; ids_A = dataA[:,2]; mask_ids_A = dataA[:,3]; status_A = dataA[:,4];

 index_B = list(setB.loc[setB[cl[1]['time']] == t].index)
-coordinates_B = setB.loc[setB[cl[1]['time']] == t, [cl[1]['x'], cl[1]['y']]].to_numpy()
-ids_B = setB.loc[setB[cl[1]['time']] == t, cl[1]['track']].to_numpy()
-mask_ids_B = setB.loc[setB[cl[1]['time']] == t, cl[1]['mask_id']].to_numpy()
-status_B = setB.loc[setB[cl[1]['time']] == t, status[1]].to_numpy()
+dataB = setB.loc[setB[cl[1]['time']] == t, [cl[1]['x'], cl[1]['y'], cl[1]['track'], cl[1]['mask_id'], status[1]]].to_numpy()
+coordinates_B = dataB[:,[0,1]]; ids_B = dataB[:,2]; mask_ids_B = dataB[:,3]; status_B = dataB[:,4]

-print(f"Frame {t}")
-print(f"{mask_ids_A=}", f"{mask_ids_B}")
-
-if len(ids_A) > 0 and len(ids_B) > 0:
+if len(coordinates_A) > 0 and len(coordinates_B) > 0:

 # compute distance matrix
 dist_map = cdist(coordinates_A, coordinates_B, metric="euclidean")
+intersection_map = np.zeros_like(dist_map).astype(float)

 # Do the mask contact computation
-if labelsB is not None:
-lblB = labelsB[t]
-else:
-lblB = labelsB
+lblA = labelsA[t]
+lblA = np.where(np.isin(lblA, mask_ids_A), lblA, 0.)
+
+lblB = labelsB[t]
+if lblB is not None:
+lblB = np.where(np.isin(lblB, mask_ids_B), lblB, 0.)

-print(f"Distance {d} for contact as border")
-contact_pairs = contact_neighborhood(labelsA[t], labelsB=lblB, border=d, connectivity=2)
+contact_pairs = contact_neighborhood(lblA, labelsB=lblB, border=d, connectivity=2)

-print(t, f"{np.unique(labelsA[t])=}")
-print(f"Frame {t}: found the following contact pairs: {contact_pairs}...")
 # Put infinite distance to all non-contact pairs (something like this)
 plot_map = False
+flatA = lblA.flatten()
+if lblB is not None:
+flatB = lblB.flatten()

 if len(contact_pairs) > 0:
 mask = np.ones_like(dist_map).astype(bool)
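The hunk above narrows both label images to the objects actually listed in the measurement tables before looking for contacts: `np.isin` keeps the listed mask IDs and sets everything else to background. A minimal, self-contained sketch of that filtering step (the toy arrays below are illustrative, not taken from the package):

import numpy as np

# Toy label image with objects 1, 2 and 3.
lbl = np.array([[0., 1., 1.],
                [2., 2., 0.],
                [3., 3., 3.]])

# Only objects listed in the table (here 1 and 3) survive; object 2 is zeroed out.
mask_ids = np.array([1., 3.])
filtered = np.where(np.isin(lbl, mask_ids), lbl, 0.)
print(filtered)
# [[0. 1. 1.]
#  [0. 0. 0.]
#  [3. 3. 3.]]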
@@ -985,48 +999,32 @@ def mask_contact_neighborhood(setA, setB, labelsA, labelsB, distance, mode='two-
 indices_to_keep = []
 for cp in contact_pairs:

-if np.any(cp < 0):
-if cp[0] < 0:
-mask_A = cp[1]
-mask_B = np.abs(cp[0])
-else:
-mask_A = cp[0]
-mask_B = np.abs(cp[1])
-else:
-mask_A = cp[0]
-mask_B = cp[1]
-
-try:
+cp = np.abs(cp)
+mask_A, mask_B = cp
+idx_A = np.where(mask_ids_A == int(mask_A))[0][0]
+idx_B = np.where(mask_ids_B == int(mask_B))[0][0]
+
+intersection = 0
+if lblB is not None:
+intersection = len(flatA[(flatA==int(mask_A))&(flatB==int(mask_B))])

-idx_A = np.where(mask_ids_A == int(mask_A))[0][0]
-idx_B = np.where(mask_ids_B == int(mask_B))[0][0]
-print(idx_A, idx_B)
-indices_to_keep.append([idx_A,idx_B])
-except Exception as e:
-print(f'{e} {t=} error something happened!!')
-pass
+indices_to_keep.append([idx_A,idx_B, intersection])
+print(f'Ref cell #{ids_A[idx_A]} matched with neigh. cell #{ids_B[idx_B]}...')
+print(f'Computed intersection: {intersection} px...')

-print(f'Indices to keep: {indices_to_keep}...')
 if len(indices_to_keep) > 0:
 indices_to_keep = np.array(indices_to_keep)
 mask[indices_to_keep[:, 0], indices_to_keep[:, 1]] = False
 if mode == 'self':
 mask[indices_to_keep[:, 1], indices_to_keep[:, 0]] = False
 dist_map[mask] = 1.0E06
+intersection_map[indices_to_keep[:,0], indices_to_keep[:,1]] = indices_to_keep[:,2]
 plot_map=True
 else:
 dist_map[:,:] = 1.0E06
 else:
 dist_map[:, :] = 1.0E06

-# PROCEED all the same?? --> I guess so
-# if plot_map:
-# import matplotlib.pyplot as plt
-# print(indices_to_keep)
-# plt.imshow(dist_map)
-# plt.pause(5)
-# plt.close()
-
 d_filter = 1.0E05
 if attention_weight:
 weights, closest_A = compute_attention_weight(dist_map, d_filter, status_A, ids_A, axis=1,
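For each contact pair, the new intersection value is the number of pixels where object `mask_A` in the filtered reference labels coincides with object `mask_B` in the neighbour labels. The same counting logic on toy arrays, for illustration only:

import numpy as np

lblA = np.array([[1., 1., 0.],
                 [1., 1., 0.],
                 [0., 0., 0.]])
lblB = np.array([[0., 2., 2.],
                 [0., 2., 2.],
                 [0., 0., 0.]])

flatA, flatB = lblA.flatten(), lblB.flatten()
mask_A, mask_B = 1, 2
# Pixels belonging to object 1 of A and object 2 of B at the same time.
intersection = len(flatA[(flatA == mask_A) & (flatB == mask_B)])
print(intersection)  # 2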
@@ -1036,11 +1034,14 @@ def mask_contact_neighborhood(setA, setB, labelsA, labelsB, distance, mode='two-
 for k in range(dist_map.shape[0]):

 col = dist_map[k, :]
+col_inter = intersection_map[k, :]
 col[col == 0.] = 1.0E06

 neighs_B = np.array([ids_B[i] for i in np.where((col <= d_filter))[0]])
 status_neigh_B = np.array([status_B[i] for i in np.where((col <= d_filter))[0]])
 dist_B = [round(col[i], 2) for i in np.where((col <= d_filter))[0]]
+intersect_B = [round(col_inter[i], 2) for i in np.where((col <= d_filter))[0]]
+
 if len(dist_B) > 0:
 closest_B_cell = neighs_B[np.argmin(dist_B)]

@@ -1080,14 +1081,14 @@ def mask_contact_neighborhood(setA, setB, labelsA, labelsB, distance, mode='two-
 else:
 closest_b = False
 if isinstance(sym_neigh, list):
-sym_neigh.append({'id': ids_A[k], 'distance': dist_B[n], 'status': status_A[k]})
+sym_neigh.append({'id': ids_A[k], 'distance': dist_B[n], 'status': status_A[k], 'intersection': intersect_B[n]})
 else:
-sym_neigh = [{'id': ids_A[k], 'distance': dist_B[n], 'status': status_A[k]}]
+sym_neigh = [{'id': ids_A[k], 'distance': dist_B[n], 'status': status_A[k], 'intersection': intersect_B[n]}]
 if attention_weight:
 sym_neigh[-1].update({'weight': weight_A, 'closest': closest_b})

 # Write the minimum info about neighborhing cell B
-neigh_dico = {'id': neighs_B[n], 'distance': dist_B[n], 'status': status_neigh_B[n]}
+neigh_dico = {'id': neighs_B[n], 'distance': dist_B[n], 'status': status_neigh_B[n], 'intersection': intersect_B[n]}
 if attention_weight:
 neigh_dico.update({'weight': weights[n_index], 'closest': closest})

@@ -1303,6 +1304,90 @@ def compute_contact_neighborhood_at_position(pos, distance, population=['targets
 return df_A, df_B


+def extract_neighborhood_in_pair_table(df, distance=None, reference_population="targets", neighbor_population="effectors", mode="circle", neighborhood_key=None, contact_only=True,):
+
+"""
+Extracts data from a pair table that matches specific neighborhood criteria based on reference and neighbor
+populations, distance, and mode of neighborhood computation (e.g., circular or contact-based).
+
+Parameters
+----------
+df : pandas.DataFrame
+DataFrame containing the pair table, which includes columns for 'reference_population', 'neighbor_population',
+and a column for neighborhood status.
+distance : int, optional
+Radius in pixels for neighborhood calculation, used only if `neighborhood_key` is not provided.
+reference_population : str, default="targets"
+The reference population to consider. Must be either "targets" or "effectors".
+neighbor_population : str, default="effectors"
+The neighbor population to consider. Must be either "targets" or "effectors", used only if `neighborhood_key` is not provided.
+mode : str, default="circle"
+Neighborhood computation mode. Options are "circle" for radius-based or "contact" for contact-based neighborhood, used only if `neighborhood_key` is not provided.
+neighborhood_key : str, optional
+A precomputed neighborhood key to identify specific neighborhoods. If provided, this key overrides `distance`,
+`mode`, and `neighbor_population`.
+contact_only : bool, default=True
+If True, only rows indicating contact with the neighbor population (status=1) are kept; if False, both
+contact (status=1) and no-contact (status=0) rows are included.
+
+Returns
+-------
+pandas.DataFrame
+Filtered DataFrame containing rows that meet the specified neighborhood criteria.
+
+Notes
+-----
+- When `neighborhood_key` is None, the neighborhood column is generated based on the provided `reference_population`,
+`neighbor_population`, `distance`, and `mode`.
+- The function uses `status_<neigh_col>` to filter rows based on `contact_only` criteria.
+- Ensures that `reference_population` and `neighbor_population` are valid inputs and consistent with the neighborhood
+mode and key.
+
+Example
+-------
+>>> neighborhood_data = extract_neighborhood_in_pair_table(df, distance=50, reference_population="targets",
+neighbor_population="effectors", mode="circle")
+>>> neighborhood_data.head()
+
+Raises
+------
+AssertionError
+If `reference_population` or `neighbor_population` is not valid, or if the required neighborhood status
+column does not exist in `df`.
+"""
+
+
+assert reference_population in ["targets", "effectors"], "Please set a valid reference population ('targets' or 'effectors')"
+if neighborhood_key is None:
+assert neighbor_population in ["targets", "effectors"], "Please set a valid neighbor population ('targets' or 'effectors')"
+assert mode in ["circle", "contact"], "Please set a valid neighborhood computation mode ('circle' or 'contact')"
+if reference_population==neighbor_population:
+type = "self"
+else:
+type = "2"
+
+neigh_col = f"neighborhood_{type}_{mode}_{distance}_px"
+else:
+neigh_col = neighborhood_key.replace('status_','')
+if 'self' in neigh_col:
+neighbor_population = reference_population
+else:
+if reference_population=="effectors":
+neighbor_population='targets'
+else:
+neighbor_population='effectors'
+
+assert "status_"+neigh_col in list(df.columns),"The selected neighborhood does not appear in the data..."
+
+if contact_only:
+s_keep = [1]
+else:
+s_keep = [0,1]
+
+data = df.loc[(df['reference_population']==reference_population)&(df['neighbor_population']==neighbor_population)&(df["status_"+neigh_col].isin(s_keep))]
+
+return data
+

 # def mask_intersection_neighborhood(setA, labelsA, setB, labelsB, threshold_iou=0.5, viewpoint='B'):
 # # do whatever to match objects in A and B
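A hedged usage sketch of the new `extract_neighborhood_in_pair_table` helper, assuming it is importable from `celldetective.neighborhood` (the module the surrounding hunks modify) and that a pair table with the columns written by `measure_pairs` has already been exported; the CSV path and the 3 px distance are placeholders:

import pandas as pd
from celldetective.neighborhood import extract_neighborhood_in_pair_table

pairs = pd.read_csv("path/to/pair_table.csv")  # placeholder path

# Keep only target-effector pairs flagged as in contact (status = 1)
# for a 3 px contact neighborhood.
contacts = extract_neighborhood_in_pair_table(
    pairs,
    distance=3,
    reference_population="targets",
    neighbor_population="effectors",
    mode="contact",
    contact_only=True,
)
print(contacts[["REFERENCE_ID", "NEIGHBOR_ID", "FRAME", "distance"]].head())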
@@ -90,6 +90,10 @@ def measure_pairs(pos, neighborhood_protocol):
 cosine_dot_vector[:] = np.nan

 coords_neighbor = group_neigh[['POSITION_X', 'POSITION_Y']].to_numpy()[0]
+intersection = np.nan
+if 'intersection' in list(group_neigh.columns):
+intersection = group_neigh['intersection'].values[0]
+
 neighbor_vector[0] = coords_neighbor[0] - coords_reference[0]
 neighbor_vector[1] = coords_neighbor[1] - coords_reference[1]

@@ -109,7 +113,7 @@ def measure_pairs(pos, neighborhood_protocol):
 {'REFERENCE_ID': tid, 'NEIGHBOR_ID': nc,
 'reference_population': reference_population,
 'neighbor_population': neighbor_population,
-'FRAME': t, 'distance': relative_distance,
+'FRAME': t, 'distance': relative_distance, 'intersection': intersection,
 'angle': angle * 180 / np.pi,
 f'status_{neighborhood_description}': 1,
 f'class_{neighborhood_description}': 0,
@@ -201,9 +205,14 @@ def measure_pair_signals_at_position(pos, neighborhood_protocol, velocity_kwargs
 neighbor_dicts = group.loc[: , f'{neighborhood_description}'].values
 timeline_reference = group['FRAME'].to_numpy()
 coords_reference = group[['POSITION_X', 'POSITION_Y']].to_numpy()
+if "area" in list(group.columns):
+ref_area = group['area'].to_numpy()
+else:
+ref_area = [np.nan]*len(coords_reference)

 neighbor_ids = []
 neighbor_ids_per_t = []
+intersection_values = []

 time_of_first_entrance_in_neighborhood = {}
 t_departure={}
@@ -218,10 +227,16 @@ def measure_pair_signals_at_position(pos, neighborhood_protocol, velocity_kwargs
 for neigh in neighbors_at_t:
 if neigh['id'] not in neighbor_ids:
 time_of_first_entrance_in_neighborhood[neigh['id']]=t
+if 'intersection' in neigh:
+intersection_values.append({"frame": t, "neigh_id": neigh['id'], "intersection": neigh['intersection']})
+else:
+intersection_values.append({"frame": t, "neigh_id": neigh['id'], "intersection": np.nan})
 neighbor_ids.append(neigh['id'])
 neighs_t.append(neigh['id'])
 neighbor_ids_per_t.append(neighs_t)

+intersection_values = pd.DataFrame(intersection_values)
+
 #print(neighbor_ids_per_t)
 unique_neigh = list(np.unique(neighbor_ids))
 print(f'Reference cell {tid}: found {len(unique_neigh)} neighbour cells: {unique_neigh}...')
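The per-frame intersections are collected as a list of dictionaries and then turned into a DataFrame so they can later be looked up by neighbour ID and frame. A toy version of that build-and-query pattern (values are made up):

import numpy as np
import pandas as pd

records = [
    {"frame": 0, "neigh_id": 5, "intersection": 30.0},
    {"frame": 1, "neigh_id": 5, "intersection": np.nan},
]
intersection_values = pd.DataFrame(records)

# Intersection of neighbour 5 at frame 0; NaN if the pair was never recorded.
vals = intersection_values.loc[
    (intersection_values["neigh_id"] == 5) & (intersection_values["frame"] == 0),
    "intersection",
].values
inter = vals[0] if len(vals) > 0 else np.nan
print(inter)  # 30.0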
@@ -232,6 +247,11 @@ def measure_pair_signals_at_position(pos, neighborhood_protocol, velocity_kwargs

 coords_neighbor = group_neigh[['POSITION_X', 'POSITION_Y']].to_numpy()
 timeline_neighbor = group_neigh['FRAME'].to_numpy()
+if "area" in list(group_neigh.columns):
+neigh_area = group_neigh['area'].to_numpy()
+else:
+neigh_area = [np.nan]*len(timeline_neighbor)
+

 # # Perform timeline matching to have same start-end points and no gaps
 full_timeline, _, _ = timeline_matching(timeline_reference, timeline_neighbor)
@@ -239,6 +259,9 @@ def measure_pair_signals_at_position(pos, neighborhood_protocol, velocity_kwargs
 neighbor_vector = np.zeros((len(full_timeline), 2))
 neighbor_vector[:,:] = np.nan

+intersection_vector = np.zeros((len(full_timeline)))
+intersection_vector[:] = np.nan
+
 centre_of_mass_columns = [(c,c.replace('POSITION_X','POSITION_Y')) for c in list(neighbor_properties.columns) if c.endswith('centre_of_mass_POSITION_X')]
 centre_of_mass_labels = [c.replace('_centre_of_mass_POSITION_X','') for c in list(neighbor_properties.columns) if c.endswith('centre_of_mass_POSITION_X')]

@@ -319,7 +342,20 @@ def measure_pair_signals_at_position(pos, neighborhood_protocol, velocity_kwargs

 if t in timeline_reference: # meaning position exists on both sides

-idx_reference = list(timeline_reference).index(t)
+idx_reference = list(timeline_reference).index(t)
+inter = intersection_values.loc[(intersection_values['neigh_id']==nc)&(intersection_values["frame"]==t),"intersection"].values
+if len(inter)==0:
+inter = np.nan
+else:
+inter = inter[0]
+
+neigh_inter_fraction = np.nan
+if inter==inter and neigh_area[t]==neigh_area[t]:
+neigh_inter_fraction = inter / neigh_area[t]
+
+ref_inter_fraction = np.nan
+if inter==inter and ref_area[t]==ref_area[t]:
+ref_inter_fraction = inter / ref_area[t]

 if nc in neighbor_ids_per_t[idx_reference]:

@@ -328,7 +364,7 @@ def measure_pair_signals_at_position(pos, neighborhood_protocol, velocity_kwargs
 {'REFERENCE_ID': tid, 'NEIGHBOR_ID': nc,
 'reference_population': reference_population,
 'neighbor_population': neighbor_population,
-'FRAME': t, 'distance': relative_distance[t],
+'FRAME': t, 'distance': relative_distance[t], 'intersection': inter, 'reference_frac_area_intersection': ref_inter_fraction, 'neighbor_frac_area_intersection': neigh_inter_fraction,
 'velocity': rel_velocity[t],
 'velocity_smooth': rel_velocity_long_timescale[t],
 'angle': angle[t] * 180 / np.pi,
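The `inter==inter` and `area==area` comparisons used two hunks above to compute `ref_inter_fraction` and `neigh_inter_fraction` are NaN guards: NaN is the only float value not equal to itself, so the fractions are only computed when both the intersection and the corresponding cell area are defined. A minimal illustration of the idiom:

import numpy as np

inter, area = 42.0, np.nan
fraction = np.nan
if inter == inter and area == area:  # both values are non-NaN
    fraction = inter / area
print(fraction)  # nan, because the area is missing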
@@ -349,7 +385,7 @@ def measure_pair_signals_at_position(pos, neighborhood_protocol, velocity_kwargs
 {'REFERENCE_ID': tid, 'NEIGHBOR_ID': nc,
 'reference_population': reference_population,
 'neighbor_population': neighbor_population,
-'FRAME': t, 'distance': relative_distance[t],
+'FRAME': t, 'distance': relative_distance[t], 'intersection': inter, 'reference_frac_area_intersection': ref_inter_fraction, 'neighbor_frac_area_intersection': neigh_inter_fraction,
 'velocity': rel_velocity[t],
 'velocity_smooth': rel_velocity_long_timescale[t],
 'angle': angle[t] * 180 / np.pi,
@@ -642,4 +678,92 @@ def extract_neighborhood_settings(neigh_string, population='targets'):
 return neigh_protocol


+def expand_pair_table(data):
+
+"""
+Expands a pair table by merging reference and neighbor trajectory data from CSV files based on the specified
+reference and neighbor populations, and their associated positions and frames.
+
+Parameters
+----------
+data : pandas.DataFrame
+DataFrame containing the pair table, which should include the columns:
+- 'reference_population': The reference population type.
+- 'neighbor_population': The neighbor population type.
+- 'position': The position identifier for each pair.
+
+Returns
+-------
+pandas.DataFrame
+Expanded DataFrame that includes merged reference and neighbor data, sorted by position, reference population,
+neighbor population, and frame. Rows without values in 'REFERENCE_ID', 'NEIGHBOR_ID', 'reference_population',
+or 'neighbor_population' are dropped.
+
+Notes
+-----
+- For each unique pair of `reference_population` and `neighbor_population`, the function identifies corresponding
+trajectories CSV files based on the position identifier.
+- The function reads the trajectories CSV files, prefixes columns with 'reference_' or 'neighbor_' to avoid
+conflicts, and merges data from reference and neighbor tables based on `TRACK_ID` or `ID`, and `FRAME`.
+- Merges are performed in an outer join manner to retain all rows, regardless of missing values in the target files.
+- The final DataFrame is sorted and cleaned to ensure only valid pairings are included.
+
+Example
+-------
+>>> expanded_df = expand_pair_table(pair_table)
+>>> expanded_df.head()
+
+Raises
+------
+AssertionError
+If 'reference_population' or 'neighbor_population' is not found in the columns of `data`.
+"""
+
+assert 'reference_population' in list(data.columns),"Please provide a valid pair table..."
+assert 'neighbor_population' in list(data.columns),"Please provide a valid pair table..."
+
+expanded_table = []
+
+for neigh, group in data.groupby(['reference_population','neighbor_population']):
+
+ref_pop = neigh[0]; neigh_pop = neigh[1];
+
+for pos,pos_group in group.groupby('position'):
+
+ref_tab = os.sep.join([pos,'output','tables',f'trajectories_{ref_pop}.csv'])
+neigh_tab = os.sep.join([pos,'output','tables',f'trajectories_{neigh_pop}.csv'])
+
+if os.path.exists(ref_tab):
+df_ref = pd.read_csv(ref_tab)
+if 'TRACK_ID' in df_ref.columns:
+if not np.all(df_ref['TRACK_ID'].isnull()):
+ref_merge_cols = ['TRACK_ID','FRAME']
+else:
+ref_merge_cols = ['ID','FRAME']
+else:
+ref_merge_cols = ['ID','FRAME']
+
+if os.path.exists(neigh_tab):
+df_neigh = pd.read_csv(neigh_tab)
+if 'TRACK_ID' in df_neigh.columns:
+if not np.all(df_neigh['TRACK_ID'].isnull()):
+neigh_merge_cols = ['TRACK_ID','FRAME']
+else:
+neigh_merge_cols = ['ID','FRAME']
+else:
+neigh_merge_cols = ['ID','FRAME']
+
+df_ref = df_ref.add_prefix('reference_',axis=1)
+df_neigh = df_neigh.add_prefix('neighbor_',axis=1)
+ref_merge_cols = ['reference_'+c for c in ref_merge_cols]
+neigh_merge_cols = ['neighbor_'+c for c in neigh_merge_cols]
+
+merge_ref = pos_group.merge(df_ref, how='outer', left_on=['REFERENCE_ID','FRAME'], right_on=ref_merge_cols, suffixes=('', '_reference'))
+merge_neigh = merge_ref.merge(df_neigh, how='outer', left_on=['NEIGHBOR_ID','FRAME'], right_on=neigh_merge_cols, suffixes=('_reference', '_neighbor'))
+expanded_table.append(merge_neigh)
+
+df_expanded = pd.concat(expanded_table, axis=0, ignore_index = True)
+df_expanded = df_expanded.sort_values(by=['position', 'reference_population','neighbor_population','REFERENCE_ID','NEIGHBOR_ID','FRAME'])
+df_expanded = df_expanded.dropna(axis=0, subset=['REFERENCE_ID','NEIGHBOR_ID','reference_population','neighbor_population'])

+return df_expanded
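A usage sketch of `expand_pair_table`, assuming it is importable from `celldetective.relative_measurements` (the module the surrounding hunks modify) and that the pair table's 'position' column points at position folders containing `output/tables/trajectories_<population>.csv` files; all paths below are placeholders:

import pandas as pd
from celldetective.relative_measurements import expand_pair_table

# Placeholder path; the 'position' column of this table must point at position
# folders that contain output/tables/trajectories_<population>.csv files.
pairs = pd.read_csv("path/to/pair_table.csv")

expanded = expand_pair_table(pairs)
# Each pair row now also carries 'reference_*' and 'neighbor_*' columns pulled from
# the matching trajectories tables, merged on TRACK_ID (or ID) and FRAME.
print([c for c in expanded.columns if c.startswith("reference_")][:5])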
@@ -251,14 +251,14 @@ def measure_index(indices):
 iso_table = measure_isotropic_intensity(positions_at_t, img, channels=channel_names, intensity_measurement_radii=intensity_measurement_radii, column_labels=column_labels, operations=isotropic_operations, verbose=False)

 if do_iso_intensities and do_features:
-measurements_at_t = iso_table.merge(feature_table, how='outer', on='class_id',suffixes=('', '_delme'))
+measurements_at_t = iso_table.merge(feature_table, how='outer', on='class_id',suffixes=('_delme', ''))
 measurements_at_t = measurements_at_t[[c for c in measurements_at_t.columns if not c.endswith('_delme')]]
 elif do_iso_intensities * (not do_features):
 measurements_at_t = iso_table
 elif do_features:
-measurements_at_t = positions_at_t.merge(feature_table, how='outer', on='class_id',suffixes=('', '_delme'))
+measurements_at_t = positions_at_t.merge(feature_table, how='outer', on='class_id',suffixes=('_delme', ''))
 measurements_at_t = measurements_at_t[[c for c in measurements_at_t.columns if not c.endswith('_delme')]]
-
+
 center_of_mass_x_cols = [c for c in list(measurements_at_t.columns) if c.endswith('centre_of_mass_x')]
 center_of_mass_y_cols = [c for c in list(measurements_at_t.columns) if c.endswith('centre_of_mass_y')]
 for c in center_of_mass_x_cols:
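The suffix swap above changes which of the duplicated columns survives the merge: with `suffixes=('_delme', '')`, overlapping columns coming from the left table receive the `_delme` suffix and are then dropped by the filter on the following line, so the feature table's values take precedence. A small pandas illustration (column names are made up):

import pandas as pd

iso_table = pd.DataFrame({"class_id": [1, 2], "area": [10.0, 20.0]})
feature_table = pd.DataFrame({"class_id": [1, 2], "area": [11.0, 21.0]})

merged = iso_table.merge(feature_table, how="outer", on="class_id", suffixes=("_delme", ""))
merged = merged[[c for c in merged.columns if not c.endswith("_delme")]]
print(merged)
#    class_id  area
# 0         1  11.0
# 1         2  21.0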