celldetective 1.3.1__py3-none-any.whl → 1.3.3.post1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (34)
  1. celldetective/_version.py +1 -1
  2. celldetective/events.py +2 -0
  3. celldetective/gui/classifier_widget.py +51 -3
  4. celldetective/gui/control_panel.py +9 -3
  5. celldetective/gui/generic_signal_plot.py +161 -2
  6. celldetective/gui/gui_utils.py +90 -1
  7. celldetective/gui/measurement_options.py +35 -32
  8. celldetective/gui/plot_signals_ui.py +8 -3
  9. celldetective/gui/process_block.py +36 -114
  10. celldetective/gui/retrain_segmentation_model_options.py +3 -1
  11. celldetective/gui/signal_annotator.py +53 -26
  12. celldetective/gui/signal_annotator2.py +17 -30
  13. celldetective/gui/survival_ui.py +7 -3
  14. celldetective/gui/tableUI.py +300 -183
  15. celldetective/gui/thresholds_gui.py +195 -199
  16. celldetective/gui/viewers.py +267 -13
  17. celldetective/io.py +110 -10
  18. celldetective/measure.py +128 -88
  19. celldetective/models/segmentation_effectors/ricm_bf_all_last/config_input.json +79 -0
  20. celldetective/models/segmentation_effectors/ricm_bf_all_last/ricm_bf_all_last +0 -0
  21. celldetective/models/segmentation_effectors/ricm_bf_all_last/training_instructions.json +37 -0
  22. celldetective/models/segmentation_effectors/test-transfer/config_input.json +39 -0
  23. celldetective/models/segmentation_effectors/test-transfer/test-transfer +0 -0
  24. celldetective/neighborhood.py +154 -69
  25. celldetective/relative_measurements.py +128 -4
  26. celldetective/scripts/measure_cells.py +3 -3
  27. celldetective/signals.py +207 -213
  28. celldetective/utils.py +16 -0
  29. {celldetective-1.3.1.dist-info → celldetective-1.3.3.post1.dist-info}/METADATA +11 -10
  30. {celldetective-1.3.1.dist-info → celldetective-1.3.3.post1.dist-info}/RECORD +34 -29
  31. {celldetective-1.3.1.dist-info → celldetective-1.3.3.post1.dist-info}/WHEEL +1 -1
  32. {celldetective-1.3.1.dist-info → celldetective-1.3.3.post1.dist-info}/LICENSE +0 -0
  33. {celldetective-1.3.1.dist-info → celldetective-1.3.3.post1.dist-info}/entry_points.txt +0 -0
  34. {celldetective-1.3.1.dist-info → celldetective-1.3.3.post1.dist-info}/top_level.txt +0 -0
celldetective/signals.py CHANGED
@@ -17,6 +17,7 @@ from sklearn.metrics import confusion_matrix, classification_report
  from sklearn.metrics import jaccard_score, balanced_accuracy_score, precision_score, recall_score
  from scipy.interpolate import interp1d
  from scipy.ndimage import shift
+ from sklearn.metrics import ConfusionMatrixDisplay

  from celldetective.io import locate_signal_model, get_position_pickle, get_position_table
  from celldetective.tracking import clean_trajectories, interpolate_nan_properties
@@ -149,7 +150,7 @@ def analyze_signals(trajectories, model, interpolate_na=True,
  assert os.path.exists(model_config_path),f'Model configuration could not be located in folder {model_path}... Abort.'

  available_signals = list(trajectories.columns)
- print('The available_signals are : ',available_signals)
+ #print('The available_signals are : ',available_signals)

  f = open(model_config_path)
  config = json.load(f)
@@ -167,22 +168,11 @@ def analyze_signals(trajectories, model, interpolate_na=True,
  selected_signals = []
  for s in required_signals:
  pattern_test = [s in a or s==a for a in available_signals]
- print(f'Pattern test for signal {s}: ', pattern_test)
+ #print(f'Pattern test for signal {s}: ', pattern_test)
  assert np.any(pattern_test),f'No signal matches with the requirements of the model {required_signals}. Please pass the signals manually with the argument selected_signals or add measurements. Abort.'
- valid_columns = np.array(available_signals)[np.array(pattern_test)]
- if len(valid_columns)==1:
- selected_signals.append(valid_columns[0])
- else:
- #print(test_number_of_nan(trajectories, valid_columns))
- print(f'Found several candidate signals: {valid_columns}')
- for vc in natsorted(valid_columns):
- if 'circle' in vc:
- selected_signals.append(vc)
- break
- else:
- selected_signals.append(valid_columns[0])
- # do something more complicated in case of one to many columns
- #pass
+ valid_columns = natsorted(np.array(available_signals)[np.array(pattern_test)])
+ print(f"Selecting the first time series among: {valid_columns} for input requirement {s}...")
+ selected_signals.append(valid_columns[0])
  else:
  assert len(selected_signals)==len(required_signals),f'Mismatch between the number of required signals {required_signals} and the provided signals {selected_signals}... Abort.'

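In the hunk above, the special-casing of 'circle' columns is gone: when a model requirement matches several measurement columns, the candidates are natural-sorted and the first one is kept. A minimal standalone sketch of that rule; the helper name pick_signals and the example column names are illustrative, not part of celldetective:

```python
import numpy as np
from natsort import natsorted

def pick_signals(required_signals, available_signals):
    # Illustrative re-implementation of the simplified selection rule:
    # keep every column whose name contains (or equals) the requirement,
    # natural-sort the candidates and take the first one.
    selected = []
    for s in required_signals:
        matches = [a for a in available_signals if s in a or s == a]
        assert matches, f"No signal matches the model requirement {s!r}."
        candidates = natsorted(matches)
        print(f"Selecting the first time series among: {candidates} for input requirement {s}...")
        selected.append(candidates[0])
    return selected

# Two columns match; the natural-sorted first one (channel_0) is selected.
print(pick_signals(["intensity_mean"], ["intensity_mean-channel_1", "intensity_mean-channel_0"]))
```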
@@ -202,13 +192,7 @@ def analyze_signals(trajectories, model, interpolate_na=True,
  signals[i,frames,j] = signal
  signals[i,max(frames):,j] = signal[-1]

- # for i in range(5):
- # print('pre model')
- # plt.plot(signals[i,:,0])
- # plt.show()
-
  model = SignalDetectionModel(pretrained=complete_path)
- print('signal shape: ', signals.shape)

  classes = model.predict_class(signals)
  times_recast = model.predict_time_of_interest(signals)
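The lines above build the per-track input tensor handed to the signal model. A self-contained sketch of that layout on a toy DataFrame, using the default column labels; the prediction calls are left as comments since they require a trained SignalDetectionModel and a model path:

```python
import numpy as np
import pandas as pd

# Toy trajectories: two tracks, one measured channel.
trajectories = pd.DataFrame({
    "TRACK_ID": [0, 0, 0, 1, 1],
    "FRAME":    [0, 1, 2, 0, 1],
    "intensity_mean": [1.0, 2.0, 3.0, 5.0, 6.0],
})
selected_signals = ["intensity_mean"]

max_signal_size = int(trajectories["FRAME"].max()) + 2
tracks = trajectories["TRACK_ID"].unique()
signals = np.zeros((len(tracks), max_signal_size, len(selected_signals)))

for i, (tid, group) in enumerate(trajectories.groupby("TRACK_ID")):
    frames = group["FRAME"].to_numpy().astype(int)
    for j, col in enumerate(selected_signals):
        signal = group[col].to_numpy()
        signals[i, frames, j] = signal
        # pad the tail of the track with the last observed value
        signals[i, max(frames):, j] = signal[-1]

print(signals.shape)  # (n_tracks, n_frames, n_channels) = (2, 4, 1)
# model = SignalDetectionModel(pretrained=complete_path)
# classes = model.predict_class(signals)
# times = model.predict_time_of_interest(signals)
```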
@@ -378,194 +362,193 @@ def analyze_pair_signals_at_position(pos, model, use_gpu=True):
  return None


- def analyze_signals(trajectories, model, interpolate_na=True,
- selected_signals=None,
- model_path=None,
- column_labels={'track': "TRACK_ID", 'time': 'FRAME', 'x': 'POSITION_X', 'y': 'POSITION_Y'},
- plot_outcome=False, output_dir=None):
- """
- Analyzes signals from trajectory data using a specified signal detection model and configuration.
-
- This function preprocesses trajectory data, selects specified signals, and applies a pretrained signal detection
- model to predict classes and times of interest for each trajectory. It supports custom column labeling, interpolation
- of missing values, and plotting of analysis outcomes.
-
- Parameters
- ----------
- trajectories : pandas.DataFrame
- DataFrame containing trajectory data with columns for track ID, frame, position, and signals.
- model : str
- The name of the signal detection model to be used for analysis.
- interpolate_na : bool, optional
- Whether to interpolate missing values in the trajectories (default is True).
- selected_signals : list of str, optional
- A list of column names from `trajectories` representing the signals to be analyzed. If None, signals will
- be automatically selected based on the model configuration (default is None).
- column_labels : dict, optional
- A dictionary mapping the default column names ('track', 'time', 'x', 'y') to the corresponding column names
- in `trajectories` (default is {'track': "TRACK_ID", 'time': 'FRAME', 'x': 'POSITION_X', 'y': 'POSITION_Y'}).
- plot_outcome : bool, optional
- If True, generates and saves a plot of the signal analysis outcome (default is False).
- output_dir : str, optional
- The directory where the outcome plot will be saved. Required if `plot_outcome` is True (default is None).
-
- Returns
- -------
- pandas.DataFrame
- The input `trajectories` DataFrame with additional columns for predicted classes, times of interest, and
- corresponding colors based on status and class.
-
- Raises
- ------
- AssertionError
- If the model or its configuration file cannot be located.
-
- Notes
- -----
- - The function relies on an external model configuration file (`config_input.json`) located in the model's directory.
- - Signal selection and preprocessing are based on the requirements specified in the model's configuration.
-
- """
-
- model_path = locate_signal_model(model, path=model_path)
- complete_path = model_path # +model
- complete_path = rf"{complete_path}"
- model_config_path = os.sep.join([complete_path, 'config_input.json'])
- model_config_path = rf"{model_config_path}"
- assert os.path.exists(complete_path), f'Model {model} could not be located in folder {model_path}... Abort.'
- assert os.path.exists(
- model_config_path), f'Model configuration could not be located in folder {model_path}... Abort.'
-
- available_signals = list(trajectories.columns)
- print('The available_signals are : ', available_signals)
-
- f = open(model_config_path)
- config = json.load(f)
- required_signals = config["channels"]
-
- try:
- label = config['label']
- if label == '':
- label = None
- except:
- label = None
-
- if selected_signals is None:
- selected_signals = []
- for s in required_signals:
- pattern_test = [s in a or s == a for a in available_signals]
- print(f'Pattern test for signal {s}: ', pattern_test)
- assert np.any(
- pattern_test), f'No signal matches with the requirements of the model {required_signals}. Please pass the signals manually with the argument selected_signals or add measurements. Abort.'
- valid_columns = np.array(available_signals)[np.array(pattern_test)]
- if len(valid_columns) == 1:
- selected_signals.append(valid_columns[0])
- else:
- # print(test_number_of_nan(trajectories, valid_columns))
- print(f'Found several candidate signals: {valid_columns}')
- for vc in natsorted(valid_columns):
- if 'circle' in vc:
- selected_signals.append(vc)
- break
- else:
- selected_signals.append(valid_columns[0])
- # do something more complicated in case of one to many columns
- # pass
- else:
- assert len(selected_signals) == len(
- required_signals), f'Mismatch between the number of required signals {required_signals} and the provided signals {selected_signals}... Abort.'
-
- print(f'The following channels will be passed to the model: {selected_signals}')
- trajectories_clean = clean_trajectories(trajectories, interpolate_na=interpolate_na,
- interpolate_position_gaps=interpolate_na, column_labels=column_labels)
-
- max_signal_size = int(trajectories_clean[column_labels['time']].max()) + 2
- tracks = trajectories_clean[column_labels['track']].unique()
- signals = np.zeros((len(tracks), max_signal_size, len(selected_signals)))
-
- for i, (tid, group) in enumerate(trajectories_clean.groupby(column_labels['track'])):
- frames = group[column_labels['time']].to_numpy().astype(int)
- for j, col in enumerate(selected_signals):
- signal = group[col].to_numpy()
- signals[i, frames, j] = signal
- signals[i, max(frames):, j] = signal[-1]
-
- # for i in range(5):
- # print('pre model')
- # plt.plot(signals[i,:,0])
- # plt.show()
-
- model = SignalDetectionModel(pretrained=complete_path)
- print('signal shape: ', signals.shape)
-
- classes = model.predict_class(signals)
- times_recast = model.predict_time_of_interest(signals)
-
- if label is None:
- class_col = 'class'
- time_col = 't0'
- status_col = 'status'
- else:
- class_col = 'class_' + label
- time_col = 't_' + label
- status_col = 'status_' + label
-
- for i, (tid, group) in enumerate(trajectories.groupby(column_labels['track'])):
- indices = group.index
- trajectories.loc[indices, class_col] = classes[i]
- trajectories.loc[indices, time_col] = times_recast[i]
- print('Done.')
-
- for tid, group in trajectories.groupby(column_labels['track']):
-
- indices = group.index
- t0 = group[time_col].to_numpy()[0]
- cclass = group[class_col].to_numpy()[0]
- timeline = group[column_labels['time']].to_numpy()
- status = np.zeros_like(timeline)
- if t0 > 0:
- status[timeline >= t0] = 1.
- if cclass == 2:
- status[:] = 2
- if cclass > 2:
- status[:] = 42
- status_color = [color_from_status(s) for s in status]
- class_color = [color_from_class(cclass) for i in range(len(status))]
-
- trajectories.loc[indices, status_col] = status
- trajectories.loc[indices, 'status_color'] = status_color
- trajectories.loc[indices, 'class_color'] = class_color
-
- if plot_outcome:
- fig, ax = plt.subplots(1, len(selected_signals), figsize=(10, 5))
- for i, s in enumerate(selected_signals):
- for k, (tid, group) in enumerate(trajectories.groupby(column_labels['track'])):
- cclass = group[class_col].to_numpy()[0]
- t0 = group[time_col].to_numpy()[0]
- timeline = group[column_labels['time']].to_numpy()
- if cclass == 0:
- if len(selected_signals) > 1:
- ax[i].plot(timeline - t0, group[s].to_numpy(), c='tab:blue', alpha=0.1)
- else:
- ax.plot(timeline - t0, group[s].to_numpy(), c='tab:blue', alpha=0.1)
- if len(selected_signals) > 1:
- for a, s in zip(ax, selected_signals):
- a.set_title(s)
- a.set_xlabel(r'time - t$_0$ [frame]')
- a.spines['top'].set_visible(False)
- a.spines['right'].set_visible(False)
- else:
- ax.set_title(s)
- ax.set_xlabel(r'time - t$_0$ [frame]')
- ax.spines['top'].set_visible(False)
- ax.spines['right'].set_visible(False)
- plt.tight_layout()
- if output_dir is not None:
- plt.savefig(output_dir + 'signal_collapse.png', bbox_inches='tight', dpi=300)
- plt.pause(3)
- plt.close()
-
- return trajectories
+ # def analyze_signals(trajectories, model, interpolate_na=True,
+ # selected_signals=None,
+ # model_path=None,
+ # column_labels={'track': "TRACK_ID", 'time': 'FRAME', 'x': 'POSITION_X', 'y': 'POSITION_Y'},
+ # plot_outcome=False, output_dir=None):
+ # """
+ # Analyzes signals from trajectory data using a specified signal detection model and configuration.
+
+ # This function preprocesses trajectory data, selects specified signals, and applies a pretrained signal detection
+ # model to predict classes and times of interest for each trajectory. It supports custom column labeling, interpolation
+ # of missing values, and plotting of analysis outcomes.
+
+ # Parameters
+ # ----------
+ # trajectories : pandas.DataFrame
+ # DataFrame containing trajectory data with columns for track ID, frame, position, and signals.
+ # model : str
+ # The name of the signal detection model to be used for analysis.
+ # interpolate_na : bool, optional
+ # Whether to interpolate missing values in the trajectories (default is True).
+ # selected_signals : list of str, optional
+ # A list of column names from `trajectories` representing the signals to be analyzed. If None, signals will
+ # be automatically selected based on the model configuration (default is None).
+ # column_labels : dict, optional
+ # A dictionary mapping the default column names ('track', 'time', 'x', 'y') to the corresponding column names
+ # in `trajectories` (default is {'track': "TRACK_ID", 'time': 'FRAME', 'x': 'POSITION_X', 'y': 'POSITION_Y'}).
+ # plot_outcome : bool, optional
+ # If True, generates and saves a plot of the signal analysis outcome (default is False).
+ # output_dir : str, optional
+ # The directory where the outcome plot will be saved. Required if `plot_outcome` is True (default is None).
+
+ # Returns
+ # -------
+ # pandas.DataFrame
+ # The input `trajectories` DataFrame with additional columns for predicted classes, times of interest, and
+ # corresponding colors based on status and class.
+
+ # Raises
+ # ------
+ # AssertionError
+ # If the model or its configuration file cannot be located.
+
+ # Notes
+ # -----
+ # - The function relies on an external model configuration file (`config_input.json`) located in the model's directory.
+ # - Signal selection and preprocessing are based on the requirements specified in the model's configuration.
+
+ # """
+
+ # model_path = locate_signal_model(model, path=model_path)
+ # complete_path = model_path # +model
+ # complete_path = rf"{complete_path}"
+ # model_config_path = os.sep.join([complete_path, 'config_input.json'])
+ # model_config_path = rf"{model_config_path}"
+ # assert os.path.exists(complete_path), f'Model {model} could not be located in folder {model_path}... Abort.'
+ # assert os.path.exists(
+ # model_config_path), f'Model configuration could not be located in folder {model_path}... Abort.'
+
+ # available_signals = list(trajectories.columns)
+
+ # f = open(model_config_path)
+ # config = json.load(f)
+ # required_signals = config["channels"]
+
+ # try:
+ # label = config['label']
+ # if label == '':
+ # label = None
+ # except:
+ # label = None
+
+ # if selected_signals is None:
+ # selected_signals = []
+ # for s in required_signals:
+ # pattern_test = [s in a or s == a for a in available_signals]
+ # #print(f'Pattern test for signal {s}: ', pattern_test)
+ # assert np.any(
+ # pattern_test), f'No signal matches with the requirements of the model {required_signals}. Please pass the signals manually with the argument selected_signals or add measurements. Abort.'
+ # valid_columns = np.array(available_signals)[np.array(pattern_test)]
+ # if len(valid_columns) == 1:
+ # selected_signals.append(valid_columns[0])
+ # else:
+ # # print(test_number_of_nan(trajectories, valid_columns))
+ # print(f'Found several candidate signals: {valid_columns}')
+ # for vc in natsorted(valid_columns):
+ # if 'circle' in vc:
+ # selected_signals.append(vc)
+ # break
+ # else:
+ # selected_signals.append(valid_columns[0])
+ # # do something more complicated in case of one to many columns
+ # # pass
+ # else:
+ # assert len(selected_signals) == len(
+ # required_signals), f'Mismatch between the number of required signals {required_signals} and the provided signals {selected_signals}... Abort.'
+
+ # print(f'The following channels will be passed to the model: {selected_signals}')
+ # trajectories_clean = clean_trajectories(trajectories, interpolate_na=interpolate_na,
+ # interpolate_position_gaps=interpolate_na, column_labels=column_labels)
+
+ # max_signal_size = int(trajectories_clean[column_labels['time']].max()) + 2
+ # tracks = trajectories_clean[column_labels['track']].unique()
+ # signals = np.zeros((len(tracks), max_signal_size, len(selected_signals)))
+
+ # for i, (tid, group) in enumerate(trajectories_clean.groupby(column_labels['track'])):
+ # frames = group[column_labels['time']].to_numpy().astype(int)
+ # for j, col in enumerate(selected_signals):
+ # signal = group[col].to_numpy()
+ # signals[i, frames, j] = signal
+ # signals[i, max(frames):, j] = signal[-1]
+
+ # # for i in range(5):
+ # # print('pre model')
+ # # plt.plot(signals[i,:,0])
+ # # plt.show()
+
+ # model = SignalDetectionModel(pretrained=complete_path)
+ # print('signal shape: ', signals.shape)
+
+ # classes = model.predict_class(signals)
+ # times_recast = model.predict_time_of_interest(signals)
+
+ # if label is None:
+ # class_col = 'class'
+ # time_col = 't0'
+ # status_col = 'status'
+ # else:
+ # class_col = 'class_' + label
+ # time_col = 't_' + label
+ # status_col = 'status_' + label
+
+ # for i, (tid, group) in enumerate(trajectories.groupby(column_labels['track'])):
+ # indices = group.index
+ # trajectories.loc[indices, class_col] = classes[i]
+ # trajectories.loc[indices, time_col] = times_recast[i]
+ # print('Done.')
+
+ # for tid, group in trajectories.groupby(column_labels['track']):
+
+ # indices = group.index
+ # t0 = group[time_col].to_numpy()[0]
+ # cclass = group[class_col].to_numpy()[0]
+ # timeline = group[column_labels['time']].to_numpy()
+ # status = np.zeros_like(timeline)
+ # if t0 > 0:
+ # status[timeline >= t0] = 1.
+ # if cclass == 2:
+ # status[:] = 2
+ # if cclass > 2:
+ # status[:] = 42
+ # status_color = [color_from_status(s) for s in status]
+ # class_color = [color_from_class(cclass) for i in range(len(status))]
+
+ # trajectories.loc[indices, status_col] = status
+ # trajectories.loc[indices, 'status_color'] = status_color
+ # trajectories.loc[indices, 'class_color'] = class_color
+
+ # if plot_outcome:
+ # fig, ax = plt.subplots(1, len(selected_signals), figsize=(10, 5))
+ # for i, s in enumerate(selected_signals):
+ # for k, (tid, group) in enumerate(trajectories.groupby(column_labels['track'])):
+ # cclass = group[class_col].to_numpy()[0]
+ # t0 = group[time_col].to_numpy()[0]
+ # timeline = group[column_labels['time']].to_numpy()
+ # if cclass == 0:
+ # if len(selected_signals) > 1:
+ # ax[i].plot(timeline - t0, group[s].to_numpy(), c='tab:blue', alpha=0.1)
+ # else:
+ # ax.plot(timeline - t0, group[s].to_numpy(), c='tab:blue', alpha=0.1)
+ # if len(selected_signals) > 1:
+ # for a, s in zip(ax, selected_signals):
+ # a.set_title(s)
+ # a.set_xlabel(r'time - t$_0$ [frame]')
+ # a.spines['top'].set_visible(False)
+ # a.spines['right'].set_visible(False)
+ # else:
+ # ax.set_title(s)
+ # ax.set_xlabel(r'time - t$_0$ [frame]')
+ # ax.spines['top'].set_visible(False)
+ # ax.spines['right'].set_visible(False)
+ # plt.tight_layout()
+ # if output_dir is not None:
+ # plt.savefig(output_dir + 'signal_collapse.png', bbox_inches='tight', dpi=300)
+ # plt.pause(3)
+ # plt.close()
+
+ # return trajectories


  def analyze_pair_signals(trajectories_pairs,trajectories_reference,trajectories_neighbors, model, interpolate_na=True, selected_signals=None,
@@ -1405,7 +1388,10 @@ class SignalDetectionModel(object):

  if self.show_plots:
  try:
- plot_confusion_matrix(results, ["dead","alive","miscellaneous"], output_dir=self.model_folder+os.sep, title=title)
+ ConfusionMatrixDisplay.from_predictions(ground_truth, predictions, cmap="Blues", normalize="pred", display_labels=["event","no event","left censored"])
+ plt.savefig(os.sep.join([self.model_folder,"test_confusion_matrix.png"]),bbox_inches='tight',dpi=300)
+ plt.pause(3)
+ plt.close()
  except Exception as e:
  print(e)
  pass
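This hunk (and the validation-set hunk that follows) swaps the old plot_confusion_matrix helper for scikit-learn's ConfusionMatrixDisplay. A minimal standalone sketch of the replacement call with dummy labels; the output file name here is arbitrary:

```python
import matplotlib.pyplot as plt
from sklearn.metrics import ConfusionMatrixDisplay

ground_truth = [0, 0, 1, 2, 1, 0]   # dummy class indices
predictions  = [0, 1, 1, 2, 1, 0]

# normalize="pred" normalizes each column (predicted class) so it sums to 1
ConfusionMatrixDisplay.from_predictions(
    ground_truth, predictions,
    cmap="Blues", normalize="pred",
    display_labels=["event", "no event", "left censored"],
)
plt.savefig("validation_confusion_matrix.png", bbox_inches="tight", dpi=300)
plt.close()
```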
@@ -1434,8 +1420,12 @@ class SignalDetectionModel(object):

  if self.show_plots:
  try:
- plot_confusion_matrix(results, ["dead","alive","miscellaneous"], output_dir=self.model_folder+os.sep, title=title)
- except:
+ ConfusionMatrixDisplay.from_predictions(ground_truth, predictions, cmap="Blues", normalize="pred", display_labels=["event","no event","left censored"])
+ plt.savefig(os.sep.join([self.model_folder,"validation_confusion_matrix.png"]),bbox_inches='tight',dpi=300)
+ plt.pause(3)
+ plt.close()
+ except Exception as e:
+ print(e)
  pass
  print("Validation set: ",classification_report(ground_truth,predictions))

@@ -3077,6 +3067,10 @@ def mean_signal(df, signal_name, class_col, time_col=None, class_value=[0], retu
  assert signal_name in list(df.columns),"The signal you want to plot is not one of the measured features."
  if isinstance(class_value,int):
  class_value = [class_value]
+ elif class_value is None or class_col is None:
+ class_col = 'class_temp'
+ df['class_temp'] = 1
+ class_value = [1]

  if forced_max_duration is None:
  max_duration = int(df['FRAME'].max())+1 #ceil(np.amax(df.groupby(['position','TRACK_ID']).size().values))
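The new branch above lets mean_signal be called with class_col or class_value set to None: a constant temporary class is injected so every track is pooled into a single group. A toy snippet that mirrors only that branch, not the full function:

```python
import pandas as pd

df = pd.DataFrame({"TRACK_ID": [0, 0, 1], "FRAME": [0, 1, 0], "area": [10.0, 12.0, 8.0]})
class_col, class_value = None, None

if isinstance(class_value, int):
    class_value = [class_value]
elif class_value is None or class_col is None:
    # fall back to a constant temporary class so all tracks are kept
    class_col = 'class_temp'
    df['class_temp'] = 1
    class_value = [1]

# every row now belongs to the pooled class, so the per-frame mean uses all tracks
print(df[df[class_col].isin(class_value)].groupby('FRAME')['area'].mean())
```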
celldetective/utils.py CHANGED
@@ -1,3 +1,4 @@
+
  import numpy as np
  import pandas as pd
  import matplotlib.pyplot as plt
@@ -29,6 +30,21 @@ from skimage.morphology import disk
  from scipy.stats import ks_2samp
  from cliffs_delta import cliffs_delta

+ def safe_log(array):
+
+ if isinstance(array,int) or isinstance(array,float):
+ if value<=0.:
+ return np.nan
+ else:
+ return np.log10(value)
+ else:
+ if isinstance(array, list):
+ array = np.array(array)
+ output_array = np.zeros_like(array).astype(float)
+ output_array[:] = np.nan
+ output_array[array==array] = np.log10(array[array==array])
+ return output_array
+
  def contour_of_instance_segmentation(label, distance):

  """
{celldetective-1.3.1.dist-info → celldetective-1.3.3.post1.dist-info}/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: celldetective
- Version: 1.3.1
+ Version: 1.3.3.post1
  Summary: description
  Home-page: http://github.com/remyeltorro/celldetective
  Author: Rémy Torro
@@ -10,8 +10,8 @@ Description-Content-Type: text/markdown
  License-File: LICENSE
  Requires-Dist: wheel
  Requires-Dist: nbsphinx
- Requires-Dist: nbsphinx-link
- Requires-Dist: sphinx-rtd-theme
+ Requires-Dist: nbsphinx_link
+ Requires-Dist: sphinx_rtd_theme
  Requires-Dist: sphinx
  Requires-Dist: jinja2
  Requires-Dist: ipykernel
@@ -34,12 +34,13 @@ Requires-Dist: liblapack
  Requires-Dist: gputools
  Requires-Dist: lmfit
  Requires-Dist: superqt[cmap]
- Requires-Dist: matplotlib-scalebar
+ Requires-Dist: setuptools
+ Requires-Dist: matplotlib_scalebar
  Requires-Dist: numpy==1.26.4
  Requires-Dist: pytest
  Requires-Dist: pytest-qt
  Requires-Dist: h5py
- Requires-Dist: cliffs-delta
+ Requires-Dist: cliffs_delta

  # Celldetective

@@ -171,20 +172,20 @@ For more information about how to get started, please check the [documentation](
  # How to cite?

  If you use this software in your research, please cite the
- [Celldetective](https://www.biorxiv.org/content/10.1101/2024.03.15.585250v1)
+ [Celldetective](https://www.biorxiv.org/content/10.1101/2024.03.15.585250v3)
  paper (currently preprint):

  ``` raw
  @article {Torro2024.03.15.585250,
- author = {R{\'e}my Torro and Beatriz D{\`\i}az-Bello and Dalia El Arawi and Lorna Ammer and Patrick Chames and Kheya Sengupta and Laurent Limozin},
+ author = {Torro, R{\'e}my and D{\'\i}az-Bello, Beatriz and Arawi, Dalia El and Dervanova, Ksenija and Ammer, Lorna and Dupuy, Florian and Chames, Patrick and Sengupta, Kheya and Limozin, Laurent},
  title = {Celldetective: an AI-enhanced image analysis tool for unraveling dynamic cell interactions},
  elocation-id = {2024.03.15.585250},
  year = {2024},
  doi = {10.1101/2024.03.15.585250},
  publisher = {Cold Spring Harbor Laboratory},
- abstract = {A current key challenge in bioimaging is the analysis of multimodal and multidimensional data reporting dynamic interactions between diverse cell populations. We developed Celldetective, a software that integrates AI-based segmentation and tracking algorithms and automated signal analysis into a user-friendly graphical interface. It offers complete interactive visualization, annotation, and training capabilities. We demonstrate it by analyzing original experimental data of spreading immune effector cells as well as antibody-dependent cell cytotoxicity events using multimodal fluorescence microscopy.Competing Interest StatementThe authors have declared no competing interest.},
- URL = {https://www.biorxiv.org/content/early/2024/03/17/2024.03.15.585250},
- eprint = {https://www.biorxiv.org/content/early/2024/03/17/2024.03.15.585250.full.pdf},
+ abstract = {A current challenge in bioimaging for immunology and immunotherapy research lies in analyzing multimodal and multidimensional data that capture dynamic interactions between diverse cell populations. Here, we introduce Celldetective, an open-source Python-based software designed for high-performance, end-to-end analysis of image-based in vitro immune and immunotherapy assays. Purpose-built for multicondition, 2D multichannel time-lapse microscopy of mixed cell populations, Celldetective is optimized for the needs of immunology assays. The software seamlessly integrates AI-based segmentation, Bayesian tracking, and automated single-cell event detection, all within an intuitive graphical interface that supports interactive visualization, annotation, and training capabilities. We demonstrate its utility with original data on immune effector cell interactions with an activating surface, mediated by bispecific antibodies, and further showcase its potential for analyzing extensive sets of pairwise interactions in antibody-dependent cell cytotoxicity events.Competing Interest StatementThe authors have declared no competing interest.},
+ URL = {https://www.biorxiv.org/content/early/2024/11/13/2024.03.15.585250},
+ eprint = {https://www.biorxiv.org/content/early/2024/11/13/2024.03.15.585250.full.pdf},
  journal = {bioRxiv}
  }
  ```