FlowCyPy 0.7.3__tar.gz → 0.8.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (165)
  1. {flowcypy-0.7.3 → flowcypy-0.8.0}/FlowCyPy/_version.py +2 -2
  2. flowcypy-0.8.0/FlowCyPy/acquisition.py +362 -0
  3. {flowcypy-0.7.3 → flowcypy-0.8.0}/FlowCyPy/classifier.py +4 -4
  4. {flowcypy-0.7.3 → flowcypy-0.8.0}/FlowCyPy/cytometer.py +26 -16
  5. flowcypy-0.8.0/FlowCyPy/dataframe_subclass.py +319 -0
  6. {flowcypy-0.7.3 → flowcypy-0.8.0}/FlowCyPy/detector.py +2 -32
  7. {flowcypy-0.7.3 → flowcypy-0.8.0}/FlowCyPy/flow_cell.py +8 -4
  8. {flowcypy-0.7.3 → flowcypy-0.8.0}/FlowCyPy/helper.py +83 -2
  9. flowcypy-0.8.0/FlowCyPy/triggered_acquisition.py +90 -0
  10. {flowcypy-0.7.3 → flowcypy-0.8.0}/FlowCyPy/utils.py +11 -0
  11. {flowcypy-0.7.3 → flowcypy-0.8.0}/FlowCyPy.egg-info/PKG-INFO +1 -1
  12. {flowcypy-0.7.3 → flowcypy-0.8.0}/FlowCyPy.egg-info/SOURCES.txt +2 -0
  13. {flowcypy-0.7.3 → flowcypy-0.8.0}/PKG-INFO +1 -1
  14. flowcypy-0.8.0/developments/scripts/dev_study_on_size.py +59 -0
  15. flowcypy-0.8.0/developments/scripts/temp.py +112 -0
  16. {flowcypy-0.7.3 → flowcypy-0.8.0}/docs/examples/extras/flow_cytometer_signal.py +12 -8
  17. {flowcypy-0.7.3 → flowcypy-0.8.0}/docs/examples/extras/signal_acquisition.py +1 -1
  18. {flowcypy-0.7.3 → flowcypy-0.8.0}/docs/examples/tutorials/limit_of_detection.py +8 -6
  19. {flowcypy-0.7.3 → flowcypy-0.8.0}/docs/examples/tutorials/workflow.py +20 -16
  20. {flowcypy-0.7.3 → flowcypy-0.8.0}/tests/test_classifiers.py +7 -12
  21. {flowcypy-0.7.3 → flowcypy-0.8.0}/tests/test_coupling_mechanism.py +2 -3
  22. {flowcypy-0.7.3 → flowcypy-0.8.0}/tests/test_flow_cytometer.py +11 -10
  23. {flowcypy-0.7.3 → flowcypy-0.8.0}/tests/test_population.py +1 -1
  24. {flowcypy-0.7.3 → flowcypy-0.8.0}/tests/test_scatterer_distribution.py +2 -2
  25. flowcypy-0.7.3/FlowCyPy/acquisition.py +0 -766
  26. flowcypy-0.7.3/developments/scripts/dev_study_on_size.py +0 -161
  27. flowcypy-0.7.3/developments/scripts/temp.py +0 -207
  28. {flowcypy-0.7.3 → flowcypy-0.8.0}/.flake8 +0 -0
  29. {flowcypy-0.7.3 → flowcypy-0.8.0}/.github/dependabot.yml +0 -0
  30. {flowcypy-0.7.3 → flowcypy-0.8.0}/.github/workflows/deploy_PyPi.yml +0 -0
  31. {flowcypy-0.7.3 → flowcypy-0.8.0}/.github/workflows/deploy_anaconda.yml +0 -0
  32. {flowcypy-0.7.3 → flowcypy-0.8.0}/.github/workflows/deploy_coverage.yml +0 -0
  33. {flowcypy-0.7.3 → flowcypy-0.8.0}/.github/workflows/deploy_documentation.yml +0 -0
  34. {flowcypy-0.7.3 → flowcypy-0.8.0}/.gitignore +0 -0
  35. {flowcypy-0.7.3 → flowcypy-0.8.0}/FlowCyPy/__init__.py +0 -0
  36. {flowcypy-0.7.3 → flowcypy-0.8.0}/FlowCyPy/coupling_mechanism/__init__.py +0 -0
  37. {flowcypy-0.7.3 → flowcypy-0.8.0}/FlowCyPy/coupling_mechanism/empirical.py +0 -0
  38. {flowcypy-0.7.3 → flowcypy-0.8.0}/FlowCyPy/coupling_mechanism/mie.py +0 -0
  39. {flowcypy-0.7.3 → flowcypy-0.8.0}/FlowCyPy/coupling_mechanism/rayleigh.py +0 -0
  40. {flowcypy-0.7.3 → flowcypy-0.8.0}/FlowCyPy/coupling_mechanism/uniform.py +0 -0
  41. {flowcypy-0.7.3 → flowcypy-0.8.0}/FlowCyPy/directories.py +0 -0
  42. {flowcypy-0.7.3 → flowcypy-0.8.0}/FlowCyPy/distribution/__init__.py +0 -0
  43. {flowcypy-0.7.3 → flowcypy-0.8.0}/FlowCyPy/distribution/base_class.py +0 -0
  44. {flowcypy-0.7.3 → flowcypy-0.8.0}/FlowCyPy/distribution/delta.py +0 -0
  45. {flowcypy-0.7.3 → flowcypy-0.8.0}/FlowCyPy/distribution/lognormal.py +0 -0
  46. {flowcypy-0.7.3 → flowcypy-0.8.0}/FlowCyPy/distribution/normal.py +0 -0
  47. {flowcypy-0.7.3 → flowcypy-0.8.0}/FlowCyPy/distribution/particle_size_distribution.py +0 -0
  48. {flowcypy-0.7.3 → flowcypy-0.8.0}/FlowCyPy/distribution/uniform.py +0 -0
  49. {flowcypy-0.7.3 → flowcypy-0.8.0}/FlowCyPy/distribution/weibull.py +0 -0
  50. {flowcypy-0.7.3 → flowcypy-0.8.0}/FlowCyPy/noises.py +0 -0
  51. {flowcypy-0.7.3 → flowcypy-0.8.0}/FlowCyPy/particle_count.py +0 -0
  52. {flowcypy-0.7.3 → flowcypy-0.8.0}/FlowCyPy/peak_locator/__init__.py +0 -0
  53. {flowcypy-0.7.3 → flowcypy-0.8.0}/FlowCyPy/peak_locator/base_class.py +0 -0
  54. {flowcypy-0.7.3 → flowcypy-0.8.0}/FlowCyPy/peak_locator/basic.py +0 -0
  55. {flowcypy-0.7.3 → flowcypy-0.8.0}/FlowCyPy/peak_locator/derivative.py +0 -0
  56. {flowcypy-0.7.3 → flowcypy-0.8.0}/FlowCyPy/peak_locator/moving_average.py +0 -0
  57. {flowcypy-0.7.3 → flowcypy-0.8.0}/FlowCyPy/physical_constant.py +0 -0
  58. {flowcypy-0.7.3 → flowcypy-0.8.0}/FlowCyPy/population.py +0 -0
  59. {flowcypy-0.7.3 → flowcypy-0.8.0}/FlowCyPy/populations_instances.py +0 -0
  60. {flowcypy-0.7.3 → flowcypy-0.8.0}/FlowCyPy/scatterer_collection.py +0 -0
  61. {flowcypy-0.7.3 → flowcypy-0.8.0}/FlowCyPy/signal_digitizer.py +0 -0
  62. {flowcypy-0.7.3 → flowcypy-0.8.0}/FlowCyPy/source.py +0 -0
  63. {flowcypy-0.7.3 → flowcypy-0.8.0}/FlowCyPy/units.py +0 -0
  64. {flowcypy-0.7.3 → flowcypy-0.8.0}/FlowCyPy.egg-info/dependency_links.txt +0 -0
  65. {flowcypy-0.7.3 → flowcypy-0.8.0}/FlowCyPy.egg-info/requires.txt +0 -0
  66. {flowcypy-0.7.3 → flowcypy-0.8.0}/FlowCyPy.egg-info/top_level.txt +0 -0
  67. {flowcypy-0.7.3 → flowcypy-0.8.0}/LICENSE +0 -0
  68. {flowcypy-0.7.3 → flowcypy-0.8.0}/README.rst +0 -0
  69. {flowcypy-0.7.3 → flowcypy-0.8.0}/developments/Deep_peak_square.ipynb +0 -0
  70. {flowcypy-0.7.3 → flowcypy-0.8.0}/developments/Physics-informed_AI.ipynb +0 -0
  71. {flowcypy-0.7.3 → flowcypy-0.8.0}/developments/ROI_analysis-Copy1.ipynb +0 -0
  72. {flowcypy-0.7.3 → flowcypy-0.8.0}/developments/ROI_analysis.ipynb +0 -0
  73. {flowcypy-0.7.3 → flowcypy-0.8.0}/developments/Untitled.ipynb +0 -0
  74. {flowcypy-0.7.3 → flowcypy-0.8.0}/developments/Untitled1.ipynb +0 -0
  75. {flowcypy-0.7.3 → flowcypy-0.8.0}/developments/Untitled2.ipynb +0 -0
  76. {flowcypy-0.7.3 → flowcypy-0.8.0}/developments/ai_dev2.ipynb +0 -0
  77. {flowcypy-0.7.3 → flowcypy-0.8.0}/developments/best_model.h5 +0 -0
  78. {flowcypy-0.7.3 → flowcypy-0.8.0}/developments/best_model.keras +0 -0
  79. {flowcypy-0.7.3 → flowcypy-0.8.0}/developments/concentration_validation.py +0 -0
  80. {flowcypy-0.7.3 → flowcypy-0.8.0}/developments/doc/canto_spec.md +0 -0
  81. {flowcypy-0.7.3 → flowcypy-0.8.0}/developments/doc/internship.pdf +0 -0
  82. {flowcypy-0.7.3 → flowcypy-0.8.0}/developments/get_started.md +0 -0
  83. {flowcypy-0.7.3 → flowcypy-0.8.0}/developments/grad_cam_output.png +0 -0
  84. {flowcypy-0.7.3 → flowcypy-0.8.0}/developments/image.png +0 -0
  85. {flowcypy-0.7.3 → flowcypy-0.8.0}/developments/model.png +0 -0
  86. {flowcypy-0.7.3 → flowcypy-0.8.0}/developments/model_example.png +0 -0
  87. {flowcypy-0.7.3 → flowcypy-0.8.0}/developments/output_file.prof +0 -0
  88. {flowcypy-0.7.3 → flowcypy-0.8.0}/developments/scripts/AI_peak_detection.py +0 -0
  89. {flowcypy-0.7.3 → flowcypy-0.8.0}/developments/scripts/concentration_comparison.py +0 -0
  90. {flowcypy-0.7.3 → flowcypy-0.8.0}/developments/scripts/create_images.py +0 -0
  91. {flowcypy-0.7.3 → flowcypy-0.8.0}/developments/scripts/data_analysis.py +0 -0
  92. {flowcypy-0.7.3 → flowcypy-0.8.0}/developments/scripts/dev_beads_analysis.py +0 -0
  93. {flowcypy-0.7.3 → flowcypy-0.8.0}/developments/scripts/dev_canto.py +0 -0
  94. {flowcypy-0.7.3 → flowcypy-0.8.0}/developments/scripts/dev_classifier.py +0 -0
  95. {flowcypy-0.7.3 → flowcypy-0.8.0}/developments/scripts/dev_shot_noise_check.py +0 -0
  96. {flowcypy-0.7.3 → flowcypy-0.8.0}/developments/scripts/dev_stats_0.py +0 -0
  97. {flowcypy-0.7.3 → flowcypy-0.8.0}/developments/scripts/dev_stats_1.py +0 -0
  98. {flowcypy-0.7.3 → flowcypy-0.8.0}/developments/scripts/dev_stats_2.py +0 -0
  99. {flowcypy-0.7.3 → flowcypy-0.8.0}/developments/scripts/dev_study_on_ri.py +0 -0
  100. {flowcypy-0.7.3 → flowcypy-0.8.0}/developments/scripts/mat2csv.py +0 -0
  101. {flowcypy-0.7.3 → flowcypy-0.8.0}/developments/scripts/profiler.py +0 -0
  102. {flowcypy-0.7.3 → flowcypy-0.8.0}/developments/test.pdf +0 -0
  103. {flowcypy-0.7.3 → flowcypy-0.8.0}/docs/Makefile +0 -0
  104. {flowcypy-0.7.3 → flowcypy-0.8.0}/docs/examples/extras/README.rst +0 -0
  105. {flowcypy-0.7.3 → flowcypy-0.8.0}/docs/examples/extras/distributions.py +0 -0
  106. {flowcypy-0.7.3 → flowcypy-0.8.0}/docs/examples/extras/scatterer_distribution.py +0 -0
  107. {flowcypy-0.7.3 → flowcypy-0.8.0}/docs/examples/noise_sources/README.rst +0 -0
  108. {flowcypy-0.7.3 → flowcypy-0.8.0}/docs/examples/noise_sources/dark_current.py +0 -0
  109. {flowcypy-0.7.3 → flowcypy-0.8.0}/docs/examples/noise_sources/shot_noise.py +0 -0
  110. {flowcypy-0.7.3 → flowcypy-0.8.0}/docs/examples/noise_sources/thermal.py +0 -0
  111. {flowcypy-0.7.3 → flowcypy-0.8.0}/docs/examples/tutorials/README.rst +0 -0
  112. {flowcypy-0.7.3 → flowcypy-0.8.0}/docs/images/distributions/Delta.png +0 -0
  113. {flowcypy-0.7.3 → flowcypy-0.8.0}/docs/images/distributions/LogNormal.png +0 -0
  114. {flowcypy-0.7.3 → flowcypy-0.8.0}/docs/images/distributions/Normal.png +0 -0
  115. {flowcypy-0.7.3 → flowcypy-0.8.0}/docs/images/distributions/RosinRammler.png +0 -0
  116. {flowcypy-0.7.3 → flowcypy-0.8.0}/docs/images/distributions/Uniform.png +0 -0
  117. {flowcypy-0.7.3 → flowcypy-0.8.0}/docs/images/distributions/Weibull.png +0 -0
  118. {flowcypy-0.7.3 → flowcypy-0.8.0}/docs/images/example_0.png +0 -0
  119. {flowcypy-0.7.3 → flowcypy-0.8.0}/docs/images/example_1.png +0 -0
  120. {flowcypy-0.7.3 → flowcypy-0.8.0}/docs/images/example_2.png +0 -0
  121. {flowcypy-0.7.3 → flowcypy-0.8.0}/docs/images/example_3.png +0 -0
  122. {flowcypy-0.7.3 → flowcypy-0.8.0}/docs/images/flow_cytometer.png +0 -0
  123. {flowcypy-0.7.3 → flowcypy-0.8.0}/docs/images/logo.png +0 -0
  124. {flowcypy-0.7.3 → flowcypy-0.8.0}/docs/make.bat +0 -0
  125. {flowcypy-0.7.3 → flowcypy-0.8.0}/docs/source/_static/default.css +0 -0
  126. {flowcypy-0.7.3 → flowcypy-0.8.0}/docs/source/_static/logo.png +0 -0
  127. {flowcypy-0.7.3 → flowcypy-0.8.0}/docs/source/_static/thumbnail.png +0 -0
  128. {flowcypy-0.7.3 → flowcypy-0.8.0}/docs/source/code/base.rst +0 -0
  129. {flowcypy-0.7.3 → flowcypy-0.8.0}/docs/source/code/detector.rst +0 -0
  130. {flowcypy-0.7.3 → flowcypy-0.8.0}/docs/source/code/distributions.rst +0 -0
  131. {flowcypy-0.7.3 → flowcypy-0.8.0}/docs/source/code/flow_cell.rst +0 -0
  132. {flowcypy-0.7.3 → flowcypy-0.8.0}/docs/source/code/flow_cytometer.rst +0 -0
  133. {flowcypy-0.7.3 → flowcypy-0.8.0}/docs/source/code/peak_locator.rst +0 -0
  134. {flowcypy-0.7.3 → flowcypy-0.8.0}/docs/source/code/scatterer.rst +0 -0
  135. {flowcypy-0.7.3 → flowcypy-0.8.0}/docs/source/code/source.rst +0 -0
  136. {flowcypy-0.7.3 → flowcypy-0.8.0}/docs/source/code.rst +0 -0
  137. {flowcypy-0.7.3 → flowcypy-0.8.0}/docs/source/conf.py +0 -0
  138. {flowcypy-0.7.3 → flowcypy-0.8.0}/docs/source/examples.rst +0 -0
  139. {flowcypy-0.7.3 → flowcypy-0.8.0}/docs/source/index.rst +0 -0
  140. {flowcypy-0.7.3 → flowcypy-0.8.0}/docs/source/internal/core_components.rst +0 -0
  141. {flowcypy-0.7.3 → flowcypy-0.8.0}/docs/source/internal/getting_started.rst +0 -0
  142. {flowcypy-0.7.3 → flowcypy-0.8.0}/docs/source/internal/objectives/main.rst +0 -0
  143. {flowcypy-0.7.3 → flowcypy-0.8.0}/docs/source/internal/objectives/pre.rst +0 -0
  144. {flowcypy-0.7.3 → flowcypy-0.8.0}/docs/source/internal/objectives/stretch.rst +0 -0
  145. {flowcypy-0.7.3 → flowcypy-0.8.0}/docs/source/internal/prerequisites/index.rst +0 -0
  146. {flowcypy-0.7.3 → flowcypy-0.8.0}/docs/source/internal/prerequisites/mathematics.rst +0 -0
  147. {flowcypy-0.7.3 → flowcypy-0.8.0}/docs/source/internal/prerequisites/optics.rst +0 -0
  148. {flowcypy-0.7.3 → flowcypy-0.8.0}/docs/source/internal/prerequisites/programming.rst +0 -0
  149. {flowcypy-0.7.3 → flowcypy-0.8.0}/docs/source/internal/ressources.rst +0 -0
  150. {flowcypy-0.7.3 → flowcypy-0.8.0}/docs/source/internal/tasks.rst +0 -0
  151. {flowcypy-0.7.3 → flowcypy-0.8.0}/docs/source/internal.rst +0 -0
  152. {flowcypy-0.7.3 → flowcypy-0.8.0}/docs/source/references.rst +0 -0
  153. {flowcypy-0.7.3 → flowcypy-0.8.0}/docs/source/sg_execution_times.rst +0 -0
  154. {flowcypy-0.7.3 → flowcypy-0.8.0}/docs/source/theory.rst +0 -0
  155. {flowcypy-0.7.3 → flowcypy-0.8.0}/meta.yaml +0 -0
  156. {flowcypy-0.7.3 → flowcypy-0.8.0}/notebook.ipynb +0 -0
  157. {flowcypy-0.7.3 → flowcypy-0.8.0}/pyproject.toml +0 -0
  158. {flowcypy-0.7.3 → flowcypy-0.8.0}/setup.cfg +0 -0
  159. {flowcypy-0.7.3 → flowcypy-0.8.0}/tests/__init__.py +0 -0
  160. {flowcypy-0.7.3 → flowcypy-0.8.0}/tests/test_detector_noise.py +0 -0
  161. {flowcypy-0.7.3 → flowcypy-0.8.0}/tests/test_distribution.py +0 -0
  162. {flowcypy-0.7.3 → flowcypy-0.8.0}/tests/test_noises.py +0 -0
  163. {flowcypy-0.7.3 → flowcypy-0.8.0}/tests/test_peak_algorithm.py +0 -0
  164. {flowcypy-0.7.3 → flowcypy-0.8.0}/tests/test_peak_analyzer.py +0 -0
  165. {flowcypy-0.7.3 → flowcypy-0.8.0}/tests/test_source.py +0 -0
@@ -12,5 +12,5 @@ __version__: str
12
12
  __version_tuple__: VERSION_TUPLE
13
13
  version_tuple: VERSION_TUPLE
14
14
 
15
- __version__ = version = '0.7.3'
16
- __version_tuple__ = version_tuple = (0, 7, 3)
15
+ __version__ = version = '0.8.0'
16
+ __version_tuple__ = version_tuple = (0, 8, 0)
@@ -0,0 +1,362 @@
1
+ import logging
2
+ import warnings
3
+ from typing import Optional, Union, List
4
+ from MPSPlots.styles import mps
5
+ import pandas as pd
6
+ import numpy as np
7
+ from FlowCyPy import units
8
+ import matplotlib.pyplot as plt
9
+ import seaborn as sns
10
+ from tabulate import tabulate
11
+ from FlowCyPy import helper
12
+ from FlowCyPy.triggered_acquisition import TriggeredAcquisitions
13
+ from FlowCyPy.dataframe_subclass import TriggeredAcquisitionDataFrame
14
+
15
class DataAccessor:
    """Thin wrapper that keeps a reference to an enclosing (outer) object.

    Currently a stub: it only stores the outer object for later delegation.
    """

    def __init__(self, outer):
        # Hold the owning object so accessor methods can reach its state.
        self._outer = outer
18
+
19
+
20
class Acquisition:
    """
    Represents a flow cytometry experiment, including runtime, dataframes, logging, and visualization.

    Attributes
    ----------
    run_time : units.second
        Total runtime of the experiment.
    scatterer : pd.DataFrame
        DataFrame containing scatterer data, indexed by population and time.
    signal : pd.DataFrame
        DataFrame containing detector signal data, indexed by detector and time.
    """

    def __init__(self, run_time: units.second, cytometer: object, scatterer_dataframe: pd.DataFrame, detector_dataframe: pd.DataFrame):
        """
        Initialize the Acquisition instance.

        Parameters
        ----------
        run_time : units.second
            Total runtime of the experiment.
        cytometer : object
            The FlowCytometer instance that produced this acquisition.
        scatterer_dataframe : pd.DataFrame
            DataFrame with scatterer data.
        detector_dataframe : pd.DataFrame
            DataFrame with detector signal data.
        """
        self.cytometer = cytometer
        self.logger = self.LoggerInterface(self)

        self.signal = detector_dataframe
        self.scatterer = scatterer_dataframe
        self.run_time = run_time

    @property
    def n_detectors(self) -> int:
        """Number of distinct detectors present in the signal dataframe."""
        return len(self.signal.index.get_level_values('Detector').unique())

    def _get_trigger_indices(
            self,
            threshold: units.Quantity,
            trigger_detector_name: Optional[str] = None,
            pre_buffer: int = 64,
            post_buffer: int = 64) -> tuple[np.ndarray, np.ndarray]:
        """
        Calculate start and end indices for triggered segments, ensuring no retriggering
        occurs during an active buffer period.

        Parameters
        ----------
        threshold : units.Quantity
            The threshold value for triggering.
        trigger_detector_name : str, optional
            The name of the detector to use for the triggering signal.
        pre_buffer : int, optional
            Number of samples to include before the trigger point.
        post_buffer : int, optional
            Number of samples to include after the trigger point.

        Returns
        -------
        tuple[np.ndarray, np.ndarray]
            The start and end indices of non-overlapping triggered segments.

        Raises
        ------
        ValueError
            If the specified detector is not found in the data.
        """
        if trigger_detector_name not in self.signal.index.get_level_values('Detector').unique():
            raise ValueError(f"Detector '{trigger_detector_name}' not found.")

        signal = self.signal.xs(trigger_detector_name)['Signal']
        # Convert the threshold to the signal's units before comparing.
        trigger_signal = signal > threshold.to(signal.pint.units)

        # Rising edges: positions where the boolean trigger goes 0 -> 1.
        crossings = np.where(np.diff(trigger_signal.astype(int)) == 1)[0]
        start_indices = np.clip(crossings - pre_buffer, 0, len(trigger_signal) - 1)
        end_indices = np.clip(crossings + post_buffer, 0, len(trigger_signal) - 1)

        # Suppress retriggering within an active buffer period: keep a trigger
        # only if its (pre-buffered) start lies strictly after the previous
        # kept segment's end.
        suppressed_start_indices = []
        suppressed_end_indices = []

        last_end = -1
        for start, end in zip(start_indices, end_indices):
            if start > last_end:  # Ensure no overlap with the last active buffer
                suppressed_start_indices.append(start)
                suppressed_end_indices.append(end)
                last_end = end  # Update the end of the current active buffer

        return np.array(suppressed_start_indices), np.array(suppressed_end_indices)

    def run_triggering(
            self,
            threshold: units.Quantity,
            trigger_detector_name: str,
            pre_buffer: int = 64,
            post_buffer: int = 64,
            max_triggers: Optional[int] = None) -> Optional[TriggeredAcquisitions]:
        """
        Execute triggered acquisition analysis for signal data.

        This method identifies segments of signal data based on a triggering threshold
        and specified detector. It extracts segments of interest from the signal,
        including a pre-trigger buffer and post-trigger buffer.

        Parameters
        ----------
        threshold : units.Quantity
            The threshold value for triggering. Only signal values exceeding this threshold
            will be considered as trigger events.
        trigger_detector_name : str
            The name of the detector used for triggering. This determines which detector's
            signal is analyzed for trigger events.
        pre_buffer : int, optional
            The number of points to include before the trigger point in each segment.
            Default is 64.
        post_buffer : int, optional
            The number of points to include after the trigger point in each segment.
            Default is 64.
        max_triggers : int, optional
            The maximum number of triggers to process. If None, all triggers will be processed.
            Default is None.

        Returns
        -------
        TriggeredAcquisitions or None
            The triggered acquisitions, or None when no trigger events were found
            (a UserWarning is issued in that case).

        Raises
        ------
        ValueError
            If the specified `trigger_detector_name` is not found in the dataset.

        Warnings
        --------
        UserWarning
            If no triggers are detected for the specified threshold, a warning is issued
            indicating that no signals met the criteria.
        """
        self.threshold = threshold
        self.trigger_detector_name = trigger_detector_name

        start_indices, end_indices = self._get_trigger_indices(
            threshold, trigger_detector_name, pre_buffer, post_buffer
        )

        if max_triggers is not None:
            start_indices = start_indices[:max_triggers]
            end_indices = end_indices[:max_triggers]

        # Extract the same index windows from every detector, tagging each with
        # a SegmentID so windows can be regrouped after concatenation.
        segments = []
        for detector_name in self.signal.index.get_level_values('Detector').unique():
            detector_data = self.signal.xs(detector_name)
            time = detector_data['Time']
            digitized = detector_data['DigitizedSignal']
            signal = detector_data['Signal']

            for segment_id, (start, end) in enumerate(zip(start_indices, end_indices)):
                segments.append(pd.DataFrame({
                    'Time': time[start:end + 1],
                    'DigitizedSignal': digitized[start:end + 1],
                    'Signal': signal[start:end + 1],
                    'Detector': detector_name,
                    'SegmentID': segment_id,
                }))

        if not segments:
            warnings.warn(
                f"No signal were triggered during the run time, try changing the threshold. Signal min-max value is: {self.signal['Signal'].min().to_compact()}, {self.signal['Signal'].max().to_compact()}",
                UserWarning
            )
            return None

        triggered_signal = TriggeredAcquisitionDataFrame(pd.concat(segments).set_index(['Detector', 'SegmentID']))
        # Propagate acquisition metadata so downstream analysis keeps working.
        triggered_signal.attrs['bit_depth'] = self.signal.attrs['bit_depth']
        triggered_signal.attrs['saturation_levels'] = self.signal.attrs['saturation_levels']
        triggered_signal.attrs['scatterer_dataframe'] = self.signal.attrs['scatterer_dataframe']

        return TriggeredAcquisitions(parent=self, dataframe=triggered_signal)

    class LoggerInterface:
        """
        A nested class for logging statistical information about the experiment.

        Methods
        -------
        scatterer()
            Logs statistics about the scatterer populations.
        detector()
            Logs statistics about the detector signals.
        """

        def __init__(self, experiment: object):
            # Back-reference to the owning Acquisition instance.
            self.experiment = experiment

        def scatterer(self, table_format: str = "grid") -> None:
            """
            Logs detailed information about scatterer populations.

            Parameters
            ----------
            table_format : str, optional
                The format for the table display (default: 'grid').
                Options include 'plain', 'github', 'grid', 'fancy_grid', etc.

            Returns
            -------
            None
                Logs scatterer population information, including refractive index, size, particle count,
                number of events, and time statistics.
            """
            logging.info("\n=== Scatterer Population Properties ===")

            # Collect general population data
            general_table_data = [
                self._get_population_properties(population)
                for population in self.experiment.scatterer.groupby("Population")
            ]
            general_headers = [
                "Name",
                "Refractive Index",
                "Medium Refractive Index",
                "Size",
                "Particle Count",
                "Number of Events",
                "Min Time Between Events",
                "Avg Time Between Events",
            ]

            formatted_general_table = tabulate(
                general_table_data, headers=general_headers, tablefmt=table_format, floatfmt=".4f"
            )
            logging.info("\n" + formatted_general_table)

        def _get_population_properties(self, population_group: tuple) -> List[Union[str, float]]:
            """
            Extracts key properties of a scatterer population for the general properties table.

            Parameters
            ----------
            population_group : tuple
                A tuple containing the population name and its corresponding DataFrame.

            Returns
            -------
            list
                List of scatterer properties: [name, refractive index, medium refractive index, size,
                particle count, number of events, min time between events, avg time between events].
            """
            population_name, population_df = population_group

            name = population_name
            refractive_index = f"{population_df['RefractiveIndex'].mean():~P}"
            # BUG(review): this formats the experiment run time, not a medium
            # refractive index. Kept as-is to preserve output; replace once the
            # medium refractive index is actually stored on the experiment.
            medium_refractive_index = f"{self.experiment.run_time:~P}"
            size = f"{population_df['Size'].mean():~P}"
            particle_count = len(population_df)
            num_events = particle_count

            min_delta_position = population_df["Time"].diff().abs().min()
            avg_delta_position = population_df["Time"].diff().mean()

            return [
                name,
                refractive_index,
                medium_refractive_index,
                size,
                particle_count,
                num_events,
                min_delta_position,
                avg_delta_position,
            ]

        def detector(self, table_format: str = "grid", include_totals: bool = True) -> None:
            """
            Logs statistics about detector signals.

            Parameters
            ----------
            table_format : str, optional
                The format for the table display (default: 'grid').
                Options include 'plain', 'github', 'grid', 'fancy_grid', etc.
            include_totals : bool, optional
                If True, logs the total number of events across all detectors (default: True).

            Returns
            -------
            None
                Logs details about detector signals, including acquisition counts
                and timing statistics.
            """
            logging.info("\n=== Detector Signal Statistics ===")

            # Compute statistics for each detector
            df = self.experiment.signal
            table_data = [
                self._get_detector_stats(detector_name, df.xs(detector_name, level="Detector"))
                for detector_name in df.index.levels[0]
            ]
            headers = [
                "Detector",
                "Number of Acquisition",
                "First Event Time",
                "Last Event Time",
                "Time Between Events",
            ]

            formatted_table = tabulate(table_data, headers=headers, tablefmt=table_format, floatfmt=".3f")
            logging.info("\n" + formatted_table)

            if include_totals:
                # stat[1] is the per-detector acquisition (sample) count.
                total_events = sum(stat[1] for stat in table_data)
                logging.info(f"\nTotal number of events detected across all detectors: {total_events}")

        def _get_detector_stats(self, detector_name: str, group: pd.DataFrame) -> list:
            """
            Computes statistics for a detector.

            Parameters
            ----------
            detector_name : str
                Name of the detector.
            group : pd.DataFrame
                DataFrame containing the detector data.

            Returns
            -------
            list
                List of computed statistics: [detector_name, num_acquisition,
                first_event_time, last_event_time, avg_time_between_events].
            """
            num_acquisition = len(group["Time"])
            first_event_time = group["Time"].min()
            last_event_time = group["Time"].max()

            time_diffs = group["Time"].diff().dropna()
            time_between_events = time_diffs.mean()

            return [
                detector_name,
                num_acquisition,
                first_event_time,
                last_event_time,
                time_between_events,
            ]
@@ -2,7 +2,7 @@ from sklearn.cluster import KMeans
2
2
  from sklearn.cluster import DBSCAN
3
3
  from sklearn.mixture import GaussianMixture
4
4
  import pandas as pd
5
- from typing import Dict, Tuple
5
+ from FlowCyPy.dataframe_subclass import ClassifierDataFrame
6
6
 
7
7
 
8
8
  class BaseClassifier:
@@ -80,7 +80,7 @@ class KmeansClassifier(BaseClassifier):
80
80
 
81
81
  dataframe['Label'] = labels
82
82
 
83
- return labels
83
+ return ClassifierDataFrame(dataframe)
84
84
 
85
85
  class GaussianMixtureClassifier(BaseClassifier):
86
86
  def __init__(self, number_of_components: int) -> None:
@@ -128,7 +128,7 @@ class GaussianMixtureClassifier(BaseClassifier):
128
128
  # Add labels to the original DataFrame
129
129
  dataframe['Label'] = labels
130
130
 
131
- return labels
131
+ return ClassifierDataFrame(dataframe)
132
132
 
133
133
  class DBSCANClassifier(BaseClassifier):
134
134
  def __init__(self, epsilon: float = 0.5, min_samples: int = 5) -> None:
@@ -179,4 +179,4 @@ class DBSCANClassifier(BaseClassifier):
179
179
  # Add labels to the original DataFrame
180
180
  dataframe['Label'] = labels
181
181
 
182
- return labels
182
+ return ClassifierDataFrame(dataframe)
@@ -13,6 +13,8 @@ from FlowCyPy.flow_cell import FlowCell
13
13
  from FlowCyPy.detector import Detector
14
14
  from FlowCyPy.acquisition import Acquisition
15
15
  from FlowCyPy.signal_digitizer import SignalDigitizer
16
+ from FlowCyPy.helper import validate_units
17
+ from FlowCyPy.dataframe_subclass import ContinuousAcquisitionDataFrame
16
18
 
17
19
 
18
20
  # Set up logging configuration
@@ -40,7 +42,7 @@ class FlowCytometer:
40
42
  coupling_mechanism : str, optional
41
43
  The scattering mechanism used to couple the signal from the particles to the detectors.
42
44
  Supported mechanisms include: 'mie' (default): Mie scattering, 'rayleigh': Rayleigh scattering, 'uniform': Uniform signal coupling, 'empirical': Empirical data-driven coupling
43
- background_power : Quantity, optional
45
+ background_power : units.watt, optional
44
46
  The background optical power added to the detector signal. Defaults to 0 milliwatts.
45
47
 
46
48
  Attributes
@@ -55,7 +57,7 @@ class FlowCytometer:
55
57
  The detectors used to collect and process signals from the scatterers.
56
58
  coupling_mechanism : str
57
59
  The selected mechanism for signal coupling.
58
- background_power : Quantity
60
+ background_power : units.watt
59
61
  The optical background power added to the detector signals.
60
62
 
61
63
  Raises
@@ -71,7 +73,7 @@ class FlowCytometer:
71
73
  signal_digitizer: SignalDigitizer,
72
74
  detectors: List[Detector],
73
75
  coupling_mechanism: Optional[str] = 'mie',
74
- background_power: Optional[Quantity] = 0 * milliwatt):
76
+ background_power: Optional[units.watt] = 0 * milliwatt):
75
77
 
76
78
  self.scatterer_collection = scatterer_collection
77
79
  self.flow_cell = flow_cell
@@ -87,7 +89,7 @@ class FlowCytometer:
87
89
  for detector in detectors:
88
90
  detector.cytometer = self
89
91
 
90
- def run_coupling_analysis(self, scatterer_dataframe: pd.DataFrame) -> None:
92
+ def _run_coupling_analysis(self, scatterer_dataframe: pd.DataFrame) -> None:
91
93
  """
92
94
  Computes and assigns the optical coupling power for each particle-detection event.
93
95
 
@@ -184,7 +186,7 @@ class FlowCytometer:
184
186
 
185
187
  scatterer_dataframe['Widths'] = PintArray(widths, dtype=widths.units)
186
188
 
187
- def initialize_signal(self, run_time: Quantity) -> None:
189
+ def _initialize_signal(self, run_time: units.second) -> None:
188
190
  """
189
191
  Initializes the raw signal for each detector based on the source and flow cell configuration.
190
192
 
@@ -205,11 +207,14 @@ class FlowCytometer:
205
207
 
206
208
  dataframes.append(dataframe)
207
209
 
208
- self.dataframe = pd.concat(dataframes, keys=[d.name for d in self.detectors])
210
+ dataframe = pd.concat(dataframes, keys=[d.name for d in self.detectors])
209
211
 
210
- self.dataframe.index.names = ["Detector", "Index"]
212
+ dataframe.index.names = ["Detector", "Index"]
211
213
 
212
- def get_acquisition(self, run_time: Quantity) -> None:
214
+ return dataframe
215
+
216
+ @validate_units(run_time=units.second)
217
+ def get_acquisition(self, run_time: units.second) -> None:
213
218
  """
214
219
  Simulates the generation of optical signal pulses for each particle event.
215
220
 
@@ -231,13 +236,13 @@ class FlowCytometer:
231
236
  if not run_time.check('second'):
232
237
  raise ValueError(f"flow_speed must be in meter per second, but got {run_time.units}")
233
238
 
234
- self.initialize_signal(run_time=run_time)
239
+ signal_dataframe = self._initialize_signal(run_time=run_time)
235
240
 
236
- scatterer_dataframe = self.flow_cell.generate_event_dataframe(self.scatterer_collection.populations, run_time=run_time)
241
+ scatterer_dataframe = self.flow_cell._generate_event_dataframe(self.scatterer_collection.populations, run_time=run_time)
237
242
 
238
243
  self.scatterer_collection.fill_dataframe_with_sampling(scatterer_dataframe)
239
244
 
240
- self.run_coupling_analysis(scatterer_dataframe)
245
+ self._run_coupling_analysis(scatterer_dataframe)
241
246
 
242
247
  self._generate_pulse_parameters(scatterer_dataframe)
243
248
 
@@ -249,7 +254,7 @@ class FlowCytometer:
249
254
  for detector in self.detectors:
250
255
  _coupling_power = scatterer_dataframe[detector.name].values
251
256
 
252
- detector_signal = self.dataframe.xs(detector.name)['Signal']
257
+ detector_signal = signal_dataframe.xs(detector.name)['Signal']
253
258
 
254
259
  # Generate noise components
255
260
  detector._add_thermal_noise_to_raw_signal(signal=detector_signal)
@@ -257,7 +262,7 @@ class FlowCytometer:
257
262
  detector._add_dark_current_noise_to_raw_signal(signal=detector_signal)
258
263
 
259
264
  # Broadcast the time array to the shape of (number of signals, len(detector.time))
260
- time = self.dataframe.xs(detector.name)['Time'].pint.magnitude
265
+ time = signal_dataframe.xs(detector.name)['Time'].pint.magnitude
261
266
 
262
267
  time_grid = np.expand_dims(time, axis=0) * units.second
263
268
  centers = np.expand_dims(_centers, axis=1) * units.second
@@ -278,15 +283,20 @@ class FlowCytometer:
278
283
 
279
284
  digitized_signal = detector.capture_signal(signal=detector_signal)
280
285
 
281
- self.dataframe.loc[detector.name, 'Signal'] = PintArray(detector_signal, detector_signal.pint.units)
286
+ signal_dataframe.loc[detector.name, 'Signal'] = PintArray(detector_signal, detector_signal.pint.units)
287
+
288
+ signal_dataframe.loc[detector.name, 'DigitizedSignal'] = PintArray(digitized_signal, units.bit_bins)
282
289
 
283
- self.dataframe.loc[detector.name, 'DigitizedSignal'] = PintArray(digitized_signal, units.bit_bins)
290
+ signal_dataframe = ContinuousAcquisitionDataFrame(signal_dataframe)
291
+ signal_dataframe.attrs['bit_depth'] = self.signal_digitizer._bit_depth
292
+ signal_dataframe.attrs['saturation_levels'] = {d.name: d._saturation_levels for d in self.detectors}
293
+ signal_dataframe.attrs['scatterer_dataframe'] = scatterer_dataframe
284
294
 
285
295
  experiment = Acquisition(
286
296
  cytometer=self,
287
297
  run_time=run_time,
288
298
  scatterer_dataframe=scatterer_dataframe,
289
- detector_dataframe=self.dataframe
299
+ detector_dataframe=signal_dataframe
290
300
  )
291
301
 
292
302
  return experiment