FlowCyPy 0.7.1__tar.gz → 0.7.4__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (169) hide show
  1. {flowcypy-0.7.1 → flowcypy-0.7.4}/FlowCyPy/_version.py +2 -2
  2. {flowcypy-0.7.1 → flowcypy-0.7.4}/FlowCyPy/acquisition.py +157 -51
  3. flowcypy-0.7.4/FlowCyPy/classifier.py +182 -0
  4. {flowcypy-0.7.1 → flowcypy-0.7.4}/FlowCyPy/cytometer.py +70 -29
  5. {flowcypy-0.7.1 → flowcypy-0.7.4}/FlowCyPy/detector.py +7 -64
  6. flowcypy-0.7.4/FlowCyPy/flow_cell.py +140 -0
  7. {flowcypy-0.7.1 → flowcypy-0.7.4}/FlowCyPy/helper.py +48 -82
  8. flowcypy-0.7.4/FlowCyPy/noises.py +87 -0
  9. {flowcypy-0.7.1 → flowcypy-0.7.4}/FlowCyPy/particle_count.py +3 -2
  10. {flowcypy-0.7.1 → flowcypy-0.7.4}/FlowCyPy/population.py +3 -4
  11. {flowcypy-0.7.1 → flowcypy-0.7.4}/FlowCyPy/scatterer_collection.py +7 -7
  12. {flowcypy-0.7.1 → flowcypy-0.7.4}/FlowCyPy/signal_digitizer.py +1 -3
  13. {flowcypy-0.7.1 → flowcypy-0.7.4}/FlowCyPy/source.py +4 -7
  14. flowcypy-0.7.4/FlowCyPy/utils.py +74 -0
  15. {flowcypy-0.7.1 → flowcypy-0.7.4}/FlowCyPy.egg-info/PKG-INFO +2 -2
  16. {flowcypy-0.7.1 → flowcypy-0.7.4}/FlowCyPy.egg-info/SOURCES.txt +16 -4
  17. {flowcypy-0.7.1 → flowcypy-0.7.4}/FlowCyPy.egg-info/requires.txt +1 -1
  18. {flowcypy-0.7.1 → flowcypy-0.7.4}/PKG-INFO +2 -2
  19. flowcypy-0.7.4/developments/Deep_peak_square.ipynb +1049 -0
  20. flowcypy-0.7.4/developments/Physics-informed_AI.ipynb +876 -0
  21. flowcypy-0.7.4/developments/ROI_analysis-Copy1.ipynb +639 -0
  22. flowcypy-0.7.4/developments/ROI_analysis.ipynb +778 -0
  23. flowcypy-0.7.4/developments/Untitled.ipynb +227 -0
  24. flowcypy-0.7.4/developments/Untitled1.ipynb +668 -0
  25. flowcypy-0.7.4/developments/Untitled2.ipynb +313 -0
  26. flowcypy-0.7.4/developments/ai_dev2.ipynb +1745 -0
  27. flowcypy-0.7.4/developments/best_model.h5 +0 -0
  28. flowcypy-0.7.4/developments/best_model.keras +0 -0
  29. flowcypy-0.7.4/developments/concentration_validation.py +100 -0
  30. flowcypy-0.7.4/developments/grad_cam_output.png +0 -0
  31. flowcypy-0.7.4/developments/model.png +0 -0
  32. flowcypy-0.7.4/developments/model_example.png +0 -0
  33. flowcypy-0.7.4/developments/scripts/AI_peak_detection.py +85 -0
  34. flowcypy-0.7.4/developments/scripts/temp.py +70 -0
  35. {flowcypy-0.7.1 → flowcypy-0.7.4}/docs/examples/extras/flow_cytometer_signal.py +14 -10
  36. {flowcypy-0.7.1 → flowcypy-0.7.4}/docs/examples/extras/scatterer_distribution.py +3 -2
  37. {flowcypy-0.7.1 → flowcypy-0.7.4}/docs/examples/extras/signal_acquisition.py +1 -1
  38. {flowcypy-0.7.1 → flowcypy-0.7.4}/docs/examples/tutorials/limit_of_detection.py +1 -1
  39. {flowcypy-0.7.1 → flowcypy-0.7.4}/docs/examples/tutorials/workflow.py +2 -4
  40. {flowcypy-0.7.1 → flowcypy-0.7.4}/docs/source/sg_execution_times.rst +15 -15
  41. {flowcypy-0.7.1 → flowcypy-0.7.4}/pyproject.toml +1 -1
  42. flowcypy-0.7.4/tests/test_classifiers.py +83 -0
  43. {flowcypy-0.7.1 → flowcypy-0.7.4}/tests/test_coupling_mechanism.py +3 -3
  44. {flowcypy-0.7.1 → flowcypy-0.7.4}/tests/test_flow_cytometer.py +26 -8
  45. {flowcypy-0.7.1 → flowcypy-0.7.4}/tests/test_peak_analyzer.py +1 -1
  46. {flowcypy-0.7.1 → flowcypy-0.7.4}/tests/test_population.py +3 -3
  47. {flowcypy-0.7.1 → flowcypy-0.7.4}/tests/test_scatterer_distribution.py +3 -3
  48. flowcypy-0.7.1/FlowCyPy/classifier.py +0 -210
  49. flowcypy-0.7.1/FlowCyPy/coupling_mechanism.py +0 -205
  50. flowcypy-0.7.1/FlowCyPy/flow_cell.py +0 -197
  51. flowcypy-0.7.1/FlowCyPy/logger.py +0 -136
  52. flowcypy-0.7.1/FlowCyPy/noises.py +0 -34
  53. flowcypy-0.7.1/FlowCyPy/plottings.py +0 -269
  54. flowcypy-0.7.1/FlowCyPy/utils.py +0 -191
  55. flowcypy-0.7.1/developments/scripts/temp.py +0 -219
  56. flowcypy-0.7.1/tests/test_extra.py +0 -55
  57. {flowcypy-0.7.1 → flowcypy-0.7.4}/.flake8 +0 -0
  58. {flowcypy-0.7.1 → flowcypy-0.7.4}/.github/dependabot.yml +0 -0
  59. {flowcypy-0.7.1 → flowcypy-0.7.4}/.github/workflows/deploy_PyPi.yml +0 -0
  60. {flowcypy-0.7.1 → flowcypy-0.7.4}/.github/workflows/deploy_anaconda.yml +0 -0
  61. {flowcypy-0.7.1 → flowcypy-0.7.4}/.github/workflows/deploy_coverage.yml +0 -0
  62. {flowcypy-0.7.1 → flowcypy-0.7.4}/.github/workflows/deploy_documentation.yml +0 -0
  63. {flowcypy-0.7.1 → flowcypy-0.7.4}/.gitignore +0 -0
  64. {flowcypy-0.7.1 → flowcypy-0.7.4}/FlowCyPy/__init__.py +0 -0
  65. {flowcypy-0.7.1 → flowcypy-0.7.4}/FlowCyPy/coupling_mechanism/__init__.py +0 -0
  66. {flowcypy-0.7.1 → flowcypy-0.7.4}/FlowCyPy/coupling_mechanism/empirical.py +0 -0
  67. {flowcypy-0.7.1 → flowcypy-0.7.4}/FlowCyPy/coupling_mechanism/mie.py +0 -0
  68. {flowcypy-0.7.1 → flowcypy-0.7.4}/FlowCyPy/coupling_mechanism/rayleigh.py +0 -0
  69. {flowcypy-0.7.1 → flowcypy-0.7.4}/FlowCyPy/coupling_mechanism/uniform.py +0 -0
  70. {flowcypy-0.7.1 → flowcypy-0.7.4}/FlowCyPy/directories.py +0 -0
  71. {flowcypy-0.7.1 → flowcypy-0.7.4}/FlowCyPy/distribution/__init__.py +0 -0
  72. {flowcypy-0.7.1 → flowcypy-0.7.4}/FlowCyPy/distribution/base_class.py +0 -0
  73. {flowcypy-0.7.1 → flowcypy-0.7.4}/FlowCyPy/distribution/delta.py +0 -0
  74. {flowcypy-0.7.1 → flowcypy-0.7.4}/FlowCyPy/distribution/lognormal.py +0 -0
  75. {flowcypy-0.7.1 → flowcypy-0.7.4}/FlowCyPy/distribution/normal.py +0 -0
  76. {flowcypy-0.7.1 → flowcypy-0.7.4}/FlowCyPy/distribution/particle_size_distribution.py +0 -0
  77. {flowcypy-0.7.1 → flowcypy-0.7.4}/FlowCyPy/distribution/uniform.py +0 -0
  78. {flowcypy-0.7.1 → flowcypy-0.7.4}/FlowCyPy/distribution/weibull.py +0 -0
  79. {flowcypy-0.7.1 → flowcypy-0.7.4}/FlowCyPy/peak_locator/__init__.py +0 -0
  80. {flowcypy-0.7.1 → flowcypy-0.7.4}/FlowCyPy/peak_locator/base_class.py +0 -0
  81. {flowcypy-0.7.1 → flowcypy-0.7.4}/FlowCyPy/peak_locator/basic.py +0 -0
  82. {flowcypy-0.7.1 → flowcypy-0.7.4}/FlowCyPy/peak_locator/derivative.py +0 -0
  83. {flowcypy-0.7.1 → flowcypy-0.7.4}/FlowCyPy/peak_locator/moving_average.py +0 -0
  84. {flowcypy-0.7.1 → flowcypy-0.7.4}/FlowCyPy/physical_constant.py +0 -0
  85. {flowcypy-0.7.1 → flowcypy-0.7.4}/FlowCyPy/populations_instances.py +0 -0
  86. {flowcypy-0.7.1 → flowcypy-0.7.4}/FlowCyPy/units.py +0 -0
  87. {flowcypy-0.7.1 → flowcypy-0.7.4}/FlowCyPy.egg-info/dependency_links.txt +0 -0
  88. {flowcypy-0.7.1 → flowcypy-0.7.4}/FlowCyPy.egg-info/top_level.txt +0 -0
  89. {flowcypy-0.7.1 → flowcypy-0.7.4}/LICENSE +0 -0
  90. {flowcypy-0.7.1 → flowcypy-0.7.4}/README.rst +0 -0
  91. {flowcypy-0.7.1 → flowcypy-0.7.4}/developments/doc/canto_spec.md +0 -0
  92. {flowcypy-0.7.1 → flowcypy-0.7.4}/developments/doc/internship.pdf +0 -0
  93. {flowcypy-0.7.1 → flowcypy-0.7.4}/developments/get_started.md +0 -0
  94. {flowcypy-0.7.1 → flowcypy-0.7.4}/developments/image.png +0 -0
  95. {flowcypy-0.7.1 → flowcypy-0.7.4}/developments/output_file.prof +0 -0
  96. {flowcypy-0.7.1 → flowcypy-0.7.4}/developments/scripts/concentration_comparison.py +0 -0
  97. {flowcypy-0.7.1 → flowcypy-0.7.4}/developments/scripts/create_images.py +0 -0
  98. {flowcypy-0.7.1 → flowcypy-0.7.4}/developments/scripts/data_analysis.py +0 -0
  99. {flowcypy-0.7.1 → flowcypy-0.7.4}/developments/scripts/dev_beads_analysis.py +0 -0
  100. {flowcypy-0.7.1 → flowcypy-0.7.4}/developments/scripts/dev_canto.py +0 -0
  101. {flowcypy-0.7.1 → flowcypy-0.7.4}/developments/scripts/dev_classifier.py +0 -0
  102. {flowcypy-0.7.1 → flowcypy-0.7.4}/developments/scripts/dev_shot_noise_check.py +0 -0
  103. {flowcypy-0.7.1 → flowcypy-0.7.4}/developments/scripts/dev_stats_0.py +0 -0
  104. {flowcypy-0.7.1 → flowcypy-0.7.4}/developments/scripts/dev_stats_1.py +0 -0
  105. {flowcypy-0.7.1 → flowcypy-0.7.4}/developments/scripts/dev_stats_2.py +0 -0
  106. {flowcypy-0.7.1 → flowcypy-0.7.4}/developments/scripts/dev_study_on_ri.py +0 -0
  107. {flowcypy-0.7.1 → flowcypy-0.7.4}/developments/scripts/dev_study_on_size.py +0 -0
  108. {flowcypy-0.7.1 → flowcypy-0.7.4}/developments/scripts/mat2csv.py +0 -0
  109. {flowcypy-0.7.1 → flowcypy-0.7.4}/developments/scripts/profiler.py +0 -0
  110. {flowcypy-0.7.1 → flowcypy-0.7.4}/developments/test.pdf +0 -0
  111. {flowcypy-0.7.1 → flowcypy-0.7.4}/docs/Makefile +0 -0
  112. {flowcypy-0.7.1 → flowcypy-0.7.4}/docs/examples/extras/README.rst +0 -0
  113. {flowcypy-0.7.1 → flowcypy-0.7.4}/docs/examples/extras/distributions.py +0 -0
  114. {flowcypy-0.7.1 → flowcypy-0.7.4}/docs/examples/noise_sources/README.rst +0 -0
  115. {flowcypy-0.7.1 → flowcypy-0.7.4}/docs/examples/noise_sources/dark_current.py +0 -0
  116. {flowcypy-0.7.1 → flowcypy-0.7.4}/docs/examples/noise_sources/shot_noise.py +0 -0
  117. {flowcypy-0.7.1 → flowcypy-0.7.4}/docs/examples/noise_sources/thermal.py +0 -0
  118. {flowcypy-0.7.1 → flowcypy-0.7.4}/docs/examples/tutorials/README.rst +0 -0
  119. {flowcypy-0.7.1 → flowcypy-0.7.4}/docs/images/distributions/Delta.png +0 -0
  120. {flowcypy-0.7.1 → flowcypy-0.7.4}/docs/images/distributions/LogNormal.png +0 -0
  121. {flowcypy-0.7.1 → flowcypy-0.7.4}/docs/images/distributions/Normal.png +0 -0
  122. {flowcypy-0.7.1 → flowcypy-0.7.4}/docs/images/distributions/RosinRammler.png +0 -0
  123. {flowcypy-0.7.1 → flowcypy-0.7.4}/docs/images/distributions/Uniform.png +0 -0
  124. {flowcypy-0.7.1 → flowcypy-0.7.4}/docs/images/distributions/Weibull.png +0 -0
  125. {flowcypy-0.7.1 → flowcypy-0.7.4}/docs/images/example_0.png +0 -0
  126. {flowcypy-0.7.1 → flowcypy-0.7.4}/docs/images/example_1.png +0 -0
  127. {flowcypy-0.7.1 → flowcypy-0.7.4}/docs/images/example_2.png +0 -0
  128. {flowcypy-0.7.1 → flowcypy-0.7.4}/docs/images/example_3.png +0 -0
  129. {flowcypy-0.7.1 → flowcypy-0.7.4}/docs/images/flow_cytometer.png +0 -0
  130. {flowcypy-0.7.1 → flowcypy-0.7.4}/docs/images/logo.png +0 -0
  131. {flowcypy-0.7.1 → flowcypy-0.7.4}/docs/make.bat +0 -0
  132. {flowcypy-0.7.1 → flowcypy-0.7.4}/docs/source/_static/default.css +0 -0
  133. {flowcypy-0.7.1 → flowcypy-0.7.4}/docs/source/_static/logo.png +0 -0
  134. {flowcypy-0.7.1 → flowcypy-0.7.4}/docs/source/_static/thumbnail.png +0 -0
  135. {flowcypy-0.7.1 → flowcypy-0.7.4}/docs/source/code/base.rst +0 -0
  136. {flowcypy-0.7.1 → flowcypy-0.7.4}/docs/source/code/detector.rst +0 -0
  137. {flowcypy-0.7.1 → flowcypy-0.7.4}/docs/source/code/distributions.rst +0 -0
  138. {flowcypy-0.7.1 → flowcypy-0.7.4}/docs/source/code/flow_cell.rst +0 -0
  139. {flowcypy-0.7.1 → flowcypy-0.7.4}/docs/source/code/flow_cytometer.rst +0 -0
  140. {flowcypy-0.7.1 → flowcypy-0.7.4}/docs/source/code/peak_locator.rst +0 -0
  141. {flowcypy-0.7.1 → flowcypy-0.7.4}/docs/source/code/scatterer.rst +0 -0
  142. {flowcypy-0.7.1 → flowcypy-0.7.4}/docs/source/code/source.rst +0 -0
  143. {flowcypy-0.7.1 → flowcypy-0.7.4}/docs/source/code.rst +0 -0
  144. {flowcypy-0.7.1 → flowcypy-0.7.4}/docs/source/conf.py +0 -0
  145. {flowcypy-0.7.1 → flowcypy-0.7.4}/docs/source/examples.rst +0 -0
  146. {flowcypy-0.7.1 → flowcypy-0.7.4}/docs/source/index.rst +0 -0
  147. {flowcypy-0.7.1 → flowcypy-0.7.4}/docs/source/internal/core_components.rst +0 -0
  148. {flowcypy-0.7.1 → flowcypy-0.7.4}/docs/source/internal/getting_started.rst +0 -0
  149. {flowcypy-0.7.1 → flowcypy-0.7.4}/docs/source/internal/objectives/main.rst +0 -0
  150. {flowcypy-0.7.1 → flowcypy-0.7.4}/docs/source/internal/objectives/pre.rst +0 -0
  151. {flowcypy-0.7.1 → flowcypy-0.7.4}/docs/source/internal/objectives/stretch.rst +0 -0
  152. {flowcypy-0.7.1 → flowcypy-0.7.4}/docs/source/internal/prerequisites/index.rst +0 -0
  153. {flowcypy-0.7.1 → flowcypy-0.7.4}/docs/source/internal/prerequisites/mathematics.rst +0 -0
  154. {flowcypy-0.7.1 → flowcypy-0.7.4}/docs/source/internal/prerequisites/optics.rst +0 -0
  155. {flowcypy-0.7.1 → flowcypy-0.7.4}/docs/source/internal/prerequisites/programming.rst +0 -0
  156. {flowcypy-0.7.1 → flowcypy-0.7.4}/docs/source/internal/ressources.rst +0 -0
  157. {flowcypy-0.7.1 → flowcypy-0.7.4}/docs/source/internal/tasks.rst +0 -0
  158. {flowcypy-0.7.1 → flowcypy-0.7.4}/docs/source/internal.rst +0 -0
  159. {flowcypy-0.7.1 → flowcypy-0.7.4}/docs/source/references.rst +0 -0
  160. {flowcypy-0.7.1 → flowcypy-0.7.4}/docs/source/theory.rst +0 -0
  161. {flowcypy-0.7.1 → flowcypy-0.7.4}/meta.yaml +0 -0
  162. {flowcypy-0.7.1 → flowcypy-0.7.4}/notebook.ipynb +0 -0
  163. {flowcypy-0.7.1 → flowcypy-0.7.4}/setup.cfg +0 -0
  164. {flowcypy-0.7.1 → flowcypy-0.7.4}/tests/__init__.py +0 -0
  165. {flowcypy-0.7.1 → flowcypy-0.7.4}/tests/test_detector_noise.py +0 -0
  166. {flowcypy-0.7.1 → flowcypy-0.7.4}/tests/test_distribution.py +0 -0
  167. {flowcypy-0.7.1 → flowcypy-0.7.4}/tests/test_noises.py +0 -0
  168. {flowcypy-0.7.1 → flowcypy-0.7.4}/tests/test_peak_algorithm.py +0 -0
  169. {flowcypy-0.7.1 → flowcypy-0.7.4}/tests/test_source.py +0 -0
@@ -12,5 +12,5 @@ __version__: str
12
12
  __version_tuple__: VERSION_TUPLE
13
13
  version_tuple: VERSION_TUPLE
14
14
 
15
- __version__ = version = '0.7.1'
16
- __version_tuple__ = version_tuple = (0, 7, 1)
15
+ __version__ = version = '0.7.4'
16
+ __version_tuple__ = version_tuple = (0, 7, 4)
@@ -1,4 +1,5 @@
1
1
  import logging
2
+ import warnings
2
3
  from typing import Optional, Union, List
3
4
  from MPSPlots.styles import mps
4
5
  import pandas as pd
@@ -9,8 +10,9 @@ from scipy.signal import find_peaks
9
10
  import matplotlib.pyplot as plt
10
11
  import seaborn as sns
11
12
  from tabulate import tabulate
12
- import warnings
13
+
13
14
  from FlowCyPy import helper
15
+ from FlowCyPy.classifier import BaseClassifier
14
16
 
15
17
  class DataAccessor:
16
18
  def __init__(self, outer):
@@ -94,13 +96,13 @@ class Acquisition:
94
96
  results = results.reset_index(drop=True)
95
97
 
96
98
  # Check for multiple peaks and issue a warning
97
- peak_counts = results.groupby(['Detector', 'SegmentID']).size()
98
- multiple_peak_segments = peak_counts[peak_counts > 1]
99
- if not multiple_peak_segments.empty:
100
- warnings.warn(
101
- f"Multiple peaks detected in the following segments: {multiple_peak_segments.index.tolist()}",
102
- UserWarning
103
- )
99
+ # peak_counts = results.groupby(['Detector', 'SegmentID']).size()
100
+ # multiple_peak_segments = peak_counts[peak_counts > 1]
101
+ # if not multiple_peak_segments.empty:
102
+ # warnings.warn(
103
+ # f"Multiple peaks detected in the following segments: {multiple_peak_segments.index.tolist()}",
104
+ # UserWarning
105
+ # )
104
106
 
105
107
  _temp = results.reset_index()[['Detector', 'SegmentID', 'Height']].pint.dequantify().droplevel('unit', axis=1)
106
108
 
@@ -172,22 +174,47 @@ class Acquisition:
172
174
  post_buffer: int = 64,
173
175
  max_triggers: int = None) -> None:
174
176
  """
175
- Executes the triggered acquisition analysis.
177
+ Execute triggered acquisition analysis for signal data.
178
+
179
+ This method identifies segments of signal data based on a triggering threshold
180
+ and specified detector. It extracts segments of interest from the signal,
181
+ including a pre-trigger buffer and post-trigger buffer, and stores the results
182
+ in `self.data.triggered`.
176
183
 
177
184
  Parameters
178
185
  ----------
179
186
  threshold : units.Quantity
180
- Trigger threshold value.
181
- trigger_detector_name : str, optional
182
- Detector used for triggering, by default None.
183
- custom_trigger : np.ndarray, optional
184
- Custom trigger array, by default None.
187
+ The threshold value for triggering. Only signal values exceeding this threshold
188
+ will be considered as trigger events.
189
+ trigger_detector_name : str
190
+ The name of the detector used for triggering. This determines which detector's
191
+ signal is analyzed for trigger events.
185
192
  pre_buffer : int, optional
186
- Points before trigger, by default 64.
193
+ The number of points to include before the trigger point in each segment.
194
+ Default is 64.
187
195
  post_buffer : int, optional
188
- Points after trigger, by default 64.
196
+ The number of points to include after the trigger point in each segment.
197
+ Default is 64.
189
198
  max_triggers : int, optional
190
- Maximum number of triggers to process, by default None.
199
+ The maximum number of triggers to process. If None, all triggers will be processed.
200
+ Default is None.
201
+
202
+ Raises
203
+ ------
204
+ ValueError
205
+ If the specified `trigger_detector_name` is not found in the dataset.
206
+
207
+ Warnings
208
+ --------
209
+ UserWarning
210
+ If no triggers are detected for the specified threshold, the method raises a warning
211
+ indicating that no signals met the criteria.
212
+
213
+ Notes
214
+ -----
215
+ - Triggered segments are stored in `self.data.triggered` as a pandas DataFrame with a hierarchical index on `['Detector', 'SegmentID']`.
216
+ - This method modifies `self.data.triggered` in place.
217
+ - The peak detection function `self.detect_peaks` is automatically called at the end of this method to analyze triggered segments.
191
218
  """
192
219
  self.threshold = threshold
193
220
  self.trigger_detector_name = trigger_detector_name
@@ -226,7 +253,28 @@ class Acquisition:
226
253
 
227
254
  self.detect_peaks()
228
255
 
229
- def classify_dataset(self, classifier: object, features: List[str], detectors: list[str]) -> None:
256
+ def classify_dataset(self, classifier: BaseClassifier, features: List[str], detectors: list[str]) -> None:
257
+ """
258
+ Classify the dataset using the specified classifier and features.
259
+
260
+ This method applies a classification algorithm to the dataset by first unstacking
261
+ the "Detector" level of the DataFrame's index. It then uses the provided classifier
262
+ object to classify the dataset based on the specified features and detectors.
263
+
264
+ Parameters
265
+ ----------
266
+ classifier : BaseClassifier
267
+ An object implementing a `run` method for classification.
268
+ features : List[str]
269
+ A list of column names corresponding to the features to be used for classification (e.g., 'Height', 'Width', 'Area').
270
+ detectors : list[str]
271
+ A list of detector names to filter the data before classification. Only data from these detectors will be included in the classification process.
272
+
273
+ Returns
274
+ -------
275
+ None
276
+ This method updates the `self.data.peaks` attribute in place with the classified data.
277
+ """
230
278
  self.data.peaks = self.data.peaks.unstack('Detector')
231
279
  self.classifier = classifier
232
280
 
@@ -417,21 +465,32 @@ class Acquisition:
417
465
  def __init__(self, acquisition: object):
418
466
  self.acquisition = acquisition
419
467
 
420
- def signals(self, figure_size: tuple = (10, 6), show: bool = True) -> None:
468
+ def signals(
469
+ self,
470
+ figure_size: tuple = (10, 6),
471
+ show: bool = True,
472
+ show_populations: str | List[str] = None,
473
+ save_filename: str = None
474
+ ) -> None:
421
475
  """
422
476
  Visualizes raw signals for all detector channels and the scatterer distribution.
423
477
 
424
478
  Parameters
425
479
  ----------
426
480
  figure_size : tuple, optional
427
- Size of the plot (default: (10, 6)).
428
- add_peak_locator : bool, optional
429
- Adds peak location markers to the signals if True (default: False).
481
+ Size of the plot in inches (default: (10, 6)).
430
482
  show : bool, optional
431
- Displays the plot immediately if True (default: True).
483
+ If True, displays the plot immediately (default: True).
484
+ show_populations : str or list of str, optional
485
+ List of population names to highlight in the event plot. If None, shows all populations.
486
+ save_filename : str, optional
487
+ If provided, saves the figure to the specified file.
432
488
  """
433
- n_plots = self.acquisition.n_detectors + 1
489
+ # Handle `show_populations` default case
490
+ if show_populations is None:
491
+ show_populations = self.acquisition.data.scatterer.index.get_level_values('Population').unique()
434
492
 
493
+ n_plots = self.acquisition.n_detectors + 1 # One extra plot for events
435
494
  time_units = self.acquisition.data.continuous.Time.max().to_compact().units
436
495
 
437
496
  with plt.style.context(mps):
@@ -440,56 +499,103 @@ class Acquisition:
440
499
  nrows=n_plots,
441
500
  figsize=figure_size,
442
501
  sharex=True,
443
- height_ratios=[1] * (n_plots - 1) + [0.5],
502
+ height_ratios=[1] * (n_plots - 1) + [0.5]
444
503
  )
445
504
 
505
+ # Plot digitized and continuous signals for each detector
446
506
  for ax, (detector_name, group) in zip(axes[:-1], self.acquisition.data.continuous.groupby("Detector")):
447
507
  detector = self.get_detector(detector_name)
448
508
 
449
- ax.step(group["Time"].pint.to(time_units), group["DigitizedSignal"], label="Digitized Signal", where='mid')
509
+ # Convert time and signal data to the appropriate units
510
+ time_data = group["Time"].pint.to(time_units)
511
+ digitized_signal = group["DigitizedSignal"]
512
+
513
+ # Plot digitized signal
514
+ ax.step(time_data, digitized_signal, label="Digitized Signal", where='mid')
450
515
  ax.set_ylabel(detector_name)
451
516
  ax.set_ylim([0, self.acquisition.cytometer.signal_digitizer._bit_depth])
452
517
 
518
+ # Twin axis for continuous signal
453
519
  ax2 = ax.twinx()
454
- ax2_x = self.acquisition.data.continuous.loc[detector_name, 'Time']
455
- ax2_y = self.acquisition.data.continuous.loc[detector_name, 'Signal']
456
- ax2_y_units = ax2_y.max().to_compact().units
457
- ax2.plot(
458
- ax2_x.pint.to(time_units),
459
- ax2_y.pint.to(ax2_y_units),
460
- color='black',
461
- linewidth=1,
462
- linestyle='-',
463
- label='Continuous signal',
464
- zorder=0,
465
- )
520
+ cont_time, cont_signal, cont_signal_units = self._get_continuous_signal(detector_name, time_units)
521
+ ax2.plot(cont_time, cont_signal, color='black', linewidth=1, linestyle='-', label='Continuous Signal', zorder=0)
466
522
 
467
- ax2.set_ylim(detector._saturation_levels)
468
-
469
- self._add_event_to_ax(ax=axes[-1], time_units=time_units)
523
+ # Set y-limits for the continuous signal
524
+ if detector._saturation_levels[0] != detector._saturation_levels[1]:
525
+ ax2.set_ylim(detector._saturation_levels)
470
526
 
527
+ # Add event markers to the last subplot
528
+ self._add_event_to_ax(ax=axes[-1], time_units=time_units, show_populations=show_populations)
471
529
  axes[-1].set_xlabel(f"Time [{time_units}]")
530
+
531
+ # Save or show the figure
532
+ if save_filename:
533
+ fig.savefig(fname=save_filename)
472
534
  if show:
473
535
  plt.show()
474
536
 
475
- def _add_event_to_ax(self, ax: plt.Axes, time_units: units.Quantity, palette: str = 'tab10') -> None:
537
+
538
+ def _get_continuous_signal(self, detector_name: str, time_units: units.Quantity):
539
+ """
540
+ Retrieves and converts the continuous signal data for a given detector.
541
+
542
+ Parameters
543
+ ----------
544
+ detector_name : str
545
+ Name of the detector.
546
+ time_units : units.Quantity
547
+ Desired time units.
548
+
549
+ Returns
550
+ -------
551
+ tuple
552
+ (time array, signal array, signal units)
553
+ """
554
+ data = self.acquisition.data.continuous.loc[detector_name]
555
+ cont_time = data['Time'].pint.to(time_units)
556
+ cont_signal = data['Signal']
557
+ cont_signal_units = cont_signal.max().to_compact().units
558
+ return cont_time, cont_signal.pint.to(cont_signal_units), cont_signal_units
559
+
560
+ def _add_event_to_ax(
561
+ self,
562
+ ax: plt.Axes,
563
+ time_units: units.Quantity,
564
+ palette: str = 'tab10',
565
+ show_populations: str | List[str] = None
566
+ ) -> None:
567
+ """
568
+ Adds vertical markers for event occurrences in the scatterer data.
569
+
570
+ Parameters
571
+ ----------
572
+ ax : plt.Axes
573
+ The matplotlib axis to modify.
574
+ time_units : units.Quantity
575
+ Time units to use for plotting.
576
+ palette : str, optional
577
+ Color palette for different populations (default: 'tab10').
578
+ show_populations : str or list of str, optional
579
+ Populations to display. If None, all populations are shown.
580
+ """
581
+ # Get unique population names
476
582
  unique_populations = self.acquisition.data.scatterer.index.get_level_values('Population').unique()
477
583
  color_mapping = dict(zip(unique_populations, sns.color_palette(palette, len(unique_populations))))
478
584
 
479
585
  for population_name, group in self.acquisition.data.scatterer.groupby('Population'):
586
+ if show_populations is not None and population_name not in show_populations:
587
+ continue
480
588
  x = group.Time.pint.to(time_units)
481
589
  color = color_mapping[population_name]
482
590
  ax.vlines(x, ymin=0, ymax=1, transform=ax.get_xaxis_transform(), label=population_name, color=color)
483
591
 
484
592
  ax.tick_params(axis='y', left=False, labelleft=False)
485
-
486
593
  ax.get_yaxis().set_visible(False)
487
594
  ax.set_xlabel(f"Time [{time_units}]")
488
-
489
595
  ax.legend()
490
596
 
491
597
  @helper.plot_sns
492
- def coupling_distribution(self, x_detector: str, y_detector: str, equal_limits: bool = False) -> None:
598
+ def coupling_distribution(self, x_detector: str, y_detector: str, bandwidth_adjust: float = 1) -> None:
493
599
  """
494
600
  Plots the density distribution of optical coupling between two detector channels.
495
601
 
@@ -512,7 +618,7 @@ class Acquisition:
512
618
  y = df[y_detector].pint.to(y_units)
513
619
 
514
620
  with plt.style.context(mps):
515
- grid = sns.jointplot(data=df, x=x, y=y, hue="Population", alpha=0.8)
621
+ grid = sns.jointplot(data=df, x=x, y=y, hue="Population", alpha=0.8, marginal_kws=dict(bw_adjust=bandwidth_adjust))
516
622
 
517
623
  grid.ax_joint.set_xlabel(f"Signal {x_detector} [{x_units}]")
518
624
  grid.ax_joint.set_ylabel(f"Signal {y_detector} [{y_units}]")
@@ -522,7 +628,7 @@ class Acquisition:
522
628
  return grid
523
629
 
524
630
  @helper.plot_sns
525
- def scatterer(self, alpha: float = 0.8, bandwidth_adjust: float = 1, log_scale: bool = False, color_palette: Optional[Union[str, dict]] = None) -> None:
631
+ def scatterer(self, alpha: float = 0.8, bandwidth_adjust: float = 1, color_palette: Optional[Union[str, dict]] = None) -> None:
526
632
  """
527
633
  Visualizes the joint distribution of scatterer sizes and refractive indices using a Seaborn jointplot.
528
634
 
@@ -616,6 +722,7 @@ class Acquisition:
616
722
  def trigger(self, show: bool = True) -> None:
617
723
  """Plot detected peaks on signal segments."""
618
724
  n_plots = self.acquisition.n_detectors + 1
725
+
619
726
  with plt.style.context(mps):
620
727
  _, axes = plt.subplots(
621
728
  nrows=n_plots,
@@ -628,12 +735,12 @@ class Acquisition:
628
735
 
629
736
  time_units = self.acquisition.data.triggered['Time'].max().to_compact().units
630
737
 
631
- for ax, (detector_name, group) in zip(axes, self.acquisition.data.triggered.groupby(level=['Detector'])):
738
+ for ax, (detector_name, group) in zip(axes, self.acquisition.data.triggered.groupby(level='Detector')):
632
739
  detector = self.get_detector(detector_name)
633
740
 
634
741
  ax.set_ylabel(detector_name)
635
742
 
636
- for _, sub_group in group.groupby(level=['SegmentID']):
743
+ for _, sub_group in group.groupby(level='SegmentID'):
637
744
  x = sub_group['Time'].pint.to(time_units)
638
745
  digitized = sub_group['DigitizedSignal']
639
746
  ax.step(x, digitized, where='mid', linewidth=2)
@@ -661,7 +768,7 @@ class Acquisition:
661
768
  ax2.legend()
662
769
 
663
770
 
664
- for ax, (detector_name, group) in zip(axes, self.acquisition.data.peaks.groupby(level=['Detector'], axis=0)):
771
+ for ax, (detector_name, group) in zip(axes, self.acquisition.data.peaks.groupby(level='Detector')):
665
772
  x = group['Time'].pint.to(time_units)
666
773
  y = group['Height']
667
774
  ax.scatter(x, y, color='C1')
@@ -700,7 +807,6 @@ class Acquisition:
700
807
 
701
808
  # Set the plotting style
702
809
  with plt.style.context(mps):
703
- # Generate a scatter plot using seaborn's jointplot
704
810
  grid = sns.jointplot(
705
811
  data=self.acquisition.data.peaks,
706
812
  x=(feature, x_detector),
@@ -0,0 +1,182 @@
1
+ from sklearn.cluster import KMeans
2
+ from sklearn.cluster import DBSCAN
3
+ from sklearn.mixture import GaussianMixture
4
+ import pandas as pd
5
+ from typing import Dict, Tuple
6
+
7
+
8
class BaseClassifier:
    """Shared functionality for the clustering classifiers (KMeans, GMM, DBSCAN)."""

    def filter_dataframe(self, dataframe: pd.DataFrame, features: list, detectors: list = None) -> pd.DataFrame:
        """
        Filter the DataFrame down to the selected features and detectors.

        Parameters
        ----------
        dataframe : pd.DataFrame
            Input DataFrame whose columns form a two-level MultiIndex of
            (feature, detector) pairs.
        features : list
            List of feature names to keep (e.g. 'Height', 'Width', 'Area').
        detectors : list, optional
            List of detector names to keep. If None, every detector present
            in the second column level is used.

        Returns
        -------
        pd.DataFrame
            A DataFrame restricted to the selected (feature, detector) columns.

        Raises
        ------
        KeyError
            If the requested features/detectors are not present in the columns
            (raised by pandas ``.loc`` label-based selection).
        """
        # Default to all detectors found in the second level of the column index.
        if detectors is None:
            detectors = dataframe.columns.get_level_values(1).unique().tolist()

        return dataframe.loc[:, (features, detectors)]
36
+
37
+
38
class KmeansClassifier(BaseClassifier):
    def __init__(self, number_of_cluster: int) -> None:
        """
        Initialize the KMeans classifier.

        Parameters
        ----------
        number_of_cluster : int
            Number of clusters to partition the data into.
        """
        self.number_of_cluster = number_of_cluster

    def run(self, dataframe: pd.DataFrame, features: list = ['Height'], detectors: list = None, random_state: int = 42):
        """
        Run KMeans clustering on the selected features and detectors.

        Parameters
        ----------
        dataframe : pd.DataFrame
            The input DataFrame with a (feature, detector) MultiIndex on its
            columns. A 'Label' column holding the cluster assignments is added
            to it in place.
        features : list
            List of features to use for clustering (e.g. 'Height', 'Width', 'Area').
        detectors : list, optional
            List of detectors to use. If None, use all detectors.
        random_state : int, optional
            Random state for KMeans, by default 42.

        Returns
        -------
        numpy.ndarray
            The cluster label of each row, as returned by
            ``KMeans.fit_predict``. The same labels are also stored in
            ``dataframe['Label']`` as a side effect.
        """
        # Restrict to the requested (feature, detector) columns.
        sub_dataframe = self.filter_dataframe(dataframe=dataframe, features=features, detectors=detectors)

        # Strip Pint units if present: sklearn requires plain numeric data.
        if hasattr(sub_dataframe, 'pint'):
            sub_dataframe = sub_dataframe.pint.dequantify().droplevel('unit', axis=1)

        # Fit KMeans and assign each row to one of the clusters.
        kmeans = KMeans(n_clusters=self.number_of_cluster, random_state=random_state)
        labels = kmeans.fit_predict(sub_dataframe)

        # NOTE: mutates the caller's DataFrame in place.
        dataframe['Label'] = labels

        return labels
84
+
85
class GaussianMixtureClassifier(BaseClassifier):
    def __init__(self, number_of_components: int) -> None:
        """
        Initialize the Gaussian Mixture classifier.

        Parameters
        ----------
        number_of_components : int
            Number of Gaussian components (clusters) to use for the model.
        """
        self.number_of_components = number_of_components

    def run(self, dataframe: pd.DataFrame, features: list = ['Height'], detectors: list = None, random_state: int = 42):
        """
        Run Gaussian Mixture Model (GMM) clustering on the selected features and detectors.

        Parameters
        ----------
        dataframe : pd.DataFrame
            The input DataFrame with a (feature, detector) MultiIndex on its
            columns. A 'Label' column holding the component assignments is
            added to it in place.
        features : list
            List of features to use for clustering (e.g. 'Height', 'Width', 'Area').
        detectors : list, optional
            List of detectors to use. If None, use all detectors.
        random_state : int, optional
            Random state for reproducibility, by default 42.

        Returns
        -------
        numpy.ndarray
            The component label of each row, as returned by
            ``GaussianMixture.fit_predict``. The same labels are also stored
            in ``dataframe['Label']`` as a side effect.
        """
        # Restrict to the requested (feature, detector) columns.
        sub_dataframe = self.filter_dataframe(dataframe=dataframe, features=features, detectors=detectors)

        # Strip Pint units if present: sklearn requires plain numeric data.
        if hasattr(sub_dataframe, 'pint'):
            sub_dataframe = sub_dataframe.pint.dequantify().droplevel('unit', axis=1)

        # Fit the mixture model and assign each row to its most likely component.
        gmm = GaussianMixture(n_components=self.number_of_components, random_state=random_state)
        labels = gmm.fit_predict(sub_dataframe)

        # NOTE: mutates the caller's DataFrame in place.
        dataframe['Label'] = labels

        return labels
132
+
133
class DBSCANClassifier(BaseClassifier):
    def __init__(self, epsilon: float = 0.5, min_samples: int = 5) -> None:
        """
        Initialize the DBSCAN classifier.

        Parameters
        ----------
        epsilon : float, optional
            The maximum distance between two samples for them to be considered
            as neighbors. Default is 0.5.
        min_samples : int, optional
            The number of samples in a neighborhood for a point to be
            considered a core point. Default is 5.
        """
        self.epsilon = epsilon
        self.min_samples = min_samples

    def run(self, dataframe: pd.DataFrame, features: list = ['Height'], detectors: list = None):
        """
        Run DBSCAN clustering on the selected features and detectors.

        Parameters
        ----------
        dataframe : pd.DataFrame
            The input DataFrame with a (feature, detector) MultiIndex on its
            columns. A 'Label' column holding the cluster assignments is added
            to it in place.
        features : list
            List of features to use for clustering (e.g. 'Height', 'Width', 'Area').
        detectors : list, optional
            List of detectors to use. If None, use all detectors.

        Returns
        -------
        numpy.ndarray
            The cluster label of each row, as returned by
            ``DBSCAN.fit_predict``; noise points are labeled -1. The same
            labels are also stored in ``dataframe['Label']`` as a side effect.
        """
        # Restrict to the requested (feature, detector) columns.
        sub_dataframe = self.filter_dataframe(dataframe=dataframe, features=features, detectors=detectors)

        # Strip Pint units if present: sklearn requires plain numeric data.
        if hasattr(sub_dataframe, 'pint'):
            sub_dataframe = sub_dataframe.pint.dequantify().droplevel('unit', axis=1)

        # Density-based clustering; points in low-density regions get label -1.
        dbscan = DBSCAN(eps=self.epsilon, min_samples=self.min_samples)
        labels = dbscan.fit_predict(sub_dataframe)

        # NOTE: mutates the caller's DataFrame in place.
        dataframe['Label'] = labels

        return labels