FlowCyPy 0.5.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (44) hide show
  1. FlowCyPy/__init__.py +15 -0
  2. FlowCyPy/_version.py +16 -0
  3. FlowCyPy/classifier.py +196 -0
  4. FlowCyPy/coupling_mechanism/__init__.py +4 -0
  5. FlowCyPy/coupling_mechanism/empirical.py +47 -0
  6. FlowCyPy/coupling_mechanism/mie.py +205 -0
  7. FlowCyPy/coupling_mechanism/rayleigh.py +115 -0
  8. FlowCyPy/coupling_mechanism/uniform.py +39 -0
  9. FlowCyPy/cytometer.py +198 -0
  10. FlowCyPy/detector.py +616 -0
  11. FlowCyPy/directories.py +36 -0
  12. FlowCyPy/distribution/__init__.py +16 -0
  13. FlowCyPy/distribution/base_class.py +59 -0
  14. FlowCyPy/distribution/delta.py +86 -0
  15. FlowCyPy/distribution/lognormal.py +94 -0
  16. FlowCyPy/distribution/normal.py +95 -0
  17. FlowCyPy/distribution/particle_size_distribution.py +110 -0
  18. FlowCyPy/distribution/uniform.py +96 -0
  19. FlowCyPy/distribution/weibull.py +80 -0
  20. FlowCyPy/event_correlator.py +244 -0
  21. FlowCyPy/flow_cell.py +122 -0
  22. FlowCyPy/helper.py +85 -0
  23. FlowCyPy/logger.py +322 -0
  24. FlowCyPy/noises.py +29 -0
  25. FlowCyPy/particle_count.py +102 -0
  26. FlowCyPy/peak_locator/__init__.py +4 -0
  27. FlowCyPy/peak_locator/base_class.py +163 -0
  28. FlowCyPy/peak_locator/basic.py +108 -0
  29. FlowCyPy/peak_locator/derivative.py +143 -0
  30. FlowCyPy/peak_locator/moving_average.py +114 -0
  31. FlowCyPy/physical_constant.py +19 -0
  32. FlowCyPy/plottings.py +270 -0
  33. FlowCyPy/population.py +239 -0
  34. FlowCyPy/populations_instances.py +49 -0
  35. FlowCyPy/report.py +236 -0
  36. FlowCyPy/scatterer.py +373 -0
  37. FlowCyPy/source.py +249 -0
  38. FlowCyPy/units.py +26 -0
  39. FlowCyPy/utils.py +191 -0
  40. FlowCyPy-0.5.0.dist-info/LICENSE +21 -0
  41. FlowCyPy-0.5.0.dist-info/METADATA +252 -0
  42. FlowCyPy-0.5.0.dist-info/RECORD +44 -0
  43. FlowCyPy-0.5.0.dist-info/WHEEL +5 -0
  44. FlowCyPy-0.5.0.dist-info/top_level.txt +1 -0
FlowCyPy/logger.py ADDED
@@ -0,0 +1,322 @@
1
+ import logging
2
+ from tabulate import tabulate
3
+ import pandas as pd
4
+ from typing import List, Union, Optional
5
+ from FlowCyPy.units import particle, milliliter
6
+
7
+
8
class EventCorrelatorLogger:
    """
    Logs key statistics and properties for the EventCorrelator class, including peak detection statistics
    for each detector and coincident event information.

    Parameters
    ----------
    correlator : EventCorrelator
        An instance of the EventCorrelator class to log statistics for.
    """

    def __init__(self, correlator: object):
        """
        Initializes the EventCorrelatorLogger with the correlator instance.

        Parameters
        ----------
        correlator : EventCorrelator
            An instance of EventCorrelator to log properties and statistics for.
        """
        self.correlator = correlator
        self.detectors = correlator.cytometer.detectors
        # `coincidence` only exists after the correlator has matched events;
        # default to None so logging before correlation degrades gracefully.
        self.coincidence = getattr(correlator, "coincidence", None)

    def log_statistics(self, table_format: str = "grid") -> None:
        """
        Logs statistics for each detector, including number of peaks, time between peaks, and peak times.

        Parameters
        ----------
        table_format : str, optional
            The format for the table display (default is 'grid').
        """
        logging.info("\n=== Detector Statistics ===")

        table_data = [self._get_detector_stats(detector) for detector in self.detectors]
        headers = [
            "Detector",
            "Number of Peaks",
            "First Peak Time",
            "Last Peak Time",
            "Avg Time Between Peaks",
            "Min Time Between Peaks",
            "Measured Concentration"
        ]

        formatted_table = tabulate(table_data, headers=headers, tablefmt=table_format, floatfmt=".4f")
        logging.info("\n" + formatted_table)

    def _get_detector_stats(self, detector) -> List[Union[str, int]]:
        """
        Computes statistics for a single detector.

        Parameters
        ----------
        detector : object
            A detector object with peak detection data.

        Returns
        -------
        list
            List of statistics: [detector name, number of peaks, first peak time, last peak time,
            average time between peaks, minimum time between peaks, measured concentration].
        """
        group = self.correlator.dataframe.xs(detector.name, level="Detector")
        num_events = len(group)

        if num_events > 1:
            times = group['PeakTimes'].sort_values()
            time_diffs = times.diff().dropna()
            # `to_compact()` / ':.4~P' are pint formatting helpers — PeakTimes
            # entries are assumed to be pint quantities.
            avg_time_between_peaks = f"{time_diffs.mean().to_compact():.4~P}"
            min_time_between_peaks = f"{time_diffs.min().to_compact():.4~P}"
            measured_concentration = num_events * particle / self.correlator.cytometer.scatterer.flow_cell.volume.to(milliliter)
        else:
            avg_time_between_peaks = "N/A"
            min_time_between_peaks = "N/A"
            measured_concentration = "N/A"

        first_peak_time = f"{group['PeakTimes'].min().to_compact():.4~P}" if num_events > 0 else "N/A"
        last_peak_time = f"{group['PeakTimes'].max().to_compact():.4~P}" if num_events > 0 else "N/A"

        return [detector.name, num_events, first_peak_time, last_peak_time, avg_time_between_peaks, min_time_between_peaks, measured_concentration]

    def log_coincidence_statistics(self, table_format: str = "grid") -> None:
        """
        Logs statistics about coincident events detected between detectors.

        Parameters
        ----------
        table_format : str, optional
            The format for the table display (default is 'grid').
        """
        if self.coincidence is None or self.coincidence.empty:
            logging.warning("No coincidence events to log.")
            return

        logging.info("\n=== Coincidence Event Statistics ===")

        table_data = self._get_coincidence_stats()
        headers = ["Detector 1 Event", "Detector 2 Event", "Time Difference"]

        formatted_table = tabulate(table_data, headers=headers, tablefmt=table_format, floatfmt=".4f")
        logging.info("\n" + formatted_table)

    def _get_coincidence_stats(self) -> List[List[Union[str, float]]]:
        """
        Extracts statistics about coincident events.

        Returns
        -------
        list
            List of coincident event statistics: [detector 1 event, detector 2 event, time difference].
        """
        coinc_df = self.coincidence.reset_index()
        # Multi-index column keys: (detector name, 'PeakTimes').
        key_0 = (self.detectors[0].name, 'PeakTimes')
        key_1 = (self.detectors[1].name, 'PeakTimes')
        time_diffs = (coinc_df[key_0] - coinc_df[key_1]).abs()

        # BUG FIX: the original iterated with `itertuples()` and then indexed
        # each row with a (detector, column) tuple, which namedtuples do not
        # support (TypeError). `iterrows()` yields Series that accept tuple keys.
        return [
            [row[key_0], row[key_1], time_diff.to_compact()]
            for (_, row), time_diff in zip(coinc_df.iterrows(), time_diffs)
        ]
135
+
136
+
137
class ScattererLogger:
    """
    Logs key properties of scatterers in formatted tables.

    Parameters
    ----------
    scatterers : object
        Scatterer collection exposing `populations` and
        `medium_refractive_index` (annotated as a list upstream, but used
        here as a single collection object).
    """

    def __init__(self, scatterers: List[object]):
        """
        Initializes the ScattererLogger with the scatterers to log.

        Parameters
        ----------
        scatterers : object
            Scatterer collection whose populations will be logged.
        """
        self.scatterers = scatterers

    def log_properties(self, table_format: str = "grid") -> None:
        """
        Logs properties of scatterers in formatted tables.

        Parameters
        ----------
        table_format : str, optional
            The format for the table display (default is 'grid').
            Options include 'plain', 'github', 'grid', 'fancy_grid', etc., as supported by tabulate.
        """
        logging.info("\n=== Scatterer Properties Summary ===")

        # First table: General properties
        general_table_data = [self._get_population_properties(population) for population in self.scatterers.populations]
        general_headers = [
            "Name",
            "Refractive Index",
            "Medium Refractive Index",
            "Size",
            "Particle Count",
            "Number of Events",
            "Min Time Between Events",
            "Avg Time Between Events"
        ]
        formatted_general_table = tabulate(general_table_data, headers=general_headers, tablefmt=table_format, floatfmt=".4f")
        logging.info("\n" + formatted_general_table)

    def _get_population_properties(self, population: object) -> List[Union[str, float]]:
        """
        Extracts key properties of a scatterer for the general properties table.

        Parameters
        ----------
        population : object
            A scatterer population with name, refractive index, size,
            particle count, event count, and a dataframe with a 'Time' column.

        Returns
        -------
        list
            [name, refractive index, medium refractive index, size, particle
            count, number of events, min time between events, avg time between
            events] — ordered to match the headers in `log_properties`.
        """
        name = population.name
        refractive_index = f"{population.refractive_index}"
        medium_refractive_index = f"{self.scatterers.medium_refractive_index}"
        size = f"{population.size}"
        concentration = f"{population.particle_count}"
        num_events = population.n_events

        # `to_compact()` implies 'Time' holds pint quantities.
        min_delta_position = abs(population.dataframe['Time'].diff()).min().to_compact()
        avg_delta_position = population.dataframe['Time'].diff().mean().to_compact()

        # BUG FIX: values were previously returned as (avg, min) while the
        # table headers list "Min" before "Avg"; the order now matches headers.
        return [name, refractive_index, medium_refractive_index, size, concentration, num_events, min_delta_position, avg_delta_position]
210
+
211
+
212
class SimulationLogger:
    """
    Summarizes simulated pulse events for each detector.

    Produces a tabulated overview of the simulation: number of events,
    saturation status, first and last event times, average and minimum
    spacing between events, and the mean event rate.

    Parameters
    ----------
    cytometer : object
        The cytometer instance which contains info on the detected pulses.
    """

    def __init__(self, cytometer: object):
        self.cytometer = cytometer
        self.detectors = cytometer.detectors
        self.pulse_dataframe = cytometer.pulse_dataframe

    def log_statistics(self, include_totals: bool = True, table_format: str = "grid") -> None:
        """
        Logs summary statistics for each detector in a formatted table.

        Parameters
        ----------
        include_totals : bool, optional
            If True, logs the total number of events across all detectors (default is True).
        table_format : str, optional
            The format for the table display (default is 'grid').
            Options include 'plain', 'github', 'grid', 'fancy_grid', etc., as supported by tabulate.
        """
        logging.info("\n=== Simulation Statistics Summary ===")

        rows = []
        for det in self.detectors:
            rows.append(self._get_detector_stats(det))

        headers = [
            "Detector",
            "Number of Events",
            "Saturated",
            "First Event Time",
            "Last Event Time",
            "Avg Time Between Events",
            "Min Time Between Events",
            "Mean event rate"
        ]

        rendered = tabulate(rows, headers=headers, tablefmt=table_format, floatfmt=".3f")
        logging.info("\n" + rendered)

        if include_totals:
            # Column index 1 of each row holds that detector's event count.
            total_events = 0
            for entry in rows:
                total_events += entry[1]
            logging.info(f"\nTotal number of events detected across all detectors: {total_events}")

    def _get_detector_stats(self, detector) -> List[Union[str, int, Optional[str]]]:
        """
        Computes statistics for a single detector.

        NOTE(review): all timing stats derive from the shared
        `pulse_dataframe['Centers']` column, so only the name and saturation
        flag vary per detector — confirm this is intended.

        Parameters
        ----------
        detector : object
            A detector object with 'name' and 'is_saturated' attributes.

        Returns
        -------
        list
            [detector name, num_events, saturated flag, first_event_time,
            last_event_time, avg_time_between_events, min_time_between_events,
            mean event rate]
        """
        centers = self.pulse_dataframe['Centers']
        count = len(centers)

        avg_gap = "N/A"
        min_gap = "N/A"
        if count > 1:
            # Gaps between consecutive (time-ordered) event centers.
            gaps = centers.sort_values().diff().dropna()
            avg_gap = self._format_time(gaps.mean())
            min_gap = self._format_time(gaps.min())

        first_event = "N/A" if count == 0 else self._format_time(centers.min())
        last_event = "N/A" if count == 0 else self._format_time(centers.max())

        mean_rate = (count / self.cytometer.scatterer.flow_cell.run_time).to('Hz')

        return [
            detector.name,
            count,
            detector.is_saturated,  # whether the detector saturated at some point
            first_event,
            last_event,
            avg_gap,
            min_gap,
            mean_rate,
        ]

    def _format_time(self, time_value) -> str:
        """
        Formats a time value for display, converting to compact notation if available.

        Parameters
        ----------
        time_value : pint.Quantity or pd.Timestamp or float
            The time value to format.

        Returns
        -------
        str
            The formatted time string ("N/A" for null non-quantity values).
        """
        try:
            # pint quantities expose `to_compact()`; use pint's '~P' formatting.
            return f"{time_value.to_compact():.4~P}"
        except AttributeError:
            # Plain numbers: fixed-point, guarding against NaN/None.
            return f"{time_value:.4f}" if pd.notnull(time_value) else "N/A"
FlowCyPy/noises.py ADDED
@@ -0,0 +1,29 @@
1
+
2
+
3
class NoiseSetting:
    """Singleton holding global on/off switches for simulated noise sources."""

    _instance = None

    # Master-switch backing field plus the individual noise-source flags.
    _include_noises = True
    include_shot_noise = True
    include_dark_current_noise = True
    include_thermal_noise = True
    include_RIN_noise = True

    def __new__(cls, *args, **kwargs):
        # Lazily create the single shared instance on first construction.
        if cls._instance is None:
            cls._instance = super().__new__(cls)
        return cls._instance

    @property
    def include_noises(self):
        """Master switch: whether any noise contributions are included."""
        return self._include_noises

    @include_noises.setter
    def include_noises(self, value):
        self._include_noises = value
        # Turning the master switch off also disables every individual source.
        # (Turning it back on does NOT re-enable them, matching prior behavior.)
        if not value:
            for flag in (
                "include_shot_noise",
                "include_dark_current_noise",
                "include_thermal_noise",
                "include_RIN_noise",
            ):
                setattr(self, flag, False)
@@ -0,0 +1,102 @@
1
+ from FlowCyPy.units import Quantity, particle, liter, second
2
+
3
+
4
class ParticleCount:
    """
    Represents the quantity of particles in a flow, defined either as a
    concentration (particles per unit volume) or as a fixed particle number.

    Exactly one of `num_particles` / `concentration` is set; the other is None.

    Parameters
    ----------
    value : Quantity
        Either a concentration (e.g., particles/liter) or a fixed number of particles.
    """

    def __init__(self, value: Quantity):
        """
        Initializes the ParticleCount from a concentration or a fixed count.

        Parameters
        ----------
        value : Quantity
            A Quantity with dimensions of 'particles' or 'particles per volume'.

        Raises
        ------
        ValueError
            If the input value has neither dimensionality.
        """
        if value.check(particle):
            # Fixed number of particles was supplied.
            self.num_particles = value.to(particle)
            self.concentration = None
            return
        if value.check(particle / liter):
            # A concentration was supplied.
            self.concentration = value.to(particle / liter)
            self.num_particles = None
            return
        raise ValueError(
            "Value must have dimensions of either 'particles' or 'particles per unit volume'."
        )

    def calculate_number_of_events(self, flow_area: Quantity, flow_speed: Quantity, run_time: Quantity) -> Quantity:
        """
        Calculates the total number of particles carried by the flow.

        Parameters
        ----------
        flow_area : Quantity
            Cross-sectional area of the flow.
        flow_speed : Quantity
            Speed of the flow.
        run_time : Quantity
            Duration of the run; together these define the swept volume.

        Returns
        -------
        Quantity
            Total particle count in units of 'particles'.

        Raises
        ------
        ValueError
            If neither a particle number nor a concentration is defined.
        """
        swept_volume = flow_area * flow_speed * run_time

        if self.num_particles is not None:
            # A fixed count overrides any volume-based computation.
            return self.num_particles
        if self.concentration is not None:
            return (self.concentration * swept_volume).to(particle)
        raise ValueError("Either a number of particles or a concentration must be defined.")

    def compute_particle_flux(self, flow_speed: Quantity, flow_area: Quantity, run_time: Quantity) -> Quantity:
        """
        Computes the particle flux through the flow system.

        Parameters
        ----------
        flow_speed : Quantity
            Speed of the flow (e.g., meters per second).
        flow_area : Quantity
            Cross-sectional area of the flow tube (e.g., square meters).
        run_time : Quantity
            Total duration of the flow (e.g., seconds); used only when a
            fixed particle number is defined.

        Returns
        -------
        Quantity
            Particle flux in particles per second.
        """
        if self.concentration is None:
            # Fixed count: spread the particles evenly over the run time.
            return self.num_particles / run_time

        volumetric_rate = (flow_speed * flow_area).to(liter / second)
        return (self.concentration * volumetric_rate).to(particle / second)

    def __repr__(self):
        if self.num_particles is not None:
            return f"{self.num_particles}"
        if self.concentration is not None:
            return f"{self.concentration}"
        return "Undefined"
@@ -0,0 +1,4 @@
1
+ from .base_class import BasePeakLocator
2
+ from .basic import BasicPeakLocator
3
+ from .moving_average import MovingAverage
4
+ from .derivative import DerivativePeakLocator
@@ -0,0 +1,163 @@
1
+ import numpy as np
2
+ import pandas as pd
3
+ from scipy.integrate import cumulative_trapezoid
4
+ import matplotlib.pyplot as plt
5
+ import pint_pandas
6
+ from abc import ABC, abstractmethod
7
+ from FlowCyPy.units import Quantity
8
+ from scipy.signal import peak_widths
9
+
10
+
11
class BasePeakLocator(ABC):
    """
    A base class to handle common functionality for peak detection,
    including area calculation under peaks.

    Subclasses implement `_compute_algorithm_specific_features` and are
    expected to define `self.rel_height` (used for width measurement).
    """

    # Populated by `detect_peaks()`; one row per detected peak.
    peak_properties: pd.DataFrame = None

    def init_data(self, dataframe: pd.DataFrame) -> None:
        """
        Initialize signal and time data for peak detection.

        Parameters
        ----------
        dataframe : pd.DataFrame
            A DataFrame containing the signal data with columns 'Signal' and 'Time'.

        Raises
        ------
        ValueError
            If the DataFrame is missing required columns or is empty.
        """
        if not {'Signal', 'Time'}.issubset(dataframe.columns):
            raise ValueError("The DataFrame must contain 'Signal' and 'Time' columns.")

        if dataframe.empty:
            raise ValueError("The DataFrame is empty. Please provide valid signal data.")

        # Copy into a fresh frame with a plain integer index so sample
        # positions can be interpolated against later.
        self.data = pd.DataFrame(index=np.arange(len(dataframe)))
        self.data['Signal'] = dataframe['Signal']
        self.data['Time'] = dataframe['Time']

        # Sampling interval — assumes a uniformly sampled time axis.
        self.dt = self.data.Time[1] - self.data.Time[0]

    @abstractmethod
    def _compute_algorithm_specific_features(self) -> None:
        """
        Abstract method for computing features specific to the detection algorithm.
        This must be implemented by subclasses.

        Implementations return (peak_indices, widths_samples, width_heights,
        left_ips, right_ips) as consumed by `detect_peaks`.
        """
        pass

    def detect_peaks(self, compute_area: bool = True) -> pd.DataFrame:
        """
        Detect peaks and compute peak properties.

        Parameters
        ----------
        compute_area : bool, optional
            If True, computes the area under each peak (default is True).

        Returns
        -------
        pd.DataFrame
            A DataFrame containing peak properties (times, heights, widths, etc.).
        """
        peak_indices, widths_samples, width_heights, left_ips, right_ips = self._compute_algorithm_specific_features()

        peak_times = self.data['Time'].values[peak_indices]
        heights = self.data['Signal'].values[peak_indices]
        # Widths come back in sample counts; multiplying by dt gives time units.
        widths = widths_samples * self.dt

        # Store results in `peak_properties` (PintArray keeps units attached).
        self.peak_properties = pd.DataFrame({
            'PeakTimes': peak_times,
            'Heights': heights,
            'Widths': pint_pandas.PintArray(widths, dtype=widths.units),
            'WidthHeights': pint_pandas.PintArray(width_heights, dtype=heights.units),
            'LeftIPs': pint_pandas.PintArray(left_ips * self.dt, dtype=self.dt.units),
            'RightIPs': pint_pandas.PintArray(right_ips * self.dt, dtype=self.dt.units),
        })

        # Compute areas if needed
        if compute_area:
            self._compute_peak_areas()

        return self.peak_properties

    def _compute_peak_areas(self) -> None:
        """
        Computes the areas under the detected peaks using cumulative integration.

        The cumulative integral of the signal is interpolated at the left and right
        interpolated positions of each peak to compute the enclosed area.

        Adds an 'Areas' column to `self.peak_properties`.

        Raises
        ------
        RuntimeError
            If `peak_properties` or `data` has not been initialized.
        """
        if not hasattr(self, 'peak_properties') or self.peak_properties.empty:
            raise RuntimeError("No peaks detected. Run `detect_peaks()` first.")

        if not hasattr(self, 'data') or self.data.empty:
            raise RuntimeError("Signal data is not initialized. Call `init_data()` first.")

        # Compute cumulative integral of the signal (unit magnitudes only;
        # units are re-attached when the column is stored below).
        cumulative = cumulative_trapezoid(
            self.data.Signal.values.quantity.magnitude,
            x=self.data.Time.values.quantity.magnitude,
            initial=0  # Include 0 at the start
        )

        # Interpolate cumulative integral at left and right interpolated positions.
        # NOTE(review): LeftIPs/RightIPs are stored in time units while `xp` is
        # the integer sample index — confirm the intended scaling matches.
        left_cum_integral = np.interp(
            x=self.peak_properties.LeftIPs,
            xp=self.data.index,
            fp=cumulative
        )
        right_cum_integral = np.interp(
            x=self.peak_properties.RightIPs,
            xp=self.data.index,
            fp=cumulative
        )

        # Compute areas under peaks (difference of the cumulative integral)
        areas = right_cum_integral - left_cum_integral

        # Add areas with units to peak properties: signal unit × time unit
        self.peak_properties['Areas'] = pint_pandas.PintArray(
            areas, dtype=self.data.Signal.pint.units * self.data.Time.pint.units
        )

    def _add_to_ax(self, time_unit: str | Quantity, signal_unit: str | Quantity, ax: plt.Axes = None) -> None:
        """
        Plots the signal with detected peaks and FWHM lines.

        Parameters
        ----------
        time_unit : str or Quantity
            Unit for the time axis (e.g., 'microsecond').
        signal_unit : str or Quantity
            Unit for the signal axis (e.g., 'volt').
        ax : plt.Axes
            The matplotlib Axes to plot on. Despite the None default, the
            caller must provide it — no figure is created here.
        """
        # Plot vertical lines at peak times
        for _, row in self.peak_properties.iterrows():
            ax.axvline(row['PeakTimes'].to(time_unit), color='black', linestyle='-', lw=0.8)

        # Delegate algorithm-specific overlays to the subclass hook.
        self._add_custom_to_ax(ax=ax, time_unit=time_unit, signal_unit=signal_unit)

    def _compute_peak_widths(self, peak_indices: list[int], values: np.ndarray) -> tuple:
        """
        Measures peak widths at `self.rel_height` via `scipy.signal.peak_widths`.

        Returns
        -------
        tuple
            (widths_samples, width_heights, left_ips, right_ips), all in
            sample-index coordinates.
        """
        # Compute peak properties; rel_height is defined by the concrete subclass.
        widths_samples, width_heights, left_ips, right_ips = peak_widths(
            x=values,
            peaks=peak_indices,
            rel_height=self.rel_height
        )

        return widths_samples, width_heights, left_ips, right_ips