FlowCyPy 0.7.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (45)
  1. FlowCyPy/__init__.py +13 -0
  2. FlowCyPy/_version.py +16 -0
  3. FlowCyPy/acquisition.py +652 -0
  4. FlowCyPy/classifier.py +208 -0
  5. FlowCyPy/coupling_mechanism/__init__.py +4 -0
  6. FlowCyPy/coupling_mechanism/empirical.py +47 -0
  7. FlowCyPy/coupling_mechanism/mie.py +207 -0
  8. FlowCyPy/coupling_mechanism/rayleigh.py +116 -0
  9. FlowCyPy/coupling_mechanism/uniform.py +40 -0
  10. FlowCyPy/coupling_mechanism.py +205 -0
  11. FlowCyPy/cytometer.py +314 -0
  12. FlowCyPy/detector.py +439 -0
  13. FlowCyPy/directories.py +36 -0
  14. FlowCyPy/distribution/__init__.py +16 -0
  15. FlowCyPy/distribution/base_class.py +79 -0
  16. FlowCyPy/distribution/delta.py +104 -0
  17. FlowCyPy/distribution/lognormal.py +124 -0
  18. FlowCyPy/distribution/normal.py +128 -0
  19. FlowCyPy/distribution/particle_size_distribution.py +132 -0
  20. FlowCyPy/distribution/uniform.py +117 -0
  21. FlowCyPy/distribution/weibull.py +115 -0
  22. FlowCyPy/flow_cell.py +198 -0
  23. FlowCyPy/helper.py +81 -0
  24. FlowCyPy/logger.py +136 -0
  25. FlowCyPy/noises.py +34 -0
  26. FlowCyPy/particle_count.py +127 -0
  27. FlowCyPy/peak_locator/__init__.py +4 -0
  28. FlowCyPy/peak_locator/base_class.py +163 -0
  29. FlowCyPy/peak_locator/basic.py +108 -0
  30. FlowCyPy/peak_locator/derivative.py +143 -0
  31. FlowCyPy/peak_locator/moving_average.py +166 -0
  32. FlowCyPy/physical_constant.py +19 -0
  33. FlowCyPy/plottings.py +269 -0
  34. FlowCyPy/population.py +136 -0
  35. FlowCyPy/populations_instances.py +65 -0
  36. FlowCyPy/scatterer_collection.py +306 -0
  37. FlowCyPy/signal_digitizer.py +90 -0
  38. FlowCyPy/source.py +249 -0
  39. FlowCyPy/units.py +30 -0
  40. FlowCyPy/utils.py +191 -0
  41. FlowCyPy-0.7.0.dist-info/LICENSE +21 -0
  42. FlowCyPy-0.7.0.dist-info/METADATA +252 -0
  43. FlowCyPy-0.7.0.dist-info/RECORD +45 -0
  44. FlowCyPy-0.7.0.dist-info/WHEEL +5 -0
  45. FlowCyPy-0.7.0.dist-info/top_level.txt +1 -0
FlowCyPy/flow_cell.py ADDED
@@ -0,0 +1,198 @@
1
+ from typing import List
2
+ from FlowCyPy.units import meter, second, particle
3
+
4
+ from PyMieSim.units import Quantity
5
+ import pandas as pd
6
+ from tabulate import tabulate
7
+ from pydantic.dataclasses import dataclass
8
+ from pydantic import field_validator
9
+ from pint_pandas import PintType, PintArray
10
+ from FlowCyPy.source import BaseBeam
11
+ from FlowCyPy.population import Population
12
+ from FlowCyPy.scatterer_collection import ScattererCollection
13
+ import pandas
14
+ import numpy
15
+ import warnings
16
+
17
# Shared configuration for the pydantic dataclasses in this module:
# allow pint/other arbitrary types, keyword-only construction, slotted
# instances, and rejection of unknown fields.
config_dict = {
    'arbitrary_types_allowed': True,
    'kw_only': True,
    'slots': True,
    'extra': 'forbid',
}
23
+
24
+
25
@dataclass(config=config_dict)
class FlowCell(object):
    """
    Models the flow parameters in a flow cytometer, including flow speed, flow area,
    and particle interactions. This class interacts with ScattererCollection to simulate
    the flow of particles through the cytometer.

    Parameters
    ----------
    flow_speed : Quantity
        The speed of the flow in meters per second (m/s).
    flow_area : Quantity
        The cross-sectional area of the flow tube in square meters (m²).
    scheme : str
        Event-timing strategy. 'poisson' (default) keeps exponentially
        distributed inter-arrival times; 'uniform-sequential' and
        'uniform-random' overwrite them with evenly spaced times (shuffled
        for the latter). Any other value behaves like 'poisson' (the
        overwrite branch in `generate_event_dataframe` is simply skipped).
    source : BaseBeam
        Optional illumination source attached to the flow cell.
    """
    flow_speed: Quantity
    flow_area: Quantity
    scheme: str = 'poisson'

    source: BaseBeam = None

    def __post_init__(self):
        """Coerce the flow parameters into pint quantities with SI units."""
        # NOTE(review): wrapping a value that may already be a Quantity in
        # Quantity(value, unit) assumes pint interprets/converts it
        # consistently — confirm this does not misread the caller's units.
        self.flow_speed = Quantity(self.flow_speed, meter / second)
        self.flow_area = Quantity(self.flow_area, meter ** 2)

    def get_volume(self, run_time: Quantity) -> Quantity:
        """Return the total sampled volume: flow_area * flow_speed * run_time."""
        return self.flow_area * self.flow_speed * run_time

    @field_validator('flow_area')
    def _validate_flow_area(cls, value):
        """
        Validates that the flow area is provided in square meters.

        Parameters
        ----------
        value : Quantity
            The flow area to validate.

        Returns
        -------
        Quantity
            The validated flow area.

        Raises
        ------
        ValueError
            If the flow area is not expressed in square meters.
        """
        if not value.check(meter ** 2):
            raise ValueError(f"flow_area must be in meter ** 2, but got {value.units}")
        return value

    def get_population_sampling(self, run_time: Quantity, scatterer_collection: ScattererCollection) -> list[Quantity]:
        """
        Return, for each population in the collection, the expected number of
        events over `run_time`, computed from this cell's flow area and speed.

        Parameters
        ----------
        run_time : Quantity
            The total duration of the experiment.
        scatterer_collection : ScattererCollection
            The collection whose populations are sampled.

        Returns
        -------
        list of Quantity
            One event count per population, in the same order as
            `scatterer_collection.populations`.
        """
        population_sampling = [
            p.particle_count.calculate_number_of_events(
                flow_area=self.flow_area,
                flow_speed=self.flow_speed,
                run_time=run_time
            ) for p in scatterer_collection.populations
        ]

        return population_sampling

    def generate_event_dataframe(self, populations: List[Population], run_time: Quantity) -> pd.DataFrame:
        """
        Generates a DataFrame of event times for each population based on the specified scheme.

        Parameters
        ----------
        populations : List[Population]
            A list of populations for which to generate event times.
        run_time : Quantity
            The total duration of the experiment.

        Returns
        -------
        pd.DataFrame
            A MultiIndex DataFrame (levels: Population, Index) with a 'Time'
            column of event times for each population.
        """
        # Event counts and initial times always come from the Poisson model;
        # the uniform schemes below only overwrite the 'Time' column.
        population_event_frames = [
            self._generate_poisson_events(population=population, run_time=run_time)
            for population in populations
        ]

        # Combine the DataFrames with population names as keys
        event_dataframe = pd.concat(population_event_frames, keys=[pop.name for pop in populations])
        event_dataframe.index.names = ["Population", "Index"]

        # Handle the scheme for event timing
        if self.scheme.lower() in ['uniform-random', 'uniform-sequential']:
            total_events = len(event_dataframe)
            start_time = 0 * run_time.units
            end_time = run_time
            # NOTE(review): raises ZeroDivisionError when no events were
            # generated (total_events == 0) — confirm callers guard this.
            time_interval = (end_time - start_time) / total_events
            evenly_spaced_times = numpy.arange(0, total_events) * time_interval

            if self.scheme.lower() == 'uniform-random':
                # In-place shuffle of the underlying magnitudes; units are untouched.
                numpy.random.shuffle(evenly_spaced_times.magnitude)  # Shuffle times for random spacing

            # Assign the computed times to the DataFrame
            event_dataframe['Time'] = PintArray(evenly_spaced_times.to('second'))

        return event_dataframe

    def _generate_poisson_events(self, run_time: Quantity, population: Population) -> pd.DataFrame:
        r"""
        Generate particle arrival times over the entire experiment duration based on a Poisson process.

        In flow cytometry, the particle arrival times can be modeled as a Poisson process, where the time
        intervals between successive particle arrivals follow an exponential distribution. The average rate
        of particle arrivals (the particle flux) is given by:

        .. math::
            \text{Particle Flux} = \rho \cdot v \cdot A

        where:
        - :math:`\rho` is the scatterer density (particles per cubic meter),
        - :math:`v` is the flow speed (meters per second),
        - :math:`A` is the cross-sectional area of the flow tube (square meters).

        The number of particles arriving in a given time interval follows a Poisson distribution, and the
        time between successive arrivals follows an exponential distribution. The mean inter-arrival time
        is the inverse of the particle flux:

        .. math::
            \Delta t \sim \text{Exponential}(1/\lambda)

        where:
        - :math:`\Delta t` is the time between successive particle arrivals,
        - :math:`\lambda` is the particle flux (particles per second).

        Steps:
        1. Compute the particle flux, which is the average number of particles passing through the detection
        region per second.
        2. Calculate the expected number of particles over the entire experiment duration.
        3. Generate random inter-arrival times using the exponential distribution.
        4. Compute the cumulative arrival times by summing the inter-arrival times.
        5. Ensure that all arrival times fall within the total experiment duration.

        Returns
        -------
        pd.DataFrame
            A single-column ('Time') DataFrame of particle arrival times for the
            experiment duration, based on the Poisson process. Empty when the
            expected particle count truncates to zero (a warning is emitted).
        """
        # Step 1: Compute the average particle flux (particles per second)
        particle_flux = population.particle_count.compute_particle_flux(
            flow_speed=self.flow_speed,
            flow_area=self.flow_area,
            run_time=run_time
        )

        # Step 2: Calculate the expected number of particles over the entire experiment
        expected_particles = particle_flux * run_time
        # expected_particles = population.n_events

        # Step 3: Generate inter-arrival times (exponentially distributed).
        # The dimensionless samples are divided by (flux_units / particle) to
        # re-attach time units: with flux in particle/second this yields seconds.
        inter_arrival_times = numpy.random.exponential(
            scale=1 / particle_flux.magnitude,
            size=int(expected_particles.magnitude)
        ) / (particle_flux.units / particle)

        # Step 4: Compute cumulative arrival times
        arrival_times = numpy.cumsum(inter_arrival_times)

        # Step 5: Limit the arrival times to the total experiment duration
        arrival_times = arrival_times[arrival_times <= run_time]

        dataframe = pd.DataFrame()

        dataframe['Time'] = PintArray(arrival_times, dtype=arrival_times.units)

        if len(dataframe) == 0:
            warnings.warn("Population has been initialized with 0 events.")

        return dataframe
FlowCyPy/helper.py ADDED
@@ -0,0 +1,81 @@
1
+ from typing import Callable
2
+ import matplotlib.pyplot as plt
3
+ from MPSPlots.styles import mps
4
+
5
+
6
def plot_helper(function: Callable) -> Callable:
    """
    A decorator that wraps an axis-drawing method with shared plotting plumbing:
    axes/figure creation under the `mps` style, legend display, optional saving,
    and showing of the figure.

    Parameters
    ----------
    function : Callable
        The plotting method being decorated. It must accept `self` and an `ax`
        keyword argument, and may return a value which is passed through.

    Returns
    -------
    Callable
        The wrapped plotting method (metadata preserved via `functools.wraps`).
    """
    from functools import wraps  # local import keeps this fix self-contained

    @wraps(function)  # BUGFIX: preserve the decorated function's name/docstring
    def wrapper(self, ax: plt.Axes = None, show: bool = True, save_filename: str = None, figure_size: tuple = None, **kwargs) -> plt.Figure:
        """
        Wrapped plotting call.

        Parameters
        ----------
        self : object
            The instance of the class calling this method.
        ax : plt.Axes, optional
            A matplotlib Axes object to draw the plot on. If None, a new figure
            and axes are created under the `mps` style. Default is None.
        show : bool, optional
            Whether to display the plot. If False, the plot can still be saved
            or returned. Default is True.
        save_filename : str, optional
            A file path to save the figure. If None, the figure is not saved.
        figure_size : tuple, optional
            Figure size forwarded to `plt.subplots` when a new figure is made.
        **kwargs : dict
            Additional keyword arguments passed to the decorated function.

        Returns
        -------
        object
            Whatever the decorated plotting function returns.
        """
        if ax is None:
            with plt.style.context(mps):
                figure, ax = plt.subplots(1, 1, figsize=figure_size)

        else:
            figure = ax.get_figure()

        output = function(self, ax=ax, **kwargs)

        # BUGFIX: the original fetched the legend labels but never rendered the
        # legend, despite documenting that it would. Add it only when labels exist.
        _, labels = ax.get_legend_handles_labels()
        if labels:
            ax.legend()

        if save_filename:
            figure.savefig(save_filename)

        if show:
            plt.show()

        return output

    return wrapper
FlowCyPy/logger.py ADDED
@@ -0,0 +1,136 @@
1
+ import logging
2
+ from tabulate import tabulate
3
+ import pandas as pd
4
+ from typing import List, Union, Optional
5
+ from FlowCyPy import units
6
+
7
+
8
class EventCorrelatorLogger:
    """
    Logs key statistics and properties for the EventCorrelator class, including peak detection
    statistics for each detector and coincident event information.

    Parameters
    ----------
    correlator : EventCorrelator
        An instance of the EventCorrelator class to log statistics for.
    """

    def __init__(self, correlator: object):
        """
        Initializes the EventCorrelatorLogger with the correlator instance.

        Parameters
        ----------
        correlator : EventCorrelator
            An instance of EventCorrelator to log properties and statistics for.
        """
        self.correlator = correlator
        # Detectors come from the correlator's parent cytometer.
        self.detectors = correlator.cytometer.detectors
        # Coincidence data may not exist yet when the logger is built.
        self.coincidence = getattr(correlator, "coincidence", None)

    def log_statistics(self, table_format: str = "grid") -> None:
        """
        Logs statistics for each detector, including number of peaks, time between peaks,
        and peak times.

        Parameters
        ----------
        table_format : str, optional
            The format for the table display (default is 'grid').
        """
        logging.info("\n=== Detector Statistics ===")

        table_data = [self._get_detector_stats(detector) for detector in self.detectors]
        # BUGFIX: the original headers listed a seventh column
        # ('Measured Concentration') while each data row carries six values,
        # misaligning the rendered table. The concentration column is disabled
        # (see the commented-out code in _get_detector_stats), so its header
        # was dropped to match the rows.
        headers = [
            "Detector",
            "Number of Peaks",
            "First Peak Time",
            "Last Peak Time",
            "Avg Time Between Peaks",
            "Min Time Between Peaks",
        ]

        formatted_table = tabulate(table_data, headers=headers, tablefmt=table_format, floatfmt=".4f")
        logging.info("\n" + formatted_table)

    def _get_detector_stats(self, detector) -> List[Union[str, int, str]]:
        """
        Computes statistics for a single detector.

        Parameters
        ----------
        detector : object
            A detector object with peak detection data.

        Returns
        -------
        list
            [detector name, number of peaks, first peak time, last peak time,
            average time between peaks, minimum time between peaks].
        """
        # Select this detector's rows from the correlator's MultiIndex dataframe.
        group = self.correlator.dataframe.xs(detector.name, level="Detector")
        num_events = len(group)

        if num_events > 1:
            times = group['PeakTimes'].sort_values()
            time_diffs = times.diff().dropna()
            # `to_compact()` picks a human-friendly pint prefix; `:.4~P` is
            # pint's abbreviated pretty format with 4 significant digits.
            avg_time_between_peaks = f"{time_diffs.mean().to_compact():.4~P}"
            min_time_between_peaks = f"{time_diffs.min().to_compact():.4~P}"
            # TODO: measured-concentration reporting is disabled pending run_time wiring:
            # volume = self.correlator.cytometer.flow_cell.get_volume(self.run_time)
            # measured_concentration = num_events * units.particle / volume.to(units.milliliter)
        else:
            avg_time_between_peaks = "N/A"
            min_time_between_peaks = "N/A"

        first_peak_time = f"{group['PeakTimes'].min().to_compact():.4~P}" if num_events > 0 else "N/A"
        last_peak_time = f"{group['PeakTimes'].max().to_compact():.4~P}" if num_events > 0 else "N/A"

        return [detector.name, num_events, first_peak_time, last_peak_time, avg_time_between_peaks, min_time_between_peaks]

    def log_coincidence_statistics(self, table_format: str = "grid") -> None:
        """
        Logs statistics about coincident events detected between detectors.

        Parameters
        ----------
        table_format : str, optional
            The format for the table display (default is 'grid').
        """
        if self.coincidence is None or self.coincidence.empty:
            logging.warning("No coincidence events to log.")
            return

        logging.info("\n=== Coincidence Event Statistics ===")

        table_data = self._get_coincidence_stats()
        headers = ["Detector 1 Event", "Detector 2 Event", "Time Difference"]

        formatted_table = tabulate(table_data, headers=headers, tablefmt=table_format, floatfmt=".4f")
        logging.info("\n" + formatted_table)

    def _get_coincidence_stats(self) -> List[List[Union[str, float]]]:
        """
        Extracts statistics about coincident events.

        Returns
        -------
        list
            One [detector-1 peak time, detector-2 peak time, time difference]
            entry per coincident event pair.
        """
        coinc_df = self.coincidence.reset_index()
        # Columns are a MultiIndex of (detector name, field name).
        time_diffs = (
            coinc_df[self.detectors[0].name, 'PeakTimes'] -
            coinc_df[self.detectors[1].name, 'PeakTimes']
        ).abs()

        # BUGFIX: the original iterated with `itertuples()`, whose namedtuple
        # rows do not support tuple-key indexing (`row[name, 'PeakTimes']`
        # raised TypeError). `iterrows()` yields Series rows, which do.
        return [
            [
                row[self.detectors[0].name, 'PeakTimes'],
                row[self.detectors[1].name, 'PeakTimes'],
                time_diff.to_compact()
            ]
            for (_, row), time_diff in zip(coinc_df.iterrows(), time_diffs)
        ]
FlowCyPy/noises.py ADDED
@@ -0,0 +1,34 @@
1
class RestrictiveMeta(type):
    """
    Metaclass that locks a class's attribute set after creation.

    Rebinding an attribute the class already knows (directly or through its
    MRO) works normally; assigning to an unknown name raises AttributeError.
    """

    def __setattr__(cls, name, value):
        # `hasattr` walks the MRO, so inherited names remain assignable.
        if hasattr(cls, name):
            super().__setattr__(name, value)
        else:
            raise AttributeError(f"Cannot set unknown class-level attribute '{name}' in {cls.__name__}.")
6
+
7
+
8
class NoiseSetting(metaclass=RestrictiveMeta):
    """
    Singleton container of global flags selecting which noise sources are
    simulated. The RestrictiveMeta metaclass blocks creation of new
    class-level flag names.
    """
    # Cached singleton instance.
    _instance = None

    def __new__(cls, *args, **kwargs):
        # Classic singleton: every instantiation returns the same object.
        if cls._instance is None:
            cls._instance = super().__new__(cls)
        return cls._instance

    # Backing field for the `include_noises` master switch.
    _include_noises = True
    # Individual noise-source switches (class-level defaults).
    include_shot_noise = True
    include_dark_current_noise = True
    include_thermal_noise = True
    include_RIN_noise = True

    @property
    def include_noises(self):
        """Master switch; reads the backing `_include_noises` flag."""
        return self._include_noises

    @include_noises.setter
    def include_noises(self, value):
        # NOTE(review): this setter only fires on *instance* assignment
        # (`NoiseSetting().include_noises = False`). Class-level assignment
        # (`NoiseSetting.include_noises = False`) goes through RestrictiveMeta,
        # which permits it (the name exists) and REPLACES this property object,
        # so the cascade below never runs — confirm the intended usage pattern.
        self._include_noises = value
        # Dynamically update other noise components
        if not value:
            # NOTE(review): these assignments create *instance* attributes that
            # shadow the class-level flags; code reading the flags off the
            # class rather than the singleton instance will still see True.
            self.include_shot_noise = False
            self.include_dark_current_noise = False
            self.include_thermal_noise = False
            self.include_RIN_noise = False
@@ -0,0 +1,127 @@
1
+ from FlowCyPy.units import Quantity, particle, liter, second
2
+
3
class ParticleCount:
    """
    Represents the quantity of particles in a flow, expressed either as a
    concentration (particles per unit volume) or as a fixed number of particles.

    Exactly one of the attributes ``concentration`` / ``num_particles`` is set;
    the methods dispatch on which one exists via ``hasattr``.

    Parameters
    ----------
    value : Quantity or ParticleCount
        A concentration (e.g. particles/liter), a fixed particle count, or an
        existing ParticleCount whose state is copied.
    """

    def __init__(self, value: "Quantity"):
        """
        Initializes the ParticleCount with either a concentration or a fixed
        number of particles, or copies the state of another ParticleCount.

        Parameters
        ----------
        value : Quantity or ParticleCount
            A Quantity representing either a concentration (particles per unit
            volume) or a fixed number of particles, or a ParticleCount to copy.

        Raises
        ------
        ValueError
            If the input value does not have the expected dimensionality.
        """
        if isinstance(value, ParticleCount):
            # BUGFIX: the original code did `self = value; return`, which only
            # rebinds the local name `self` and leaves the new instance with no
            # state at all. Copy the defining attribute instead.
            if hasattr(value, 'concentration'):
                self.concentration = value.concentration
            else:
                self.num_particles = value.num_particles
            return

        if value.check(particle):
            # Fixed number of particles
            self.num_particles = value.to(particle)

        elif value.check(particle / liter):
            # Concentration of particles
            self.concentration = value.to(particle / liter)
        else:
            raise ValueError(
                "Value must have dimensions of either 'particles' or 'particles per unit volume'."
            )

    def calculate_number_of_events(self, flow_area: "Quantity", flow_speed: "Quantity", run_time: "Quantity") -> "Quantity":
        """
        Calculates the total number of particles crossing the detection region.

        Parameters
        ----------
        flow_area : Quantity
            The cross-sectional area of the flow tube.
        flow_speed : Quantity
            The speed of the flow.
        run_time : Quantity
            The duration of the acquisition.

        Returns
        -------
        Quantity
            The total number of particles with the unit of 'particle'. When a
            fixed particle count was supplied, that count is returned unchanged.

        Raises
        ------
        ValueError
            If neither a concentration nor a particle count is defined.
        """
        flow_volume = flow_area * flow_speed * run_time

        if hasattr(self, 'concentration'):
            return (self.concentration * flow_volume).to(particle)
        elif hasattr(self, 'num_particles'):
            return self.num_particles
        else:
            raise ValueError("Either a number of particles or a concentration must be defined.")

    def compute_particle_flux(self, flow_speed: "Quantity", flow_area: "Quantity", run_time: "Quantity") -> "Quantity":
        """
        Computes the particle flux in the flow system, accounting for flow speed,
        flow area, and either the particle concentration or a predefined number
        of particles.

        Parameters
        ----------
        flow_speed : Quantity
            The speed of the flow (e.g., in meters per second).
        flow_area : Quantity
            The cross-sectional area of the flow tube (e.g., in square meters).
        run_time : Quantity
            The total duration of the flow (e.g., in seconds).

        Returns
        -------
        Quantity
            The particle flux in particles per second (particle/second).
        """
        if hasattr(self, 'num_particles'):
            # A fixed count spread evenly over the run time.
            return self.num_particles / run_time

        flow_volume_per_second = (flow_speed * flow_area).to(liter / second)
        particle_flux = (self.concentration * flow_volume_per_second).to(particle / second)
        return particle_flux

    def __repr__(self):
        if hasattr(self, 'concentration'):
            return f"{self.concentration}"
        else:
            return f"{self.num_particles}"

    def __truediv__(self, factor: float):
        # NOTE: mutates this instance in place and returns it (kept for
        # backward compatibility with existing callers).
        if hasattr(self, 'concentration'):
            self.concentration /= factor
        else:
            self.num_particles /= factor

        return self

    def __mul__(self, factor: float):
        # NOTE: mutates this instance in place and returns it (kept for
        # backward compatibility with existing callers).
        if hasattr(self, 'concentration'):
            self.concentration *= factor
        else:
            self.num_particles *= factor

        return self

    @property
    def value(self):
        """Whichever state is defined: the concentration or the fixed count."""
        if hasattr(self, 'concentration'):
            return self.concentration
        else:
            return self.num_particles
@@ -0,0 +1,4 @@
1
+ from .base_class import BasePeakLocator
2
+ from .basic import BasicPeakLocator
3
+ from .moving_average import MovingAverage
4
+ from .derivative import DerivativePeakLocator