FlowCyPy 0.7.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (45) hide show
  1. FlowCyPy/__init__.py +13 -0
  2. FlowCyPy/_version.py +16 -0
  3. FlowCyPy/acquisition.py +652 -0
  4. FlowCyPy/classifier.py +208 -0
  5. FlowCyPy/coupling_mechanism/__init__.py +4 -0
  6. FlowCyPy/coupling_mechanism/empirical.py +47 -0
  7. FlowCyPy/coupling_mechanism/mie.py +207 -0
  8. FlowCyPy/coupling_mechanism/rayleigh.py +116 -0
  9. FlowCyPy/coupling_mechanism/uniform.py +40 -0
  10. FlowCyPy/coupling_mechanism.py +205 -0
  11. FlowCyPy/cytometer.py +314 -0
  12. FlowCyPy/detector.py +439 -0
  13. FlowCyPy/directories.py +36 -0
  14. FlowCyPy/distribution/__init__.py +16 -0
  15. FlowCyPy/distribution/base_class.py +79 -0
  16. FlowCyPy/distribution/delta.py +104 -0
  17. FlowCyPy/distribution/lognormal.py +124 -0
  18. FlowCyPy/distribution/normal.py +128 -0
  19. FlowCyPy/distribution/particle_size_distribution.py +132 -0
  20. FlowCyPy/distribution/uniform.py +117 -0
  21. FlowCyPy/distribution/weibull.py +115 -0
  22. FlowCyPy/flow_cell.py +198 -0
  23. FlowCyPy/helper.py +81 -0
  24. FlowCyPy/logger.py +136 -0
  25. FlowCyPy/noises.py +34 -0
  26. FlowCyPy/particle_count.py +127 -0
  27. FlowCyPy/peak_locator/__init__.py +4 -0
  28. FlowCyPy/peak_locator/base_class.py +163 -0
  29. FlowCyPy/peak_locator/basic.py +108 -0
  30. FlowCyPy/peak_locator/derivative.py +143 -0
  31. FlowCyPy/peak_locator/moving_average.py +166 -0
  32. FlowCyPy/physical_constant.py +19 -0
  33. FlowCyPy/plottings.py +269 -0
  34. FlowCyPy/population.py +136 -0
  35. FlowCyPy/populations_instances.py +65 -0
  36. FlowCyPy/scatterer_collection.py +306 -0
  37. FlowCyPy/signal_digitizer.py +90 -0
  38. FlowCyPy/source.py +249 -0
  39. FlowCyPy/units.py +30 -0
  40. FlowCyPy/utils.py +191 -0
  41. FlowCyPy-0.7.0.dist-info/LICENSE +21 -0
  42. FlowCyPy-0.7.0.dist-info/METADATA +252 -0
  43. FlowCyPy-0.7.0.dist-info/RECORD +45 -0
  44. FlowCyPy-0.7.0.dist-info/WHEEL +5 -0
  45. FlowCyPy-0.7.0.dist-info/top_level.txt +1 -0
FlowCyPy/classifier.py ADDED
@@ -0,0 +1,208 @@
1
+ from sklearn.cluster import KMeans
2
+ from sklearn.cluster import DBSCAN
3
+ import pandas as pd
4
+ from typing import List, Dict, Tuple
5
+ import seaborn as sns
6
+ import matplotlib.pyplot as plt
7
+ from MPSPlots.styles import mps
8
+
9
class BaseClassifier:
    """Shared filtering logic for the classifiers in this module.

    Subclasses must set ``self.dataframe``: a DataFrame whose row index carries
    a ``'Detector'`` level (level 0) and whose columns are the peak features
    ('Height', 'Width', 'Area', ...).
    """

    def filter_dataframe(self, features: list, detectors: list = None) -> pd.DataFrame:
        """
        Filter the DataFrame based on the selected features and detectors.

        Parameters
        ----------
        features : list
            List of feature columns to keep. Options include 'Heights', 'Widths', 'Areas'.
        detectors : list, optional
            List of detectors to use. If None, all detectors found in level 0
            of the row index are used.

        Returns
        -------
        pd.DataFrame
            A filtered DataFrame containing only the selected detectors and features.
        """
        if detectors is None:
            # BUGFIX: detectors label *rows* (see the .loc row selection below
            # and the unstack('Detector') performed by KmeansClassifier.run),
            # so the defaults must come from the row index, not the columns.
            detectors = self.dataframe.index.get_level_values(0).unique().tolist()

        return self.dataframe.loc[detectors, features]
37
+
38
+
39
class KmeansClassifier(BaseClassifier):
    """KMeans-based classifier operating on per-detector peak features."""

    def __init__(self, dataframe: pd.DataFrame) -> None:
        """
        Initialize the Classifier.

        Parameters
        ----------
        dataframe : pd.DataFrame
            The input dataframe with multi-index columns.
        """
        self.dataframe = dataframe
        # Initialize labels as 0 (note: this mutates the caller's frame).
        self.dataframe['Label'] = 0

    def run(self, number_of_cluster: int, features: list = None, detectors: list = None, random_state: int = 42) -> pd.DataFrame:
        """
        Run KMeans clustering on the selected features and detectors.

        Parameters
        ----------
        number_of_cluster : int
            Number of clusters for KMeans.
        features : list, optional
            List of features to use for clustering. Options include 'Heights',
            'Widths', 'Areas'. Defaults to ['Height'].
        detectors : list, optional
            List of detectors to use. If None, use all detectors.
        random_state : int, optional
            Random state for KMeans, by default 42.

        Returns
        -------
        pd.DataFrame
            Wide-format (one column per detector) dataframe with an added
            'Label' column holding the cluster assignments.
        """
        # BUGFIX: the original used a mutable default argument (['Height']),
        # which is shared between calls; use None as the sentinel instead.
        if features is None:
            features = ['Height']

        # Filter the DataFrame and pivot so each detector becomes a column.
        sub_dataframe = self.filter_dataframe(features=features, detectors=detectors)
        sub_dataframe = sub_dataframe.unstack('Detector')

        # Ensure data is dequantified if it uses Pint quantities
        if hasattr(sub_dataframe, 'pint'):
            sub_dataframe = sub_dataframe.pint.dequantify().droplevel('unit', axis=1)

        # Drop the feature level, leaving detector names as column labels.
        sub_dataframe = sub_dataframe.droplevel(0, axis=1)

        # Run KMeans
        kmeans = KMeans(n_clusters=number_of_cluster, random_state=random_state)
        sub_dataframe['Label'] = kmeans.fit_predict(sub_dataframe)

        # Keep the labeled frame around for plot().
        self.sub_dataframe = sub_dataframe

        return sub_dataframe

    def plot(self, x_detector: str, y_detector: str) -> None:
        """Joint plot of two detectors from the last run(), colored by cluster label."""
        with plt.style.context(mps):
            sns.jointplot(
                data=self.sub_dataframe,
                x=x_detector,
                hue='Label',
                y=y_detector
            )

            plt.show()
96
+
97
class DBScanClassifier(BaseClassifier):
    """DBSCAN-based classifier operating on peak features."""

    def __init__(self, dataframe: pd.DataFrame) -> None:
        """
        Initialize the DBScanClassifier.

        Parameters
        ----------
        dataframe : pd.DataFrame
            The input dataframe with multi-index columns.
        """
        self.dataframe = dataframe
        self.dataframe['Label'] = -1  # Initialize labels as -1 (noise for DBSCAN)

    def run(self, eps: float = 0.5, min_samples: int = 5, features: list = None, detectors: list = None) -> pd.DataFrame:
        """
        Run DBSCAN clustering on the selected features and detectors.

        Parameters
        ----------
        eps : float, optional
            The maximum distance between two samples for them to be considered as in the same neighborhood, by default 0.5.
        min_samples : int, optional
            The number of samples in a neighborhood for a point to be considered a core point, by default 5.
        features : list, optional
            List of features to use for clustering. Options include 'Heights',
            'Widths', 'Areas'. Defaults to ['Heights'].
        detectors : list, optional
            List of detectors to use. If None, use all detectors.

        Returns
        -------
        pd.DataFrame
            The filtered dataframe with an added 'Label' column (-1 marks noise).
        """
        # BUGFIX: mutable default argument replaced by a None sentinel.
        if features is None:
            features = ['Heights']

        # Filter the DataFrame
        sub_dataframe = self.filter_dataframe(features=features, detectors=detectors)

        # Ensure data is dequantified if it uses Pint quantities
        if hasattr(sub_dataframe, 'pint'):
            sub_dataframe = sub_dataframe.pint.dequantify()

        # BUGFIX: the original rebound `sub_dataframe` to the numpy array and
        # then attempted `array['Label'] = ...`, which raises. Keep the
        # DataFrame and feed DBSCAN a separate numpy view of the (NaN-filled) data.
        data = sub_dataframe.fillna(0).to_numpy()

        # Run DBSCAN
        dbscan = DBSCAN(eps=eps, min_samples=min_samples)
        sub_dataframe['Label'] = dbscan.fit_predict(data)

        return sub_dataframe
140
+
141
+
142
class RangeClassifier:
    """
    A classifier for assigning population labels based on defined ranges.

    Parameters
    ----------
    dataframe : pd.DataFrame
        The input dataframe whose *index* values are classified into ranges.

    Attributes
    ----------
    dataframe : pd.DataFrame
        The dataframe with an added 'Label' column after run().
    ranges : List[Tuple[float, float, str]]
        The list of (lower, upper, label) ranges from the last run().
    """

    def __init__(self, dataframe: pd.DataFrame) -> None:
        """
        Initialize the classifier.

        Parameters
        ----------
        dataframe : pd.DataFrame
            The input dataframe whose index values are classified into ranges.
        """
        self.dataframe = dataframe
        self.ranges = []  # To store the ranges and their labels

    def run(self, ranges: Dict[str, Tuple[float, float]]) -> None:
        """
        Classify the dataframe by assigning population labels based on specified ranges applied to the index.

        Index values falling in [lower, upper) receive the corresponding label;
        values outside every range are left unlabeled (NA). If ranges overlap,
        the later entry wins.

        Parameters
        ----------
        ranges : dict
            A dictionary where keys are population names (labels) and values are tuples
            specifying the (lower, upper) bounds of the range for that population.

        Example
        -------
        >>> ranges = {
        >>>     'Population 0': (0, 100),
        >>>     'Population 1': (100, 150),
        >>>     'Population 2': (150, 200)
        >>> }
        >>> classifier.run(ranges)
        """
        # Record the ranges (the documented `ranges` attribute was never
        # populated in the original implementation).
        self.ranges = [(lower, upper, label) for label, (lower, upper) in ranges.items()]

        # BUGFIX: the original built `conditions`/`labels` and never used them,
        # then labeled via pd.cut with bins [-inf, upper0, upper1, ...], which
        # ignores every *lower* bound (values below the first range were
        # silently given the first label). Apply each [lower, upper) mask instead.
        label_column = pd.Series(pd.NA, index=self.dataframe.index, dtype=object)
        for lower, upper, label in self.ranges:
            mask = (self.dataframe.index >= lower) & (self.dataframe.index < upper)
            label_column[mask] = label

        self.dataframe['Label'] = label_column
208
+
@@ -0,0 +1,4 @@
1
+ from . import uniform
2
+ from . import rayleigh
3
+ from . import mie
4
+ from . import empirical
@@ -0,0 +1,47 @@
1
+ import numpy as np
2
+ from FlowCyPy import ScattererCollection, Detector
3
+ from FlowCyPy.source import BaseBeam
4
+ from FlowCyPy.units import watt, meter
5
+
6
+
7
def compute_detected_signal(source: BaseBeam, detector: Detector, scatterer: ScattererCollection, granularity: float = 1.0, A: float = 1.5, n: float = 2.0) -> float:
    """
    Empirical model for scattering intensity based on particle size, granularity, and detector angle.

    This function models forward scatter (FSC) as proportional to the particle's size squared and
    side scatter (SSC) as proportional to the granularity and modulated by angular dependence
    (1 + A * sin^n(phi)). Granularity is a dimensionless measure of the particle's internal
    complexity or surface irregularities:

    - A default value of 1.0 is used for moderate granularity (e.g., typical white blood cells).
    - Granularity values < 1.0 represent smoother particles with less internal complexity (e.g., bacteria).
    - Granularity values > 1.0 represent particles with higher internal complexity or surface irregularities (e.g., granulocytes).

    Parameters
    ----------
    source : BaseBeam
        The light source (not used by this empirical model; kept for interface
        compatibility with the other coupling mechanisms).
    detector : Detector
        The detector object; its ``phi_angle`` (treated as degrees) selects
        FSC vs SSC behavior and modulates SSC.
    scatterer : ScattererCollection
        The scatterer collection whose dataframe provides the particle sizes.
    granularity : float, optional
        A measure of the particle's internal complexity or surface irregularities (dimensionless).
        Default is 1.0.
    A : float, optional
        Empirical scaling factor for angular dependence. Default is 1.5.
    n : float, optional
        Power of sine function for angular dependence. Default is 2.0.

    Returns
    -------
    float
        The detected scattering intensity (in watts) for each particle.
    """
    size_list = scatterer.dataframe['Size'].pint.to(meter).values.numpy_data

    # Forward scatter is proportional to size^2
    fsc_intensity = size_list**2

    # Side scatter is proportional to granularity and modulated by angular dependence
    ssc_intensity = granularity * (1 + A * np.sin(np.radians(detector.phi_angle))**n) * np.ones_like(size_list)

    # BUGFIX: phi_angle is treated as degrees in this function (np.radians
    # above), but the original compared it against np.radians(10) — a radian
    # threshold. Compare degrees to degrees: detectors within 10 degrees of
    # the beam axis report forward scatter.
    return fsc_intensity * watt if detector.phi_angle < 10 else ssc_intensity * watt
@@ -0,0 +1,207 @@
1
+ import numpy as np
2
+ from FlowCyPy import ScattererCollection, Detector
3
+ from FlowCyPy.source import BaseBeam
4
+ from PyMieSim.experiment.scatterer import Sphere as PMS_SPHERE
5
+ from PyMieSim.experiment.source import PlaneWave
6
+ from PyMieSim.experiment.detector import Photodiode as PMS_PHOTODIODE
7
+ from PyMieSim.experiment import Setup
8
+ from PyMieSim.units import Quantity, degree, watt, AU, hertz
9
+ from FlowCyPy.noises import NoiseSetting
10
+ import pandas as pd
11
+
12
+
13
+
14
def apply_rin_noise(source: BaseBeam, total_size: int, bandwidth: float) -> np.ndarray:
    r"""
    Return the per-particle source amplitudes, with Relative Intensity Noise (RIN) applied when enabled.

    RIN quantifies fluctuations in the laser's intensity relative to its mean
    intensity and is specified as a power spectral density in dB/Hz:

    .. math::
        \text{RIN (dB/Hz)} = 10 \cdot \log_{10}\left(\frac{\text{Noise Power (per Hz)}}{\text{Mean Power}}\right)

    The dB/Hz figure is converted to linear scale, the total noise power scales
    with the detection bandwidth :math:`B`, and the amplitude fluctuation is:

    .. math::
        \sigma_{\text{amplitude}} = \sqrt{10^{\text{RIN}/10} \cdot B} \cdot \text{Amplitude}

    Gaussian noise with mean 0 and this standard deviation is added to the mean
    amplitude. When RIN noise is disabled via ``NoiseSetting``, the amplitudes
    are returned unperturbed.

    Parameters
    ----------
    source : BaseBeam
        The light source containing amplitude and RIN information.
    total_size : int
        The number of particles being simulated.
    bandwidth : float
        The detection bandwidth (converted to Hz internally).

    Returns
    -------
    np.ndarray
        Array of amplitudes, one per particle, with RIN noise applied if enabled.
    """
    base_amplitude = np.ones(total_size) * source.amplitude

    # Guard clause: without RIN noise the mean amplitude is used for every particle.
    noise_enabled = NoiseSetting.include_RIN_noise and NoiseSetting.include_noises
    if not noise_enabled:
        return base_amplitude

    # dB/Hz -> linear power spectral density.
    linear_rin = 10**(source.RIN / 10)

    # Amplitude-noise standard deviation, scaled by the detection bandwidth.
    sigma = np.sqrt(linear_rin * bandwidth.to(hertz).magnitude) * source.amplitude

    # Zero-mean Gaussian fluctuations, re-dimensioned to the amplitude's units.
    fluctuations = np.random.normal(
        loc=0,
        scale=sigma.to(source.amplitude.units).magnitude,
        size=total_size
    ) * source.amplitude.units

    return base_amplitude + fluctuations
97
+
98
+
99
def initialize_scatterer(scatterer_dataframe: pd.DataFrame, source: PlaneWave, medium_refractive_index: Quantity) -> PMS_SPHERE:
    """
    Initializes the scatterer object for the PyMieSim experiment.

    Parameters
    ----------
    scatterer_dataframe : pd.DataFrame
        Event dataframe providing the 'Size' and 'RefractiveIndex' columns
        (pint-backed arrays).
    source : PlaneWave
        The light source for the simulation.
    medium_refractive_index : Quantity
        Refractive index of the surrounding medium, replicated for every particle.

    Returns
    -------
    PMS_SPHERE
        Initialized scatterer for the experiment.

    Raises
    ------
    ValueError
        If the dataframe contains no particles.
    """
    size_list = scatterer_dataframe['Size'].values
    ri_list = scatterer_dataframe['RefractiveIndex'].values

    if len(size_list) == 0:
        raise ValueError("ScattererCollection size list is empty.")

    # Rebuild plain pint Quantities from the pint-pandas extension arrays so
    # PyMieSim receives unit-carrying numpy-backed values.
    size_list = size_list.quantity.magnitude * size_list.units
    ri_list = ri_list.quantity.magnitude * ri_list.units

    return PMS_SPHERE(
        diameter=size_list,
        property=ri_list,
        medium_property=np.ones(len(size_list)) * medium_refractive_index,
        source=source
    )
130
+
131
+
132
def initialize_detector(detector: Detector, total_size: int) -> PMS_PHOTODIODE:
    """
    Builds the PyMieSim photodiode used in the experiment.

    The FlowCyPy detector's geometry (numerical aperture, gamma/phi offsets,
    sampling) is replicated once per simulated particle.

    Parameters
    ----------
    detector : Detector
        The detector object containing configuration data.
    total_size : int
        The number of particles being simulated.

    Returns
    -------
    PMS_PHOTODIODE
        Initialized detector for the experiment.
    """
    ones = np.ones(total_size)

    return PMS_PHOTODIODE(
        NA=detector.numerical_aperture * ones,
        cache_NA=0 * ones * AU,
        gamma_offset=detector.gamma_angle * ones,
        phi_offset=detector.phi_angle * ones,
        polarization_filter=np.nan * ones * degree,
        sampling=detector.sampling * ones
    )
158
+
159
+
160
def compute_detected_signal(source: BaseBeam, detector: Detector, scatterer_dataframe: pd.DataFrame, medium_refractive_index: Quantity) -> np.ndarray:
    """
    Computes the detected signal by analyzing the scattering properties of particles.

    Builds a per-particle PyMieSim experiment (plane-wave source with optional
    RIN noise, spherical scatterers, photodiode detector) and evaluates the
    source-to-detector coupling for every event.

    Parameters
    ----------
    source : BaseBeam
        The light source object containing wavelength, power, and other optical properties.
    detector : Detector
        The detector object containing properties such as numerical aperture and angles.
    scatterer_dataframe : pd.DataFrame
        Event dataframe providing the 'Size' and 'RefractiveIndex' columns.
    medium_refractive_index : Quantity
        Refractive index of the surrounding medium.

    Returns
    -------
    np.ndarray
        Array of coupling values (in watts) for each particle, based on the detected signal.
    """
    size_list = scatterer_dataframe['Size'].values

    # No particles: return an empty, watt-dimensioned array.
    if len(size_list) == 0:
        return np.array([]) * watt

    total_size = len(size_list)
    # Per-particle amplitudes, optionally perturbed by RIN over the digitizer bandwidth.
    amplitude_with_rin = apply_rin_noise(source, total_size, detector.signal_digitizer.bandwidth)

    pms_source = PlaneWave(
        wavelength=np.ones(total_size) * source.wavelength,
        polarization=np.ones(total_size) * 0 * degree,
        amplitude=amplitude_with_rin
    )

    pms_scatterer = initialize_scatterer(scatterer_dataframe, pms_source, medium_refractive_index)
    pms_detector = initialize_detector(detector, total_size)

    # Configure the detector
    # NOTE(review): mode_number/rotation are assigned after construction and
    # __post_init__ is re-invoked by hand to rebuild the detector's internal
    # state — this leans on PyMieSim internals; confirm against the PyMieSim
    # version in use.
    pms_detector.mode_number = ['NC00'] * total_size
    pms_detector.rotation = np.ones(total_size) * 0 * degree
    pms_detector.__post_init__()

    # Set up the experiment
    experiment = Setup(source=pms_source, scatterer=pms_scatterer, detector=pms_detector)

    # Compute coupling values
    coupling_value = experiment.get_sequential('coupling').squeeze()
    # atleast_1d keeps the single-particle case an array after squeeze().
    return np.atleast_1d(coupling_value) * watt
@@ -0,0 +1,116 @@
1
+
2
+ import numpy as np
3
+ from FlowCyPy import ScattererCollection, Detector
4
+ from FlowCyPy.source import BaseBeam
5
+ from FlowCyPy.units import meter, Quantity
6
+ import pandas as pd
7
+
8
+
9
def compute_scattering_cross_section(scatterer_dataframe: pd.DataFrame, source: BaseBeam, detector: Detector) -> np.ndarray:
    r"""
    Computes the Rayleigh scattering cross-section for a spherical particle with angle dependency.

    The Rayleigh scattering cross-section depends on the angle at which the scattered light is observed.
    The total scattering cross-section is modified by a factor of :math:`\sin^2(\phi)`, where :math:`\phi`
    is the angle between the direction of the incident light and the scattered light, as observed by the detector.

    The Rayleigh scattering cross-section is given by the formula:

    .. math::
        \sigma_s(\phi) = \sigma_0 \sin^2(\phi)

    Where:
    - :math:`\sigma_s(\phi)` is the scattering cross-section at angle :math:`\phi` (in m²),
    - :math:`\sigma_0 = \frac{8 \pi}{3} \left( \frac{2 \pi}{\lambda} \right)^4 \left( \frac{n^2 - 1}{n^2 + 2} \right)^2 r^6` is the total Rayleigh scattering cross-section (in m²),
    - :math:`r` is the radius of the particle (in m),
    - :math:`\lambda` is the wavelength of the incident light (in m),
    - :math:`n` is the refractive index of the scatterer (dimensionless),
    - :math:`\phi` is the angle of observation.

    Parameters
    ----------
    scatterer_dataframe : pd.DataFrame
        Event dataframe providing the 'Size' (pint length) and 'RefractiveIndex' columns.
    source : BaseBeam
        An instance of `BaseBeam` containing the laser properties, including the wavelength.
    detector : Detector
        An instance of `Detector` that contains the angle of observation (`phi_angle`).

    Returns
    -------
    np.ndarray
        The angle-dependent Rayleigh scattering cross-section (in square meters, m²).
    """
    # Bare magnitudes: sizes in meters, refractive indices dimensionless.
    size_list = scatterer_dataframe['Size'].pint.to(meter).values.numpy_data
    ri_list = scatterer_dataframe['RefractiveIndex'].values.numpy_data

    # Extract properties
    wavelength = source.wavelength
    phi = detector.phi_angle  # Angle of observation

    # Rayleigh scattering cross-section formula components
    factor_0 = 8 * np.pi / 3
    factor_1 = (2 * np.pi / wavelength) ** 4

    factor_2 = ((ri_list ** 2 - 1) / (ri_list ** 2 + 2)) ** 2

    # Compute the total Rayleigh scattering cross-section (assuming size in meters)
    sigma_0 = factor_0 * factor_1 * factor_2 * size_list ** 6

    # Modify by the angular dependency: sin^2(phi).
    # NOTE(review): the manual pi/180 conversion assumes phi_angle is a plain
    # number in degrees; if it is a pint Quantity in degree, np.sin would
    # convert it a second time — confirm phi_angle's type.
    cross_section = sigma_0 * np.sin(phi * np.pi / 180) ** 2

    # NOTE(review): .magnitude drops the composite units carried by sigma_0
    # (wavelength is unit-bearing while size_list is a bare magnitude) and
    # re-stamps the result as m² — verify this dimensional bookkeeping.
    return cross_section.magnitude * meter**2
64
+
65
+
66
def compute_detected_signal(source: BaseBeam, detector: Detector, scatterer_dataframe: pd.DataFrame, medium_refractive_index: Quantity) -> np.ndarray:
    r"""
    Computes the power detected by a detector from a Rayleigh scattering event.

    The power scattered by a particle is proportional to the power density of the incident light
    and the scattering cross-section of the particle:

    .. math::
        P_s = I_{\text{incident}} \cdot \sigma_s

    The power detected by the detector depends on the solid angle subtended by the detector
    and the total solid angle over which the power is scattered (assumed to be :math:`4\pi` steradians):

    .. math::
        P_{\text{detected}} = P_s \cdot \frac{\Omega}{4 \pi}

    where :math:`\Omega \approx \mathrm{NA}^2` is the detector's solid angle
    approximated from its numerical aperture. No detector-efficiency factor is
    applied in this implementation.

    Parameters
    ----------
    source : BaseBeam
        An instance of `BaseBeam` containing the laser properties, including the optical power and waist.
    detector : Detector
        An instance of `Detector` containing the detector properties, including numerical aperture.
    scatterer_dataframe : pd.DataFrame
        Event dataframe providing the 'Size' and 'RefractiveIndex' columns.
    medium_refractive_index : Quantity
        Refractive index of the medium (currently unused by the Rayleigh model;
        accepted for interface compatibility with the other coupling mechanisms).

    Returns
    -------
    np.ndarray
        The power detected by the detector for each particle (in watts, W).
    """
    scattering_cross_section = compute_scattering_cross_section(
        source=source,
        scatterer_dataframe=scatterer_dataframe,
        detector=detector
    )

    # Calculate incident power density at the waist of a Gaussian beam.
    incident_power_density = (2 * source.optical_power) / (np.pi * source.waist ** 2)

    # Calculate the scattered power using the scattering cross-section
    scattered_power = incident_power_density * scattering_cross_section

    # Detector captures a portion of scattered power proportional to its solid
    # angle over 4π steradians; Ω ≈ NA² is a small-angle approximation.
    solid_angle = detector.numerical_aperture ** 2
    detected_power = scattered_power * (solid_angle / (4 * np.pi))

    return detected_power
@@ -0,0 +1,40 @@
1
+ import numpy as np
2
+ from FlowCyPy import ScattererCollection, Detector
3
+ from FlowCyPy.units import ureg
4
+ from FlowCyPy.source import BaseBeam
5
+
6
+
7
+ def compute_detected_signal(source: BaseBeam, detector: Detector, scatterer_dataframe: ScattererCollection, medium_refractive_index) -> np.ndarray:
8
+ r"""
9
+ Computes the power detected by a detector from a uniform distribution.
10
+
11
+ The power scattered by a particle is proportional to the power density of the incident light
12
+ and the scattering cross-section of the particle:
13
+
14
+ .. math::
15
+ P_s = I_{\text{incident}} \cdot \sigma_s
16
+
17
+ The power detected by the detector depends on the solid angle subtended by the detector
18
+ and the total solid angle over which the power is scattered (assumed to be \( 4\pi \) steradians):
19
+
20
+ .. math::
21
+ P_{\text{detected}} = P_s \cdot \frac{\Omega}{4 \pi} \cdot \eta
22
+
23
+ Where:
24
+ - :math:`P_{\text{detected}}` is the power detected by the detector (in watts, W).
25
+ - :math:`\Omega` is the solid angle subtended by the detector (in steradians, sr).
26
+ - :math:`\eta` is the detector efficiency (dimensionless, between 0 and 1).
27
+
28
+ Parameters
29
+ ----------
30
+ source : BaseBeam
31
+ An instance of `BaseBeam` containing the laser properties, including the optical power and numerical aperture.
32
+ detector : Detector
33
+ An instance of `Detector` containing the detector properties, including numerical aperture and responsitivity.
34
+
35
+ Returns
36
+ -------
37
+ float
38
+ The power detected by the detector (in watts, W).
39
+ """
40
+ return np.ones(len(scatterer_dataframe)) * ureg.watt