FlowCyPy 0.8.0__tar.gz → 0.8.1__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {flowcypy-0.8.0 → flowcypy-0.8.1}/FlowCyPy/_version.py +2 -2
- {flowcypy-0.8.0 → flowcypy-0.8.1}/FlowCyPy/acquisition.py +4 -178
- {flowcypy-0.8.0 → flowcypy-0.8.1}/FlowCyPy/cytometer.py +1 -1
- {flowcypy-0.8.0 → flowcypy-0.8.1}/FlowCyPy/dataframe_subclass.py +158 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/FlowCyPy/flow_cell.py +4 -1
- {flowcypy-0.8.0 → flowcypy-0.8.1}/FlowCyPy/helper.py +3 -0
- flowcypy-0.8.1/FlowCyPy/triggered_acquisition.py +200 -0
- flowcypy-0.8.1/FlowCyPy/utils.py +164 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/FlowCyPy.egg-info/PKG-INFO +1 -1
- {flowcypy-0.8.0 → flowcypy-0.8.1}/PKG-INFO +1 -1
- {flowcypy-0.8.0 → flowcypy-0.8.1}/developments/scripts/temp.py +7 -29
- {flowcypy-0.8.0 → flowcypy-0.8.1}/tests/test_flow_cytometer.py +1 -1
- flowcypy-0.8.0/FlowCyPy/triggered_acquisition.py +0 -90
- flowcypy-0.8.0/FlowCyPy/utils.py +0 -85
- {flowcypy-0.8.0 → flowcypy-0.8.1}/.flake8 +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/.github/dependabot.yml +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/.github/workflows/deploy_PyPi.yml +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/.github/workflows/deploy_anaconda.yml +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/.github/workflows/deploy_coverage.yml +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/.github/workflows/deploy_documentation.yml +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/.gitignore +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/FlowCyPy/__init__.py +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/FlowCyPy/classifier.py +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/FlowCyPy/coupling_mechanism/__init__.py +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/FlowCyPy/coupling_mechanism/empirical.py +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/FlowCyPy/coupling_mechanism/mie.py +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/FlowCyPy/coupling_mechanism/rayleigh.py +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/FlowCyPy/coupling_mechanism/uniform.py +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/FlowCyPy/detector.py +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/FlowCyPy/directories.py +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/FlowCyPy/distribution/__init__.py +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/FlowCyPy/distribution/base_class.py +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/FlowCyPy/distribution/delta.py +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/FlowCyPy/distribution/lognormal.py +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/FlowCyPy/distribution/normal.py +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/FlowCyPy/distribution/particle_size_distribution.py +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/FlowCyPy/distribution/uniform.py +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/FlowCyPy/distribution/weibull.py +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/FlowCyPy/noises.py +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/FlowCyPy/particle_count.py +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/FlowCyPy/peak_locator/__init__.py +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/FlowCyPy/peak_locator/base_class.py +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/FlowCyPy/peak_locator/basic.py +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/FlowCyPy/peak_locator/derivative.py +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/FlowCyPy/peak_locator/moving_average.py +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/FlowCyPy/physical_constant.py +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/FlowCyPy/population.py +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/FlowCyPy/populations_instances.py +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/FlowCyPy/scatterer_collection.py +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/FlowCyPy/signal_digitizer.py +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/FlowCyPy/source.py +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/FlowCyPy/units.py +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/FlowCyPy.egg-info/SOURCES.txt +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/FlowCyPy.egg-info/dependency_links.txt +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/FlowCyPy.egg-info/requires.txt +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/FlowCyPy.egg-info/top_level.txt +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/LICENSE +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/README.rst +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/developments/Deep_peak_square.ipynb +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/developments/Physics-informed_AI.ipynb +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/developments/ROI_analysis-Copy1.ipynb +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/developments/ROI_analysis.ipynb +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/developments/Untitled.ipynb +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/developments/Untitled1.ipynb +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/developments/Untitled2.ipynb +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/developments/ai_dev2.ipynb +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/developments/best_model.h5 +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/developments/best_model.keras +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/developments/concentration_validation.py +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/developments/doc/canto_spec.md +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/developments/doc/internship.pdf +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/developments/get_started.md +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/developments/grad_cam_output.png +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/developments/image.png +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/developments/model.png +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/developments/model_example.png +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/developments/output_file.prof +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/developments/scripts/AI_peak_detection.py +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/developments/scripts/concentration_comparison.py +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/developments/scripts/create_images.py +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/developments/scripts/data_analysis.py +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/developments/scripts/dev_beads_analysis.py +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/developments/scripts/dev_canto.py +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/developments/scripts/dev_classifier.py +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/developments/scripts/dev_shot_noise_check.py +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/developments/scripts/dev_stats_0.py +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/developments/scripts/dev_stats_1.py +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/developments/scripts/dev_stats_2.py +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/developments/scripts/dev_study_on_ri.py +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/developments/scripts/dev_study_on_size.py +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/developments/scripts/mat2csv.py +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/developments/scripts/profiler.py +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/developments/test.pdf +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/docs/Makefile +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/docs/examples/extras/README.rst +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/docs/examples/extras/distributions.py +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/docs/examples/extras/flow_cytometer_signal.py +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/docs/examples/extras/scatterer_distribution.py +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/docs/examples/extras/signal_acquisition.py +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/docs/examples/noise_sources/README.rst +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/docs/examples/noise_sources/dark_current.py +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/docs/examples/noise_sources/shot_noise.py +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/docs/examples/noise_sources/thermal.py +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/docs/examples/tutorials/README.rst +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/docs/examples/tutorials/limit_of_detection.py +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/docs/examples/tutorials/workflow.py +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/docs/images/distributions/Delta.png +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/docs/images/distributions/LogNormal.png +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/docs/images/distributions/Normal.png +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/docs/images/distributions/RosinRammler.png +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/docs/images/distributions/Uniform.png +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/docs/images/distributions/Weibull.png +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/docs/images/example_0.png +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/docs/images/example_1.png +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/docs/images/example_2.png +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/docs/images/example_3.png +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/docs/images/flow_cytometer.png +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/docs/images/logo.png +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/docs/make.bat +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/docs/source/_static/default.css +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/docs/source/_static/logo.png +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/docs/source/_static/thumbnail.png +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/docs/source/code/base.rst +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/docs/source/code/detector.rst +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/docs/source/code/distributions.rst +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/docs/source/code/flow_cell.rst +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/docs/source/code/flow_cytometer.rst +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/docs/source/code/peak_locator.rst +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/docs/source/code/scatterer.rst +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/docs/source/code/source.rst +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/docs/source/code.rst +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/docs/source/conf.py +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/docs/source/examples.rst +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/docs/source/index.rst +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/docs/source/internal/core_components.rst +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/docs/source/internal/getting_started.rst +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/docs/source/internal/objectives/main.rst +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/docs/source/internal/objectives/pre.rst +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/docs/source/internal/objectives/stretch.rst +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/docs/source/internal/prerequisites/index.rst +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/docs/source/internal/prerequisites/mathematics.rst +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/docs/source/internal/prerequisites/optics.rst +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/docs/source/internal/prerequisites/programming.rst +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/docs/source/internal/ressources.rst +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/docs/source/internal/tasks.rst +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/docs/source/internal.rst +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/docs/source/references.rst +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/docs/source/sg_execution_times.rst +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/docs/source/theory.rst +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/meta.yaml +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/notebook.ipynb +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/pyproject.toml +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/setup.cfg +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/tests/__init__.py +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/tests/test_classifiers.py +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/tests/test_coupling_mechanism.py +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/tests/test_detector_noise.py +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/tests/test_distribution.py +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/tests/test_noises.py +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/tests/test_peak_algorithm.py +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/tests/test_peak_analyzer.py +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/tests/test_population.py +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/tests/test_scatterer_distribution.py +0 -0
- {flowcypy-0.8.0 → flowcypy-0.8.1}/tests/test_source.py +0 -0
|
@@ -1,21 +1,10 @@
|
|
|
1
|
-
import logging
|
|
2
1
|
import warnings
|
|
3
|
-
from typing import Optional, Union, List
|
|
4
|
-
from MPSPlots.styles import mps
|
|
5
2
|
import pandas as pd
|
|
6
3
|
import numpy as np
|
|
7
4
|
from FlowCyPy import units
|
|
8
|
-
import matplotlib.pyplot as plt
|
|
9
|
-
import seaborn as sns
|
|
10
|
-
from tabulate import tabulate
|
|
11
|
-
from FlowCyPy import helper
|
|
12
5
|
from FlowCyPy.triggered_acquisition import TriggeredAcquisitions
|
|
13
6
|
from FlowCyPy.dataframe_subclass import TriggeredAcquisitionDataFrame
|
|
14
7
|
|
|
15
|
-
class DataAccessor:
|
|
16
|
-
def __init__(self, outer):
|
|
17
|
-
self._outer = outer
|
|
18
|
-
|
|
19
8
|
|
|
20
9
|
class Acquisition:
|
|
21
10
|
"""
|
|
@@ -45,8 +34,6 @@ class Acquisition:
|
|
|
45
34
|
DataFrame with detector signal data.
|
|
46
35
|
"""
|
|
47
36
|
self.cytometer = cytometer
|
|
48
|
-
self.logger = self.LoggerInterface(self)
|
|
49
|
-
|
|
50
37
|
self.signal = detector_dataframe
|
|
51
38
|
self.scatterer = scatterer_dataframe
|
|
52
39
|
self.run_time = run_time
|
|
@@ -189,174 +176,13 @@ class Acquisition:
|
|
|
189
176
|
triggered_signal.attrs['saturation_levels'] = self.signal.attrs['saturation_levels']
|
|
190
177
|
triggered_signal.attrs['scatterer_dataframe'] = self.signal.attrs['scatterer_dataframe']
|
|
191
178
|
|
|
192
|
-
|
|
179
|
+
triggered_acquisition = TriggeredAcquisitions(parent=self, dataframe=triggered_signal)
|
|
180
|
+
triggered_acquisition.scatterer = self.scatterer
|
|
181
|
+
|
|
182
|
+
return triggered_acquisition
|
|
193
183
|
else:
|
|
194
184
|
warnings.warn(
|
|
195
185
|
f"No signal were triggered during the run time, try changing the threshold. Signal min-max value is: {self.signal['Signal'].min().to_compact()}, {self.signal['Signal'].max().to_compact()}",
|
|
196
186
|
UserWarning
|
|
197
187
|
)
|
|
198
188
|
|
|
199
|
-
class LoggerInterface:
|
|
200
|
-
"""
|
|
201
|
-
A nested class for logging statistical information about the experiment.
|
|
202
|
-
|
|
203
|
-
Methods
|
|
204
|
-
-------
|
|
205
|
-
scatterer()
|
|
206
|
-
Logs statistics about the scatterer populations.
|
|
207
|
-
detector()
|
|
208
|
-
Logs statistics about the detector signals.
|
|
209
|
-
"""
|
|
210
|
-
|
|
211
|
-
def __init__(self, experiment: object):
|
|
212
|
-
self.experiment = experiment
|
|
213
|
-
|
|
214
|
-
def scatterer(self, table_format: str = "grid") -> None:
|
|
215
|
-
"""
|
|
216
|
-
Logs detailed information about scatterer populations.
|
|
217
|
-
|
|
218
|
-
Parameters
|
|
219
|
-
----------
|
|
220
|
-
table_format : str, optional
|
|
221
|
-
The format for the table display (default: 'grid').
|
|
222
|
-
Options include 'plain', 'github', 'grid', 'fancy_grid', etc.
|
|
223
|
-
|
|
224
|
-
Returns
|
|
225
|
-
-------
|
|
226
|
-
None
|
|
227
|
-
Logs scatterer population information, including refractive index, size, particle count,
|
|
228
|
-
number of events, and time statistics.
|
|
229
|
-
"""
|
|
230
|
-
logging.info("\n=== Scatterer Population Properties ===")
|
|
231
|
-
|
|
232
|
-
# Collect general population data
|
|
233
|
-
general_table_data = [
|
|
234
|
-
self._get_population_properties(population)
|
|
235
|
-
for population in self.experiment.scatterer.groupby("Population")
|
|
236
|
-
]
|
|
237
|
-
general_headers = [
|
|
238
|
-
"Name",
|
|
239
|
-
"Refractive Index",
|
|
240
|
-
"Medium Refractive Index",
|
|
241
|
-
"Size",
|
|
242
|
-
"Particle Count",
|
|
243
|
-
"Number of Events",
|
|
244
|
-
"Min Time Between Events",
|
|
245
|
-
"Avg Time Between Events",
|
|
246
|
-
]
|
|
247
|
-
|
|
248
|
-
formatted_general_table = tabulate(
|
|
249
|
-
general_table_data, headers=general_headers, tablefmt=table_format, floatfmt=".4f"
|
|
250
|
-
)
|
|
251
|
-
logging.info("\n" + formatted_general_table)
|
|
252
|
-
|
|
253
|
-
def _get_population_properties(self, population_group: tuple) -> List[Union[str, float]]:
|
|
254
|
-
"""
|
|
255
|
-
Extracts key properties of a scatterer population for the general properties table.
|
|
256
|
-
|
|
257
|
-
Parameters
|
|
258
|
-
----------
|
|
259
|
-
population_group : tuple
|
|
260
|
-
A tuple containing the population name and its corresponding DataFrame.
|
|
261
|
-
|
|
262
|
-
Returns
|
|
263
|
-
-------
|
|
264
|
-
list
|
|
265
|
-
List of scatterer properties: [name, refractive index, medium refractive index, size,
|
|
266
|
-
particle count, number of events, min time between events, avg time between events].
|
|
267
|
-
"""
|
|
268
|
-
population_name, population_df = population_group
|
|
269
|
-
|
|
270
|
-
name = population_name
|
|
271
|
-
refractive_index = f"{population_df['RefractiveIndex'].mean():~P}"
|
|
272
|
-
medium_refractive_index = f"{self.experiment.run_time:~P}" # Replace with actual medium refractive index if stored elsewhere
|
|
273
|
-
size = f"{population_df['Size'].mean():~P}"
|
|
274
|
-
particle_count = len(population_df)
|
|
275
|
-
num_events = particle_count
|
|
276
|
-
|
|
277
|
-
min_delta_position = population_df["Time"].diff().abs().min()
|
|
278
|
-
avg_delta_position = population_df["Time"].diff().mean()
|
|
279
|
-
|
|
280
|
-
return [
|
|
281
|
-
name,
|
|
282
|
-
refractive_index,
|
|
283
|
-
medium_refractive_index,
|
|
284
|
-
size,
|
|
285
|
-
particle_count,
|
|
286
|
-
num_events,
|
|
287
|
-
min_delta_position,
|
|
288
|
-
avg_delta_position,
|
|
289
|
-
]
|
|
290
|
-
|
|
291
|
-
def detector(self, table_format: str = "grid", include_totals: bool = True) -> None:
|
|
292
|
-
"""
|
|
293
|
-
Logs statistics about detector signals.
|
|
294
|
-
|
|
295
|
-
Parameters
|
|
296
|
-
----------
|
|
297
|
-
table_format : str, optional
|
|
298
|
-
The format for the table display (default: 'grid').
|
|
299
|
-
Options include 'plain', 'github', 'grid', 'fancy_grid', etc.
|
|
300
|
-
include_totals : bool, optional
|
|
301
|
-
If True, logs the total number of events across all detectors (default: True).
|
|
302
|
-
|
|
303
|
-
Returns
|
|
304
|
-
-------
|
|
305
|
-
None
|
|
306
|
-
Logs details about detector signals, including event counts,
|
|
307
|
-
timing statistics, and mean event rates.
|
|
308
|
-
"""
|
|
309
|
-
logging.info("\n=== Detector Signal Statistics ===")
|
|
310
|
-
|
|
311
|
-
# Compute statistics for each detector
|
|
312
|
-
df = self.experiment.signal
|
|
313
|
-
table_data = [
|
|
314
|
-
self._get_detector_stats(detector_name, df.xs(detector_name, level="Detector"))
|
|
315
|
-
for detector_name in df.index.levels[0]
|
|
316
|
-
]
|
|
317
|
-
headers = [
|
|
318
|
-
"Detector",
|
|
319
|
-
"Number of Acquisition",
|
|
320
|
-
"First Event Time",
|
|
321
|
-
"Last Event Time",
|
|
322
|
-
"Time Between Events",
|
|
323
|
-
]
|
|
324
|
-
|
|
325
|
-
formatted_table = tabulate(table_data, headers=headers, tablefmt=table_format, floatfmt=".3f")
|
|
326
|
-
logging.info("\n" + formatted_table)
|
|
327
|
-
|
|
328
|
-
if include_totals:
|
|
329
|
-
total_events = sum(stat[1] for stat in table_data)
|
|
330
|
-
logging.info(f"\nTotal number of events detected across all detectors: {total_events}")
|
|
331
|
-
|
|
332
|
-
def _get_detector_stats(self, detector_name: str, group: pd.DataFrame) -> list:
|
|
333
|
-
"""
|
|
334
|
-
Computes statistics for a detector.
|
|
335
|
-
|
|
336
|
-
Parameters
|
|
337
|
-
----------
|
|
338
|
-
detector_name : str
|
|
339
|
-
Name of the detector.
|
|
340
|
-
group : pd.DataFrame
|
|
341
|
-
DataFrame containing the detector data.
|
|
342
|
-
|
|
343
|
-
Returns
|
|
344
|
-
-------
|
|
345
|
-
list
|
|
346
|
-
List of computed statistics: [detector_name, num_events, first_event_time,
|
|
347
|
-
last_event_time, avg_time_between_events, min_time_between_events, mean_event_rate].
|
|
348
|
-
"""
|
|
349
|
-
num_acquisition = len(group["Time"])
|
|
350
|
-
first_event_time = group["Time"].min()
|
|
351
|
-
last_event_time = group["Time"].max()
|
|
352
|
-
|
|
353
|
-
time_diffs = group["Time"].diff().dropna()
|
|
354
|
-
time_between_events = time_diffs.mean()
|
|
355
|
-
|
|
356
|
-
return [
|
|
357
|
-
detector_name,
|
|
358
|
-
num_acquisition,
|
|
359
|
-
first_event_time,
|
|
360
|
-
last_event_time,
|
|
361
|
-
time_between_events,
|
|
362
|
-
]
|
|
@@ -8,7 +8,7 @@ import pandas as pd
|
|
|
8
8
|
from pint_pandas import PintArray
|
|
9
9
|
|
|
10
10
|
from FlowCyPy import units
|
|
11
|
-
from FlowCyPy.units import
|
|
11
|
+
from FlowCyPy.units import milliwatt
|
|
12
12
|
from FlowCyPy.flow_cell import FlowCell
|
|
13
13
|
from FlowCyPy.detector import Detector
|
|
14
14
|
from FlowCyPy.acquisition import Acquisition
|
|
@@ -5,6 +5,8 @@ import seaborn as sns
|
|
|
5
5
|
from FlowCyPy import helper
|
|
6
6
|
from MPSPlots.styles import mps
|
|
7
7
|
from FlowCyPy import units
|
|
8
|
+
import logging
|
|
9
|
+
from tabulate import tabulate
|
|
8
10
|
|
|
9
11
|
class ScattererDataFrame(pd.DataFrame):
|
|
10
12
|
"""
|
|
@@ -59,6 +61,83 @@ class ScattererDataFrame(pd.DataFrame):
|
|
|
59
61
|
|
|
60
62
|
return grid
|
|
61
63
|
|
|
64
|
+
def log(self, table_format: str = "grid") -> None:
|
|
65
|
+
"""
|
|
66
|
+
Logs detailed information about scatterer populations.
|
|
67
|
+
|
|
68
|
+
Parameters
|
|
69
|
+
----------
|
|
70
|
+
table_format : str, optional
|
|
71
|
+
The format for the table display (default: 'grid').
|
|
72
|
+
Options include 'plain', 'github', 'grid', 'fancy_grid', etc.
|
|
73
|
+
|
|
74
|
+
Returns
|
|
75
|
+
-------
|
|
76
|
+
None
|
|
77
|
+
Logs scatterer population information, including refractive index, size, particle count,
|
|
78
|
+
number of events, and time statistics.
|
|
79
|
+
"""
|
|
80
|
+
logging.info("\n=== Scatterer Population Properties ===")
|
|
81
|
+
|
|
82
|
+
# Collect general population data
|
|
83
|
+
general_table_data = [
|
|
84
|
+
self._get_population_properties(population)
|
|
85
|
+
for population in self.groupby("Population")
|
|
86
|
+
]
|
|
87
|
+
general_headers = [
|
|
88
|
+
"Name",
|
|
89
|
+
"Refractive Index",
|
|
90
|
+
"Medium Refractive Index",
|
|
91
|
+
"Size",
|
|
92
|
+
"Particle Count",
|
|
93
|
+
"Number of Events",
|
|
94
|
+
"Min Time Between Events",
|
|
95
|
+
"Avg Time Between Events",
|
|
96
|
+
]
|
|
97
|
+
|
|
98
|
+
formatted_general_table = tabulate(
|
|
99
|
+
general_table_data, headers=general_headers, tablefmt=table_format, floatfmt=".4f"
|
|
100
|
+
)
|
|
101
|
+
logging.info("\n" + formatted_general_table)
|
|
102
|
+
|
|
103
|
+
def _get_population_properties(self, population_group: tuple) -> List[Union[str, float]]:
|
|
104
|
+
"""
|
|
105
|
+
Extracts key properties of a scatterer population for the general properties table.
|
|
106
|
+
|
|
107
|
+
Parameters
|
|
108
|
+
----------
|
|
109
|
+
population_group : tuple
|
|
110
|
+
A tuple containing the population name and its corresponding DataFrame.
|
|
111
|
+
|
|
112
|
+
Returns
|
|
113
|
+
-------
|
|
114
|
+
list
|
|
115
|
+
List of scatterer properties: [name, refractive index, medium refractive index, size,
|
|
116
|
+
particle count, number of events, min time between events, avg time between events].
|
|
117
|
+
"""
|
|
118
|
+
population_name, population_df = population_group
|
|
119
|
+
|
|
120
|
+
name = population_name
|
|
121
|
+
refractive_index = f"{population_df['RefractiveIndex'].mean():~P}"
|
|
122
|
+
medium_refractive_index = f"{self.attrs['run_time']:~P}" # Replace with actual medium refractive index if stored elsewhere
|
|
123
|
+
size = f"{population_df['Size'].mean():~P}"
|
|
124
|
+
particle_count = len(population_df)
|
|
125
|
+
num_events = particle_count
|
|
126
|
+
|
|
127
|
+
min_delta_position = population_df["Time"].diff().abs().min()
|
|
128
|
+
avg_delta_position = population_df["Time"].diff().mean()
|
|
129
|
+
|
|
130
|
+
return [
|
|
131
|
+
name,
|
|
132
|
+
refractive_index,
|
|
133
|
+
medium_refractive_index,
|
|
134
|
+
size,
|
|
135
|
+
particle_count,
|
|
136
|
+
num_events,
|
|
137
|
+
min_delta_position,
|
|
138
|
+
avg_delta_position,
|
|
139
|
+
]
|
|
140
|
+
|
|
62
141
|
class ContinuousAcquisitionDataFrame(pd.DataFrame):
|
|
63
142
|
"""
|
|
64
143
|
A subclass of pandas DataFrame with a custom plot method.
|
|
@@ -213,6 +292,85 @@ class TriggeredAcquisitionDataFrame(pd.DataFrame):
|
|
|
213
292
|
if show:
|
|
214
293
|
plt.show()
|
|
215
294
|
|
|
295
|
+
def log(self, table_format: str = "grid", include_totals: bool = True) -> None:
|
|
296
|
+
"""
|
|
297
|
+
Logs statistics about detector signals.
|
|
298
|
+
|
|
299
|
+
Parameters
|
|
300
|
+
----------
|
|
301
|
+
table_format : str, optional
|
|
302
|
+
The format for the table display (default: 'grid').
|
|
303
|
+
Options include 'plain', 'github', 'grid', 'fancy_grid', etc.
|
|
304
|
+
include_totals : bool, optional
|
|
305
|
+
If True, logs the total number of events across all detectors (default: True).
|
|
306
|
+
|
|
307
|
+
Returns
|
|
308
|
+
-------
|
|
309
|
+
None
|
|
310
|
+
Logs details about detector signals, including event counts,
|
|
311
|
+
timing statistics, and mean event rates.
|
|
312
|
+
"""
|
|
313
|
+
logging.info("\n=== Detector Signal Statistics ===")
|
|
314
|
+
|
|
315
|
+
# Compute statistics for each detector
|
|
316
|
+
if self.empty:
|
|
317
|
+
logging.warning("No data available for detectors.")
|
|
318
|
+
return
|
|
319
|
+
|
|
320
|
+
table_data = [
|
|
321
|
+
self._get_detector_stats(detector_name, self.xs(detector_name, level="Detector"))
|
|
322
|
+
for detector_name in self.index.get_level_values("Detector").unique()
|
|
323
|
+
]
|
|
324
|
+
headers = [
|
|
325
|
+
"Detector",
|
|
326
|
+
"Number of Acquisition",
|
|
327
|
+
"First Event Time",
|
|
328
|
+
"Last Event Time",
|
|
329
|
+
"Time Between Events",
|
|
330
|
+
]
|
|
331
|
+
|
|
332
|
+
formatted_table = tabulate(table_data, headers=headers, tablefmt=table_format, floatfmt=".3f")
|
|
333
|
+
logging.info("\n" + formatted_table)
|
|
334
|
+
|
|
335
|
+
if include_totals:
|
|
336
|
+
total_events = sum(stat[1] for stat in table_data)
|
|
337
|
+
logging.info(f"\nTotal number of events detected across all detectors: {total_events}")
|
|
338
|
+
|
|
339
|
+
def _get_detector_stats(self, detector_name: str, group: pd.DataFrame) -> list:
|
|
340
|
+
"""
|
|
341
|
+
Computes statistics for a detector.
|
|
342
|
+
|
|
343
|
+
Parameters
|
|
344
|
+
----------
|
|
345
|
+
detector_name : str
|
|
346
|
+
Name of the detector.
|
|
347
|
+
group : pd.DataFrame
|
|
348
|
+
DataFrame containing the detector data.
|
|
349
|
+
|
|
350
|
+
Returns
|
|
351
|
+
-------
|
|
352
|
+
list
|
|
353
|
+
List of computed statistics: [detector_name, num_events, first_event_time,
|
|
354
|
+
last_event_time, avg_time_between_events].
|
|
355
|
+
"""
|
|
356
|
+
if group.empty:
|
|
357
|
+
return [detector_name, 0, None, None, None]
|
|
358
|
+
|
|
359
|
+
num_acquisition = len(group["Time"])
|
|
360
|
+
first_event_time = group["Time"].min()
|
|
361
|
+
last_event_time = group["Time"].max()
|
|
362
|
+
|
|
363
|
+
time_diffs = group["Time"].diff().dropna()
|
|
364
|
+
time_between_events = time_diffs.mean() if not time_diffs.empty else None
|
|
365
|
+
|
|
366
|
+
return [
|
|
367
|
+
detector_name,
|
|
368
|
+
num_acquisition,
|
|
369
|
+
first_event_time,
|
|
370
|
+
last_event_time,
|
|
371
|
+
time_between_events,
|
|
372
|
+
]
|
|
373
|
+
|
|
216
374
|
|
|
217
375
|
class ClassifierDataFrame(pd.DataFrame):
|
|
218
376
|
"""
|
|
@@ -111,7 +111,10 @@ class FlowCell:
|
|
|
111
111
|
if self.event_scheme.lower() == 'uniform-random':
|
|
112
112
|
numpy.random.shuffle(evenly_spaced_times.magnitude)
|
|
113
113
|
event_dataframe['Time'] = PintArray(evenly_spaced_times.to(units.second).magnitude, units.second)
|
|
114
|
-
|
|
114
|
+
|
|
115
|
+
scatterer_dataframe = ScattererDataFrame(event_dataframe)
|
|
116
|
+
scatterer_dataframe.attrs['run_time'] = run_time
|
|
117
|
+
return scatterer_dataframe
|
|
115
118
|
|
|
116
119
|
def _generate_poisson_events(self, run_time: Quantity, population: Population) -> pd.DataFrame:
|
|
117
120
|
"""
|
|
@@ -33,6 +33,9 @@ def validate_units(**expected_units):
|
|
|
33
33
|
if arg_name in bound_args.arguments:
|
|
34
34
|
value = bound_args.arguments[arg_name]
|
|
35
35
|
|
|
36
|
+
if value is None:
|
|
37
|
+
continue
|
|
38
|
+
|
|
36
39
|
# Check if the value is a Pint Quantity
|
|
37
40
|
if not isinstance(value, Quantity):
|
|
38
41
|
raise TypeError(f"Argument '{arg_name}' must be a Pint Quantity, but got {type(value)}")
|
|
@@ -0,0 +1,200 @@
|
|
|
1
|
+
import pandas as pd
|
|
2
|
+
from FlowCyPy import units
|
|
3
|
+
from scipy.signal import find_peaks
|
|
4
|
+
from FlowCyPy.utils import bessel_lowpass_filter, dc_highpass_filter
|
|
5
|
+
from FlowCyPy.dataframe_subclass import PeakDataFrame
|
|
6
|
+
from FlowCyPy.helper import validate_units
|
|
7
|
+
import pint_pandas
|
|
8
|
+
|
|
9
|
+
class TriggeredAcquisitions:
|
|
10
|
+
"""
|
|
11
|
+
A class for handling and processing triggered acquisition data,
|
|
12
|
+
including peak detection and signal filtering.
|
|
13
|
+
"""
|
|
14
|
+
|
|
15
|
+
def __init__(self, parent, dataframe: pd.DataFrame):
|
|
16
|
+
"""
|
|
17
|
+
Initializes the TriggeredAcquisitions instance.
|
|
18
|
+
|
|
19
|
+
Parameters
|
|
20
|
+
----------
|
|
21
|
+
parent : object
|
|
22
|
+
Parent object containing cytometer and detector metadata.
|
|
23
|
+
dataframe : pd.DataFrame
|
|
24
|
+
Dataframe containing the acquired signals.
|
|
25
|
+
"""
|
|
26
|
+
self.signal = dataframe
|
|
27
|
+
self.parent = parent
|
|
28
|
+
|
|
29
|
+
self.detect_peaks()
|
|
30
|
+
|
|
31
|
+
def get_detector(self, name: str):
|
|
32
|
+
"""
|
|
33
|
+
Retrieves a detector instance by name.
|
|
34
|
+
|
|
35
|
+
Parameters
|
|
36
|
+
----------
|
|
37
|
+
name : str
|
|
38
|
+
Name of the detector to retrieve.
|
|
39
|
+
|
|
40
|
+
Returns
|
|
41
|
+
-------
|
|
42
|
+
object or None
|
|
43
|
+
Detector object if found, otherwise None.
|
|
44
|
+
"""
|
|
45
|
+
for detector in self.parent.cytometer.detectors:
|
|
46
|
+
if detector.name == name:
|
|
47
|
+
return detector
|
|
48
|
+
|
|
49
|
+
def detect_peaks(self, multi_peak_strategy: str = 'max') -> None:
|
|
50
|
+
"""
|
|
51
|
+
Detects peaks for each segment and stores results in a DataFrame.
|
|
52
|
+
|
|
53
|
+
Parameters
|
|
54
|
+
----------
|
|
55
|
+
multi_peak_strategy : str, optional
|
|
56
|
+
Strategy for handling multiple peaks in a segment. Options are:
|
|
57
|
+
- 'max': Take the maximum peak in the segment.
|
|
58
|
+
- 'mean': Take the average of the peaks in the segment.
|
|
59
|
+
- 'sum': Sum all peaks in the segment.
|
|
60
|
+
- 'discard': Remove entries with multiple peaks.
|
|
61
|
+
- 'keep': Keep all peaks without aggregation.
|
|
62
|
+
Default is 'max'.
|
|
63
|
+
"""
|
|
64
|
+
if multi_peak_strategy not in {'max', 'mean', 'sum', 'discard', 'keep'}:
|
|
65
|
+
raise ValueError("Invalid multi_peak_strategy. Choose from 'max', 'mean', 'sum', 'discard', 'keep'.")
|
|
66
|
+
|
|
67
|
+
def process_segment(segment):
|
|
68
|
+
signal = segment['DigitizedSignal'].values
|
|
69
|
+
time = segment['Time'].values
|
|
70
|
+
peaks, properties = find_peaks(signal, width=1)
|
|
71
|
+
|
|
72
|
+
return pd.DataFrame({
|
|
73
|
+
"SegmentID": segment.name[1],
|
|
74
|
+
"Detector": segment.name[0],
|
|
75
|
+
"Height": signal[peaks],
|
|
76
|
+
"Time": time[peaks],
|
|
77
|
+
**{k: v for k, v in properties.items()}
|
|
78
|
+
})
|
|
79
|
+
|
|
80
|
+
# Process peaks for each group
|
|
81
|
+
results = self.signal.groupby(level=['Detector', 'SegmentID']).apply(process_segment)
|
|
82
|
+
results = results.reset_index(drop=True)
|
|
83
|
+
|
|
84
|
+
# Process multi-peak strategies
|
|
85
|
+
_temp = results.reset_index()[['Detector', 'SegmentID', 'Height']].pint.dequantify().droplevel('unit', axis=1)
|
|
86
|
+
|
|
87
|
+
self.peaks = (
|
|
88
|
+
results.reset_index()
|
|
89
|
+
.loc[_temp.groupby(['Detector', 'SegmentID'])['Height'].idxmax()]
|
|
90
|
+
.set_index(['Detector', 'SegmentID'])
|
|
91
|
+
)
|
|
92
|
+
|
|
93
|
+
return PeakDataFrame(self.peaks)
|
|
94
|
+
|
|
95
|
+
def _apply_lowpass_filter(self, cutoff_freq: units.Quantity, order: int = 4) -> None:
    """
    Apply a Bessel low-pass filter to the digitized signal data, in-place.

    Parameters
    ----------
    cutoff_freq : units.Quantity
        Cutoff frequency of the low-pass filter (must carry frequency units).
    order : int, optional
        Order of the Bessel filter, default is 4.

    Raises
    ------
    ValueError
        If the cutoff frequency is missing or not below the Nyquist frequency.
    TypeError
        If the signal column is not backed by a PintArray.
    """
    # Guard clause: a cutoff is mandatory for this operation.
    if cutoff_freq is None:
        raise ValueError("Cutoff frequency must be specified for low-pass filtering.")

    # Normalize the digitizer's sampling frequency to Hertz.
    sampling_rate = self.parent.cytometer.signal_digitizer.sampling_freq.to("hertz")
    nyquist_freq = sampling_rate / 2

    # The cutoff must sit strictly below the Nyquist limit.
    if cutoff_freq.to("hertz") >= nyquist_freq:
        raise ValueError(f"Cutoff frequency ({cutoff_freq}) must be below the Nyquist frequency ({nyquist_freq}).")

    # The in-place write-back below assumes a unit-aware column.
    if not isinstance(self.signal["DigitizedSignal"].array, pint_pandas.PintArray):
        raise TypeError("Expected 'DigitizedSignal' to be a PintArray, but got a different type.")

    # Filter each (Detector, SegmentID) trace separately so that segment
    # boundaries never bleed into one another.
    for group_key, trace in self.signal.groupby(level=['Detector', 'SegmentID']):
        raw_magnitudes = trace["DigitizedSignal"].pint.quantity.magnitude
        smoothed = bessel_lowpass_filter(
            signal=raw_magnitudes,
            cutoff=cutoff_freq,
            sampling_rate=sampling_rate,
            order=order,
        )
        # Cast back to integer counts, matching the digitizer's output type.
        self.signal.loc[group_key, "DigitizedSignal"] = smoothed.astype(int)
|
|
139
|
+
|
|
140
|
+
def _apply_highpass_filter(self, cutoff_freq: units.Quantity, order: int = 4) -> None:
    """
    Apply a DC high-pass filter to the digitized signal data, in-place.

    Parameters
    ----------
    cutoff_freq : units.Quantity
        Cutoff frequency of the high-pass filter (must carry frequency units).
    order : int, optional
        Order of the high-pass filter, default is 4.

    Raises
    ------
    ValueError
        If the cutoff frequency is missing or not below the Nyquist frequency.
    TypeError
        If the signal column is not backed by a PintArray.
    """
    # Guard clause: a cutoff is mandatory for this operation.
    if cutoff_freq is None:
        raise ValueError("Cutoff frequency must be specified for high-pass filtering.")

    # Normalize the digitizer's sampling frequency to Hertz.
    sampling_rate = self.parent.cytometer.signal_digitizer.sampling_freq.to("hertz")
    nyquist_freq = sampling_rate / 2

    # The cutoff must sit strictly below the Nyquist limit.
    if cutoff_freq.to("hertz") >= nyquist_freq:
        raise ValueError(f"Cutoff frequency ({cutoff_freq}) must be below the Nyquist frequency ({nyquist_freq}).")

    # The in-place write-back below assumes a unit-aware column.
    if not isinstance(self.signal["DigitizedSignal"].array, pint_pandas.PintArray):
        raise TypeError("Expected 'DigitizedSignal' to be a PintArray, but got a different type.")

    # Filter each (Detector, SegmentID) trace separately so that segment
    # boundaries never bleed into one another.
    for group_key, trace in self.signal.groupby(level=['Detector', 'SegmentID']):
        raw_magnitudes = trace["DigitizedSignal"].pint.quantity.magnitude
        detrended = dc_highpass_filter(
            signal=raw_magnitudes,
            cutoff=cutoff_freq,
            sampling_rate=sampling_rate,
            order=order,
        )
        # Cast back to integer counts, matching the digitizer's output type.
        self.signal.loc[group_key, "DigitizedSignal"] = detrended.astype(int)
|
|
184
|
+
|
|
185
|
+
@validate_units(low_cutoff=units.hertz, high_cutoff=units.hertz)
def apply_filters(self, low_cutoff: units.Quantity = None, high_cutoff: units.Quantity = None, order: int = 4) -> None:
    """
    Applies low-pass and/or high-pass filters to the signal data in-place.

    Parameters
    ----------
    low_cutoff : units.Quantity, optional
        The cutoff frequency for the low-pass filter. If None, no low-pass filtering is applied.
    high_cutoff : units.Quantity, optional
        The cutoff frequency for the high-pass filter. If None, no high-pass filtering is applied.
    order : int, optional
        Filter order forwarded to both filters. Defaults to 4, which matches
        the default the underlying filters used previously, so omitting this
        argument preserves the former behavior.
    """
    if low_cutoff is not None:
        self._apply_lowpass_filter(low_cutoff, order=order)
    if high_cutoff is not None:
        self._apply_highpass_filter(high_cutoff, order=order)
|