FlowCyPy 0.5.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- flowcypy-0.5.0/.condarc +37 -0
- flowcypy-0.5.0/.flake8 +12 -0
- flowcypy-0.5.0/.github/dependabot.yml +11 -0
- flowcypy-0.5.0/.github/workflows/deploy_PyPi.yml +20 -0
- flowcypy-0.5.0/.github/workflows/deploy_anaconda.yml +21 -0
- flowcypy-0.5.0/.github/workflows/deploy_coverage.yml +25 -0
- flowcypy-0.5.0/.github/workflows/deploy_documentation.yml +24 -0
- flowcypy-0.5.0/.gitignore +28 -0
- flowcypy-0.5.0/.readthedocs.yml +35 -0
- flowcypy-0.5.0/FlowCyPy/__init__.py +15 -0
- flowcypy-0.5.0/FlowCyPy/_version.py +16 -0
- flowcypy-0.5.0/FlowCyPy/classifier.py +196 -0
- flowcypy-0.5.0/FlowCyPy/coupling_mechanism/__init__.py +4 -0
- flowcypy-0.5.0/FlowCyPy/coupling_mechanism/empirical.py +47 -0
- flowcypy-0.5.0/FlowCyPy/coupling_mechanism/mie.py +205 -0
- flowcypy-0.5.0/FlowCyPy/coupling_mechanism/rayleigh.py +115 -0
- flowcypy-0.5.0/FlowCyPy/coupling_mechanism/uniform.py +39 -0
- flowcypy-0.5.0/FlowCyPy/cytometer.py +198 -0
- flowcypy-0.5.0/FlowCyPy/detector.py +616 -0
- flowcypy-0.5.0/FlowCyPy/directories.py +36 -0
- flowcypy-0.5.0/FlowCyPy/distribution/__init__.py +16 -0
- flowcypy-0.5.0/FlowCyPy/distribution/base_class.py +59 -0
- flowcypy-0.5.0/FlowCyPy/distribution/delta.py +86 -0
- flowcypy-0.5.0/FlowCyPy/distribution/lognormal.py +94 -0
- flowcypy-0.5.0/FlowCyPy/distribution/normal.py +95 -0
- flowcypy-0.5.0/FlowCyPy/distribution/particle_size_distribution.py +110 -0
- flowcypy-0.5.0/FlowCyPy/distribution/uniform.py +96 -0
- flowcypy-0.5.0/FlowCyPy/distribution/weibull.py +80 -0
- flowcypy-0.5.0/FlowCyPy/event_correlator.py +244 -0
- flowcypy-0.5.0/FlowCyPy/flow_cell.py +122 -0
- flowcypy-0.5.0/FlowCyPy/helper.py +85 -0
- flowcypy-0.5.0/FlowCyPy/logger.py +322 -0
- flowcypy-0.5.0/FlowCyPy/noises.py +29 -0
- flowcypy-0.5.0/FlowCyPy/particle_count.py +102 -0
- flowcypy-0.5.0/FlowCyPy/peak_locator/__init__.py +4 -0
- flowcypy-0.5.0/FlowCyPy/peak_locator/base_class.py +163 -0
- flowcypy-0.5.0/FlowCyPy/peak_locator/basic.py +108 -0
- flowcypy-0.5.0/FlowCyPy/peak_locator/derivative.py +143 -0
- flowcypy-0.5.0/FlowCyPy/peak_locator/moving_average.py +114 -0
- flowcypy-0.5.0/FlowCyPy/physical_constant.py +19 -0
- flowcypy-0.5.0/FlowCyPy/plottings.py +270 -0
- flowcypy-0.5.0/FlowCyPy/population.py +239 -0
- flowcypy-0.5.0/FlowCyPy/populations_instances.py +49 -0
- flowcypy-0.5.0/FlowCyPy/report.py +236 -0
- flowcypy-0.5.0/FlowCyPy/scatterer.py +373 -0
- flowcypy-0.5.0/FlowCyPy/source.py +249 -0
- flowcypy-0.5.0/FlowCyPy/units.py +26 -0
- flowcypy-0.5.0/FlowCyPy/utils.py +191 -0
- flowcypy-0.5.0/FlowCyPy.egg-info/PKG-INFO +252 -0
- flowcypy-0.5.0/FlowCyPy.egg-info/SOURCES.txt +128 -0
- flowcypy-0.5.0/FlowCyPy.egg-info/dependency_links.txt +1 -0
- flowcypy-0.5.0/FlowCyPy.egg-info/requires.txt +27 -0
- flowcypy-0.5.0/FlowCyPy.egg-info/top_level.txt +1 -0
- flowcypy-0.5.0/LICENSE +21 -0
- flowcypy-0.5.0/PKG-INFO +252 -0
- flowcypy-0.5.0/README.rst +186 -0
- flowcypy-0.5.0/Untitled.ipynb +978 -0
- flowcypy-0.5.0/developments/dev_beads_analysis.py +154 -0
- flowcypy-0.5.0/developments/dev_canto.py +146 -0
- flowcypy-0.5.0/developments/dev_classifier.py +140 -0
- flowcypy-0.5.0/developments/dev_shot_noise_check.py +138 -0
- flowcypy-0.5.0/developments/dev_study_on_ri.py +155 -0
- flowcypy-0.5.0/developments/dev_study_on_size.py +161 -0
- flowcypy-0.5.0/developments/get_started.md +23 -0
- flowcypy-0.5.0/developments/image.png +0 -0
- flowcypy-0.5.0/developments/internship.pdf +0 -0
- flowcypy-0.5.0/developments/output_file.prof +0 -0
- flowcypy-0.5.0/developments/profiler.py +68 -0
- flowcypy-0.5.0/developments/test.pdf +0 -0
- flowcypy-0.5.0/docs/Makefile +19 -0
- flowcypy-0.5.0/docs/examples/density_plots/1_populations.py +132 -0
- flowcypy-0.5.0/docs/examples/density_plots/2_populations.py +131 -0
- flowcypy-0.5.0/docs/examples/density_plots/3_populations.py +127 -0
- flowcypy-0.5.0/docs/examples/density_plots/README.rst +2 -0
- flowcypy-0.5.0/docs/examples/density_plots/custom_populations.py +139 -0
- flowcypy-0.5.0/docs/examples/extras/README.rst +2 -0
- flowcypy-0.5.0/docs/examples/extras/distributions.py +89 -0
- flowcypy-0.5.0/docs/examples/extras/flow_cytometer_signal.py +121 -0
- flowcypy-0.5.0/docs/examples/extras/full_workflow.py +125 -0
- flowcypy-0.5.0/docs/examples/extras/scatterer_distribution.py +74 -0
- flowcypy-0.5.0/docs/examples/noise_sources/README.rst +2 -0
- flowcypy-0.5.0/docs/examples/noise_sources/dark_current.py +67 -0
- flowcypy-0.5.0/docs/examples/noise_sources/shot_noise.py +65 -0
- flowcypy-0.5.0/docs/examples/noise_sources/thermal.py +61 -0
- flowcypy-0.5.0/docs/examples/tutorials/README.rst +2 -0
- flowcypy-0.5.0/docs/examples/tutorials/workflow.py +182 -0
- flowcypy-0.5.0/docs/images/example_0.png +0 -0
- flowcypy-0.5.0/docs/images/example_1.png +0 -0
- flowcypy-0.5.0/docs/images/example_2.png +0 -0
- flowcypy-0.5.0/docs/images/example_3.png +0 -0
- flowcypy-0.5.0/docs/images/flow_cytometer.png +0 -0
- flowcypy-0.5.0/docs/images/logo.png +0 -0
- flowcypy-0.5.0/docs/make.bat +35 -0
- flowcypy-0.5.0/docs/source/_static/default.css +19 -0
- flowcypy-0.5.0/docs/source/_static/logo.png +0 -0
- flowcypy-0.5.0/docs/source/_static/thumbnail.png +0 -0
- flowcypy-0.5.0/docs/source/code/base.rst +90 -0
- flowcypy-0.5.0/docs/source/code/distributions.rst +63 -0
- flowcypy-0.5.0/docs/source/code/peak_locator.rst +19 -0
- flowcypy-0.5.0/docs/source/code.rst +18 -0
- flowcypy-0.5.0/docs/source/conf.py +196 -0
- flowcypy-0.5.0/docs/source/examples.rst +15 -0
- flowcypy-0.5.0/docs/source/index.rst +14 -0
- flowcypy-0.5.0/docs/source/internal/core_components.rst +32 -0
- flowcypy-0.5.0/docs/source/internal/getting_started.rst +102 -0
- flowcypy-0.5.0/docs/source/internal/prerequisites/index.rst +12 -0
- flowcypy-0.5.0/docs/source/internal/prerequisites/mathematics.rst +120 -0
- flowcypy-0.5.0/docs/source/internal/prerequisites/optics.rst +16 -0
- flowcypy-0.5.0/docs/source/internal/prerequisites/programming.rst +41 -0
- flowcypy-0.5.0/docs/source/internal/ressources.rst +6 -0
- flowcypy-0.5.0/docs/source/internal/tasks.rst +136 -0
- flowcypy-0.5.0/docs/source/internal.rst +81 -0
- flowcypy-0.5.0/docs/source/references.rst +4 -0
- flowcypy-0.5.0/docs/source/theory.rst +154 -0
- flowcypy-0.5.0/meta.yaml +62 -0
- flowcypy-0.5.0/notebook.ipynb +417 -0
- flowcypy-0.5.0/pyproject.toml +103 -0
- flowcypy-0.5.0/setup.cfg +4 -0
- flowcypy-0.5.0/tests/__init__.py +0 -0
- flowcypy-0.5.0/tests/test_coupling_mechanism.py +131 -0
- flowcypy-0.5.0/tests/test_detector_noise.py +119 -0
- flowcypy-0.5.0/tests/test_distribution.py +102 -0
- flowcypy-0.5.0/tests/test_extra.py +55 -0
- flowcypy-0.5.0/tests/test_flow_cytometer.py +142 -0
- flowcypy-0.5.0/tests/test_noises.py +139 -0
- flowcypy-0.5.0/tests/test_peak_algorithm.py +48 -0
- flowcypy-0.5.0/tests/test_peak_analyzer.py +249 -0
- flowcypy-0.5.0/tests/test_population.py +97 -0
- flowcypy-0.5.0/tests/test_scatterer_distribution.py +154 -0
- flowcypy-0.5.0/tests/test_source.py +132 -0
flowcypy-0.5.0/.condarc
ADDED
|
@@ -0,0 +1,37 @@
|
|
|
1
|
+
# Set the default channels for Conda to search packages
|
|
2
|
+
channels:
|
|
3
|
+
- conda-forge
|
|
4
|
+
- defaults
|
|
5
|
+
- martinpdes
|
|
6
|
+
|
|
7
|
+
# Specify whether to automatically activate the base environment
|
|
8
|
+
auto_activate_base: true
|
|
9
|
+
|
|
10
|
+
# Enable SSL verification
|
|
11
|
+
ssl_verify: true
|
|
12
|
+
|
|
13
|
+
# Proxy settings (if behind a proxy, uncomment and configure)
|
|
14
|
+
#proxy_servers:
|
|
15
|
+
# http: http://username:password@proxy.example.com:port
|
|
16
|
+
# https: https://username:password@proxy.example.com:port
|
|
17
|
+
|
|
18
|
+
# Enable or disable Conda’s package signature verification
|
|
19
|
+
# (useful for debugging or offline setups)
|
|
20
|
+
#verify_ssl: true
|
|
21
|
+
|
|
22
|
+
# Show channel URLs when searching or listing packages
|
|
23
|
+
show_channel_urls: true
|
|
24
|
+
|
|
25
|
+
# Automatically update Conda when a new version is available
|
|
26
|
+
auto_update_conda: true
|
|
27
|
+
|
|
28
|
+
# Configure pip interoperability
|
|
29
|
+
pip_interop_enabled: true
|
|
30
|
+
|
|
31
|
+
# Override the default number of threads for solving the environment
|
|
32
|
+
# (useful for large environments)
|
|
33
|
+
#sat_solver_threads: 4
|
|
34
|
+
|
|
35
|
+
|
|
36
|
+
plugins:
|
|
37
|
+
anaconda_telemetry: false
|
flowcypy-0.5.0/.github/workflows/deploy_PyPi.yml
ADDED
|
@@ -0,0 +1,20 @@
|
|
|
1
|
+
name: Deploy to PyPi
|
|
2
|
+
|
|
3
|
+
on:
|
|
4
|
+
push:
|
|
5
|
+
branches: [ "master" ]
|
|
6
|
+
tags:
|
|
7
|
+
- '*'
|
|
8
|
+
pull_request:
|
|
9
|
+
branches: [ "master" ]
|
|
10
|
+
|
|
11
|
+
permissions:
|
|
12
|
+
contents: write
|
|
13
|
+
|
|
14
|
+
jobs:
|
|
15
|
+
pure_package:
|
|
16
|
+
uses: MartinPdeS/MPSActions/.github/workflows/publish_pure_package_to_PyPi.yml@master
|
|
17
|
+
with:
|
|
18
|
+
python-version: "3.11"
|
|
19
|
+
secrets:
|
|
20
|
+
PYPI_API_TOKEN: ${{ secrets.PYPI_API_TOKEN }}
|
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
name: Deploy Conda Packages
|
|
2
|
+
|
|
3
|
+
on:
|
|
4
|
+
push:
|
|
5
|
+
branches: [ "master" ]
|
|
6
|
+
tags:
|
|
7
|
+
- '*'
|
|
8
|
+
pull_request:
|
|
9
|
+
branches: [ "master" ]
|
|
10
|
+
|
|
11
|
+
permissions:
|
|
12
|
+
contents: write
|
|
13
|
+
|
|
14
|
+
jobs:
|
|
15
|
+
ManyLinux_x86_64:
|
|
16
|
+
uses: MartinPdeS/MPSActions/.github/workflows/publish_compiled_package_to_anaconda.yml@master
|
|
17
|
+
with:
|
|
18
|
+
python-versions: '["3.11"]'
|
|
19
|
+
secrets:
|
|
20
|
+
ANACONDA_API_TOKEN: ${{ secrets.ANACONDA_API_TOKEN }}
|
|
21
|
+
|
|
@@ -0,0 +1,25 @@
|
|
|
1
|
+
# Simple workflow for deploying static content to GitHub Pages
|
|
2
|
+
name: Deploy coverage
|
|
3
|
+
|
|
4
|
+
on:
|
|
5
|
+
push:
|
|
6
|
+
branches: [ "master" ]
|
|
7
|
+
tags:
|
|
8
|
+
- '*'
|
|
9
|
+
pull_request:
|
|
10
|
+
branches: [ "master" ]
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
# Sets permissions of the GITHUB_TOKEN to allow deployment to GitHub Pages
|
|
14
|
+
permissions:
|
|
15
|
+
contents: write
|
|
16
|
+
pages: write
|
|
17
|
+
id-token: write
|
|
18
|
+
|
|
19
|
+
jobs:
|
|
20
|
+
ManyLinux_x86_64:
|
|
21
|
+
uses: MartinPdeS/MPSActions/.github/workflows/publish_coverage.yml@master
|
|
22
|
+
with:
|
|
23
|
+
python-version: "3.11"
|
|
24
|
+
secrets:
|
|
25
|
+
_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
|
@@ -0,0 +1,24 @@
|
|
|
1
|
+
# Simple workflow for deploying static content to GitHub Pages
|
|
2
|
+
name: Deploy documentation to github page
|
|
3
|
+
|
|
4
|
+
on:
|
|
5
|
+
push:
|
|
6
|
+
branches: [ "master" ]
|
|
7
|
+
tags:
|
|
8
|
+
- '*'
|
|
9
|
+
pull_request:
|
|
10
|
+
branches: [ "master" ]
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
# Sets permissions of the GITHUB_TOKEN to allow deployment to GitHub Pages
|
|
14
|
+
permissions:
|
|
15
|
+
contents: write
|
|
16
|
+
pages: write
|
|
17
|
+
id-token: write
|
|
18
|
+
|
|
19
|
+
jobs:
|
|
20
|
+
ManyLinux_x86_64:
|
|
21
|
+
uses: MartinPdeS/MPSActions/.github/workflows/publish_documentation.yml@master
|
|
22
|
+
with:
|
|
23
|
+
python-version: "3.11"
|
|
24
|
+
package-name: "FlowCyPy"
|
|
@@ -0,0 +1,28 @@
|
|
|
1
|
+
# General patterns
|
|
2
|
+
*.DS_store
|
|
3
|
+
*.ipynb_checkpoints*
|
|
4
|
+
*__pycache__*
|
|
5
|
+
*.so
|
|
6
|
+
*.a
|
|
7
|
+
.coverage*
|
|
8
|
+
*.pytest_cache
|
|
9
|
+
htmlcov
|
|
10
|
+
dist
|
|
11
|
+
build
|
|
12
|
+
*.egg*
|
|
13
|
+
*.whl
|
|
14
|
+
CMakeCache.txt
|
|
15
|
+
CMakeFiles
|
|
16
|
+
install_manifest.txt
|
|
17
|
+
cmake_install.cmake
|
|
18
|
+
wheelhouse
|
|
19
|
+
|
|
20
|
+
# Docs-specific patterns
|
|
21
|
+
docs/source/gallery/*
|
|
22
|
+
!docs/source/gallery/index.rst
|
|
23
|
+
PyOptik/data/**/test*
|
|
24
|
+
PyOptik/data/**/example*
|
|
25
|
+
|
|
26
|
+
# Override for documentation_build
|
|
27
|
+
!documentation_build
|
|
28
|
+
!documentation_build/**
|
|
@@ -0,0 +1,35 @@
|
|
|
1
|
+
# .readthedocs.yml
|
|
2
|
+
# Read the Docs configuration file
|
|
3
|
+
# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details
|
|
4
|
+
|
|
5
|
+
# Required
|
|
6
|
+
version: 2
|
|
7
|
+
|
|
8
|
+
submodules:
|
|
9
|
+
include: all
|
|
10
|
+
recursive: true
|
|
11
|
+
|
|
12
|
+
build:
|
|
13
|
+
os: ubuntu-22.04
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
tools:
|
|
17
|
+
python: "3.10"
|
|
18
|
+
jobs:
|
|
19
|
+
pre_install:
|
|
20
|
+
- Xvfb :99 &
|
|
21
|
+
- export DISPLAY=:99
|
|
22
|
+
- cd docs && rm -rf source/gallery
|
|
23
|
+
- sleep 3
|
|
24
|
+
|
|
25
|
+
python:
|
|
26
|
+
install:
|
|
27
|
+
- method: pip
|
|
28
|
+
path: .
|
|
29
|
+
extra_requirements:
|
|
30
|
+
- documentation
|
|
31
|
+
|
|
32
|
+
sphinx:
|
|
33
|
+
builder: html
|
|
34
|
+
configuration: docs/source/conf.py
|
|
35
|
+
fail_on_warning: false
|
|
@@ -0,0 +1,15 @@
|
|
|
1
|
+
# Resolve the package version from the setuptools_scm generated module.
try:
    from ._version import version as __version__  # noqa: F401

except ImportError:
    # Fallback for raw source checkouts where _version.py has not been
    # generated (e.g. running without an installed build).
    __version__ = "0.0.0"

# Public API re-exports: unit registry/units first, then the simulation
# building blocks (cytometer, scatterer, detector, flow cell, source, noise).
from .units import ureg, watt, meter, second, liter, particle
from .cytometer import FlowCytometer
from .event_correlator import EventCorrelator
from .scatterer import Scatterer, CouplingModel
from .population import Population
from .detector import Detector
from .flow_cell import FlowCell
from .source import GaussianBeam
from .noises import NoiseSetting
|
|
@@ -0,0 +1,16 @@
|
|
|
1
|
+
# file generated by setuptools_scm
# don't change, don't track in version control
# NOTE: the TYPE_CHECKING dance below gives static type checkers a precise
# tuple type for VERSION_TUPLE while costing nothing at runtime.
TYPE_CHECKING = False
if TYPE_CHECKING:
    from typing import Tuple, Union
    VERSION_TUPLE = Tuple[Union[int, str], ...]
else:
    VERSION_TUPLE = object

version: str
__version__: str
__version_tuple__: VERSION_TUPLE
version_tuple: VERSION_TUPLE

__version__ = version = '0.5.0'
__version_tuple__ = version_tuple = (0, 5, 0)
|
|
@@ -0,0 +1,196 @@
|
|
|
1
|
+
from sklearn.cluster import KMeans
|
|
2
|
+
from sklearn.cluster import DBSCAN
|
|
3
|
+
import pandas as pd
|
|
4
|
+
from typing import List, Dict, Tuple
|
|
5
|
+
|
|
6
|
+
|
|
7
|
+
class BaseClassifier:
    def filter_dataframe(self, features: list, detectors: list = None) -> object:
        """
        Restrict the dataframe to the requested (detector, feature) columns.

        Parameters
        ----------
        features : list
            Feature names to keep. Options include 'Heights', 'Widths', 'Areas'.
        detectors : list, optional
            Detector names to keep; when omitted, every detector present in
            the dataframe's first column level is used.

        Returns
        -------
        DataFrame
            The dataframe restricted to the selected detectors and features.

        Raises
        ------
        ValueError
            If no (detector, feature) pair exists in the dataframe columns.
        """
        # Default to every detector found in the first column level.
        if detectors is None:
            detectors = self.dataframe.columns.get_level_values(0).unique().tolist()

        # Collect the column pairs that actually exist in the dataframe.
        columns = []
        for detector in detectors:
            for feature in features:
                if (detector, feature) in self.dataframe.columns:
                    columns.append((detector, feature))

        if not columns:
            raise ValueError("No matching features found for the given detectors and features.")

        return self.dataframe[columns]
|
|
44
|
+
|
|
45
|
+
|
|
46
|
+
class KmeansClassifier(BaseClassifier):
    """Assign cluster labels to events via KMeans, stored in a 'Label' column."""

    def __init__(self, dataframe: object) -> None:
        """
        Initialize the Classifier.

        Parameters
        ----------
        dataframe : DataFrame
            The input dataframe with multi-index columns.
        """
        self.dataframe = dataframe
        self.dataframe['Label'] = 0  # Initialize labels as 0

    def run(self, number_of_cluster: int, features: list = None, detectors: list = None, random_state: int = 42) -> None:
        """
        Run KMeans clustering on the selected features and detectors.

        Parameters
        ----------
        number_of_cluster : int
            Number of clusters for KMeans.
        features : list, optional
            Features to cluster on. Options include 'Heights', 'Widths',
            'Areas'. Defaults to ['Heights'].
        detectors : list, optional
            List of detectors to use. If None, use all detectors.
        random_state : int, optional
            Random state for KMeans, by default 42.
        """
        # BUG FIX: the previous signature used the mutable default
        # features=['Heights'], sharing one list object across all calls;
        # use a None sentinel instead (behavior-compatible).
        if features is None:
            features = ['Heights']

        # Filter the DataFrame
        X = self.filter_dataframe(features=features, detectors=detectors)

        # Strip Pint units before handing the data to scikit-learn.
        if hasattr(X, 'pint'):
            X = X.pint.dequantify()

        # Run KMeans
        kmeans = KMeans(n_clusters=number_of_cluster, random_state=random_state)
        self.dataframe['Label'] = kmeans.fit_predict(X)
|
|
86
|
+
|
|
87
|
+
class DBScanClassifier(BaseClassifier):
    """Assign cluster labels to events via DBSCAN, stored in a 'Label' column."""

    def __init__(self, dataframe: object) -> None:
        """
        Initialize the DBScanClassifier.

        Parameters
        ----------
        dataframe : DataFrame
            The input dataframe with multi-index columns.
        """
        self.dataframe = dataframe
        self.dataframe['Label'] = -1  # Initialize labels as -1 (noise for DBSCAN)

    def run(self, eps: float = 0.5, min_samples: int = 5, features: list = None, detectors: list = None) -> None:
        """
        Run DBSCAN clustering on the selected features and detectors.

        Parameters
        ----------
        eps : float, optional
            The maximum distance between two samples for them to be considered as in the same neighborhood, by default 0.5.
        min_samples : int, optional
            The number of samples in a neighborhood for a point to be considered a core point, by default 5.
        features : list, optional
            Features to cluster on. Options include 'Heights', 'Widths',
            'Areas'. Defaults to ['Heights'].
        detectors : list, optional
            List of detectors to use. If None, use all detectors.
        """
        # BUG FIX: replace the mutable default argument features=['Heights']
        # (one list shared across calls) with a None sentinel.
        if features is None:
            features = ['Heights']

        # Filter the DataFrame
        X = self.filter_dataframe(features=features, detectors=detectors)

        # Strip Pint units before handing the data to scikit-learn.
        if hasattr(X, 'pint'):
            X = X.pint.dequantify()

        # DBSCAN does not accept NaNs; treat missing values as zeros.
        X = X.fillna(0).to_numpy()

        # Run DBSCAN
        dbscan = DBSCAN(eps=eps, min_samples=min_samples)
        self.dataframe['Label'] = dbscan.fit_predict(X)
|
|
128
|
+
|
|
129
|
+
|
|
130
|
+
class RangeClassifier:
|
|
131
|
+
"""
|
|
132
|
+
A classifier for assigning population labels based on defined ranges.
|
|
133
|
+
|
|
134
|
+
Parameters
|
|
135
|
+
----------
|
|
136
|
+
dataframe : pd.DataFrame
|
|
137
|
+
The input dataframe with features to classify.
|
|
138
|
+
feature : str
|
|
139
|
+
The column name of the feature to classify.
|
|
140
|
+
|
|
141
|
+
Attributes
|
|
142
|
+
----------
|
|
143
|
+
dataframe : pd.DataFrame
|
|
144
|
+
The dataframe with an added 'Label' column.
|
|
145
|
+
ranges : List[Tuple[float, float, str]]
|
|
146
|
+
The list of ranges and their associated labels.
|
|
147
|
+
"""
|
|
148
|
+
|
|
149
|
+
def __init__(self, dataframe: pd.DataFrame) -> None:
|
|
150
|
+
"""
|
|
151
|
+
Initialize the classifier.
|
|
152
|
+
|
|
153
|
+
Parameters
|
|
154
|
+
----------
|
|
155
|
+
dataframe : pd.DataFrame
|
|
156
|
+
The input dataframe with features to classify.
|
|
157
|
+
feature : str
|
|
158
|
+
The column name of the feature to classify.
|
|
159
|
+
"""
|
|
160
|
+
self.dataframe = dataframe
|
|
161
|
+
self.ranges = [] # To store the ranges and their labels
|
|
162
|
+
|
|
163
|
+
def run(self, ranges: Dict[str, Tuple[float, float]]) -> None:
|
|
164
|
+
"""
|
|
165
|
+
Classify the dataframe by assigning population labels based on specified ranges applied to the index.
|
|
166
|
+
|
|
167
|
+
Parameters
|
|
168
|
+
----------
|
|
169
|
+
ranges : dict
|
|
170
|
+
A dictionary where keys are population names (labels) and values are tuples
|
|
171
|
+
specifying the (lower, upper) bounds of the range for that population.
|
|
172
|
+
|
|
173
|
+
Example
|
|
174
|
+
-------
|
|
175
|
+
>>> ranges = {
|
|
176
|
+
>>> 'Population 0': (0, 100),
|
|
177
|
+
>>> 'Population 1': (100, 150),
|
|
178
|
+
>>> 'Population 2': (150, 200)
|
|
179
|
+
>>> }
|
|
180
|
+
>>> classifier.run(ranges)
|
|
181
|
+
"""
|
|
182
|
+
# Create conditions and corresponding labels
|
|
183
|
+
conditions = []
|
|
184
|
+
labels = []
|
|
185
|
+
for label, (lower, upper) in ranges.items():
|
|
186
|
+
conditions.append((self.dataframe.index >= lower) & (self.dataframe.index < upper))
|
|
187
|
+
labels.append(label)
|
|
188
|
+
|
|
189
|
+
# Use np.select to efficiently apply conditions
|
|
190
|
+
self.dataframe['Label'] = pd.Series(
|
|
191
|
+
pd.cut(self.dataframe.index,
|
|
192
|
+
bins=[float('-inf')] + [upper for _, (_, upper) in ranges.items()],
|
|
193
|
+
labels=list(ranges.keys()),
|
|
194
|
+
include_lowest=True),
|
|
195
|
+
index=self.dataframe.index)
|
|
196
|
+
|
|
@@ -0,0 +1,47 @@
|
|
|
1
|
+
import numpy as np
|
|
2
|
+
from FlowCyPy import Scatterer, Detector
|
|
3
|
+
from FlowCyPy.source import BaseBeam
|
|
4
|
+
from FlowCyPy.units import watt, meter
|
|
5
|
+
|
|
6
|
+
|
|
7
|
+
def compute_detected_signal(source: BaseBeam, detector: Detector, scatterer: Scatterer, granularity: float = 1.0, A: float = 1.5, n: float = 2.0) -> float:
    """
    Empirical model for scattering intensity based on particle size, granularity, and detector angle.

    This function models forward scatter (FSC) as proportional to the particle's size squared and
    side scatter (SSC) as proportional to the granularity and modulated by angular dependence
    (sin^n(phi)). Granularity is a dimensionless measure of the particle's internal complexity or
    surface irregularities:

    - A default value of 1.0 is used for moderate granularity (e.g., typical white blood cells).
    - Granularity values < 1.0 represent smoother particles with less internal complexity (e.g., bacteria).
    - Granularity values > 1.0 represent particles with higher internal complexity or surface irregularities (e.g., granulocytes).

    Parameters
    ----------
    source : BaseBeam
        The illumination source (not used by this empirical model).
    detector : Detector
        The detector object providing ``phi_angle``. NOTE(review): the angle
        is assumed to be expressed in degrees, since it is passed through
        ``np.radians`` before ``np.sin`` below — TODO confirm against the
        Detector definition.
    scatterer : Scatterer
        Scatterer whose dataframe 'Size' column (converted to meters) drives the model.
    granularity : float, optional
        A measure of the particle's internal complexity or surface irregularities (dimensionless).
        Default is 1.0.
    A : float, optional
        Empirical scaling factor for angular dependence. Default is 1.5.
    n : float, optional
        Power of sine function for angular dependence. Default is 2.0.

    Returns
    -------
    float
        The detected scattering intensity (in watts) for each particle and the given detector.
    """
    size_list = scatterer.dataframe['Size'].pint.to(meter).values.numpy_data

    # Forward scatter is proportional to size^2
    fsc_intensity = size_list**2

    # Side scatter is proportional to granularity and modulated by angular dependence
    ssc_intensity = granularity * (1 + A * np.sin(np.radians(detector.phi_angle))**n) * np.ones_like(size_list)

    # BUG FIX: the original compared the degree-valued phi_angle directly
    # against np.radians(10) (~0.175 rad), mixing degrees with radians so the
    # FSC branch was effectively never taken for realistic forward angles.
    # Convert both sides to radians so the threshold really is 10 degrees.
    is_forward = np.radians(detector.phi_angle) < np.radians(10)

    return fsc_intensity * watt if is_forward else ssc_intensity * watt
|