singlebehaviorlab 2.0.0__tar.gz → 2.1.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (107) hide show
  1. {singlebehaviorlab-2.0.0/singlebehaviorlab.egg-info → singlebehaviorlab-2.1.0}/PKG-INFO +38 -1
  2. {singlebehaviorlab-2.0.0 → singlebehaviorlab-2.1.0}/README.md +37 -0
  3. {singlebehaviorlab-2.0.0 → singlebehaviorlab-2.1.0}/pyproject.toml +2 -2
  4. singlebehaviorlab-2.1.0/singlebehaviorlab/__init__.py +56 -0
  5. {singlebehaviorlab-2.0.0 → singlebehaviorlab-2.1.0}/singlebehaviorlab/__main__.py +10 -52
  6. singlebehaviorlab-2.1.0/singlebehaviorlab/backend/clustering.py +316 -0
  7. singlebehaviorlab-2.1.0/singlebehaviorlab/backend/inference.py +318 -0
  8. singlebehaviorlab-2.1.0/singlebehaviorlab/backend/registration.py +243 -0
  9. singlebehaviorlab-2.1.0/singlebehaviorlab/backend/segmentation.py +363 -0
  10. singlebehaviorlab-2.1.0/singlebehaviorlab/backend/training_runner.py +320 -0
  11. singlebehaviorlab-2.1.0/singlebehaviorlab/cli.py +551 -0
  12. singlebehaviorlab-2.1.0/singlebehaviorlab/config.py +51 -0
  13. {singlebehaviorlab-2.0.0 → singlebehaviorlab-2.1.0}/singlebehaviorlab/gui/segmentation_tracking_widget.py +69 -1
  14. {singlebehaviorlab-2.0.0 → singlebehaviorlab-2.1.0/singlebehaviorlab.egg-info}/PKG-INFO +38 -1
  15. {singlebehaviorlab-2.0.0 → singlebehaviorlab-2.1.0}/singlebehaviorlab.egg-info/SOURCES.txt +7 -0
  16. singlebehaviorlab-2.1.0/singlebehaviorlab.egg-info/entry_points.txt +2 -0
  17. singlebehaviorlab-2.0.0/singlebehaviorlab/__init__.py +0 -4
  18. singlebehaviorlab-2.0.0/singlebehaviorlab.egg-info/entry_points.txt +0 -2
  19. {singlebehaviorlab-2.0.0 → singlebehaviorlab-2.1.0}/LICENSE +0 -0
  20. {singlebehaviorlab-2.0.0 → singlebehaviorlab-2.1.0}/setup.cfg +0 -0
  21. {singlebehaviorlab-2.0.0 → singlebehaviorlab-2.1.0}/singlebehaviorlab/_paths.py +0 -0
  22. {singlebehaviorlab-2.0.0 → singlebehaviorlab-2.1.0}/singlebehaviorlab/backend/__init__.py +0 -0
  23. {singlebehaviorlab-2.0.0 → singlebehaviorlab-2.1.0}/singlebehaviorlab/backend/augmentations.py +0 -0
  24. {singlebehaviorlab-2.0.0 → singlebehaviorlab-2.1.0}/singlebehaviorlab/backend/data_store.py +0 -0
  25. {singlebehaviorlab-2.0.0 → singlebehaviorlab-2.1.0}/singlebehaviorlab/backend/model.py +0 -0
  26. {singlebehaviorlab-2.0.0 → singlebehaviorlab-2.1.0}/singlebehaviorlab/backend/train.py +0 -0
  27. {singlebehaviorlab-2.0.0 → singlebehaviorlab-2.1.0}/singlebehaviorlab/backend/uncertainty.py +0 -0
  28. {singlebehaviorlab-2.0.0 → singlebehaviorlab-2.1.0}/singlebehaviorlab/backend/video_processor.py +0 -0
  29. {singlebehaviorlab-2.0.0 → singlebehaviorlab-2.1.0}/singlebehaviorlab/backend/video_utils.py +0 -0
  30. {singlebehaviorlab-2.0.0 → singlebehaviorlab-2.1.0}/singlebehaviorlab/data/config/config.yaml +0 -0
  31. {singlebehaviorlab-2.0.0 → singlebehaviorlab-2.1.0}/singlebehaviorlab/data/training_profiles.json +0 -0
  32. {singlebehaviorlab-2.0.0 → singlebehaviorlab-2.1.0}/singlebehaviorlab/gui/__init__.py +0 -0
  33. {singlebehaviorlab-2.0.0 → singlebehaviorlab-2.1.0}/singlebehaviorlab/gui/analysis_widget.py +0 -0
  34. {singlebehaviorlab-2.0.0 → singlebehaviorlab-2.1.0}/singlebehaviorlab/gui/attention_export.py +0 -0
  35. {singlebehaviorlab-2.0.0 → singlebehaviorlab-2.1.0}/singlebehaviorlab/gui/clip_extraction_widget.py +0 -0
  36. {singlebehaviorlab-2.0.0 → singlebehaviorlab-2.1.0}/singlebehaviorlab/gui/clustering_widget.py +0 -0
  37. {singlebehaviorlab-2.0.0 → singlebehaviorlab-2.1.0}/singlebehaviorlab/gui/inference_popups.py +0 -0
  38. {singlebehaviorlab-2.0.0 → singlebehaviorlab-2.1.0}/singlebehaviorlab/gui/inference_widget.py +0 -0
  39. {singlebehaviorlab-2.0.0 → singlebehaviorlab-2.1.0}/singlebehaviorlab/gui/inference_worker.py +0 -0
  40. {singlebehaviorlab-2.0.0 → singlebehaviorlab-2.1.0}/singlebehaviorlab/gui/labeling_widget.py +0 -0
  41. {singlebehaviorlab-2.0.0 → singlebehaviorlab-2.1.0}/singlebehaviorlab/gui/main_window.py +0 -0
  42. {singlebehaviorlab-2.0.0 → singlebehaviorlab-2.1.0}/singlebehaviorlab/gui/metadata_management_widget.py +0 -0
  43. {singlebehaviorlab-2.0.0 → singlebehaviorlab-2.1.0}/singlebehaviorlab/gui/motion_tracking.py +0 -0
  44. {singlebehaviorlab-2.0.0 → singlebehaviorlab-2.1.0}/singlebehaviorlab/gui/overlay_export.py +0 -0
  45. {singlebehaviorlab-2.0.0 → singlebehaviorlab-2.1.0}/singlebehaviorlab/gui/plot_integration.py +0 -0
  46. {singlebehaviorlab-2.0.0 → singlebehaviorlab-2.1.0}/singlebehaviorlab/gui/qt_helpers.py +0 -0
  47. {singlebehaviorlab-2.0.0 → singlebehaviorlab-2.1.0}/singlebehaviorlab/gui/registration_widget.py +0 -0
  48. {singlebehaviorlab-2.0.0 → singlebehaviorlab-2.1.0}/singlebehaviorlab/gui/review_widget.py +0 -0
  49. {singlebehaviorlab-2.0.0 → singlebehaviorlab-2.1.0}/singlebehaviorlab/gui/tab_tutorial_dialog.py +0 -0
  50. {singlebehaviorlab-2.0.0 → singlebehaviorlab-2.1.0}/singlebehaviorlab/gui/timeline_themes.py +0 -0
  51. {singlebehaviorlab-2.0.0 → singlebehaviorlab-2.1.0}/singlebehaviorlab/gui/training_profiles.py +0 -0
  52. {singlebehaviorlab-2.0.0 → singlebehaviorlab-2.1.0}/singlebehaviorlab/gui/training_widget.py +0 -0
  53. {singlebehaviorlab-2.0.0 → singlebehaviorlab-2.1.0}/singlebehaviorlab/gui/video_utils.py +0 -0
  54. {singlebehaviorlab-2.0.0 → singlebehaviorlab-2.1.0}/singlebehaviorlab/licenses/SAM2-LICENSE +0 -0
  55. {singlebehaviorlab-2.0.0 → singlebehaviorlab-2.1.0}/singlebehaviorlab/licenses/VideoPrism-LICENSE +0 -0
  56. {singlebehaviorlab-2.0.0 → singlebehaviorlab-2.1.0}/singlebehaviorlab.egg-info/dependency_links.txt +0 -0
  57. {singlebehaviorlab-2.0.0 → singlebehaviorlab-2.1.0}/singlebehaviorlab.egg-info/requires.txt +0 -0
  58. {singlebehaviorlab-2.0.0 → singlebehaviorlab-2.1.0}/singlebehaviorlab.egg-info/top_level.txt +0 -0
  59. {singlebehaviorlab-2.0.0 → singlebehaviorlab-2.1.0}/tests/test_clustering_smoke.py +0 -0
  60. {singlebehaviorlab-2.0.0 → singlebehaviorlab-2.1.0}/tests/test_config.py +0 -0
  61. {singlebehaviorlab-2.0.0 → singlebehaviorlab-2.1.0}/tests/test_motion_tracking.py +0 -0
  62. {singlebehaviorlab-2.0.0 → singlebehaviorlab-2.1.0}/tests/test_paths.py +0 -0
  63. {singlebehaviorlab-2.0.0 → singlebehaviorlab-2.1.0}/tests/test_sam2_smoke.py +0 -0
  64. {singlebehaviorlab-2.0.0 → singlebehaviorlab-2.1.0}/third_party/sam2_backend/sam2/__init__.py +0 -0
  65. {singlebehaviorlab-2.0.0 → singlebehaviorlab-2.1.0}/third_party/sam2_backend/sam2/automatic_mask_generator.py +0 -0
  66. {singlebehaviorlab-2.0.0 → singlebehaviorlab-2.1.0}/third_party/sam2_backend/sam2/benchmark.py +0 -0
  67. {singlebehaviorlab-2.0.0 → singlebehaviorlab-2.1.0}/third_party/sam2_backend/sam2/build_sam.py +0 -0
  68. {singlebehaviorlab-2.0.0 → singlebehaviorlab-2.1.0}/third_party/sam2_backend/sam2/configs/sam2/sam2_hiera_b+.yaml +0 -0
  69. {singlebehaviorlab-2.0.0 → singlebehaviorlab-2.1.0}/third_party/sam2_backend/sam2/configs/sam2/sam2_hiera_l.yaml +0 -0
  70. {singlebehaviorlab-2.0.0 → singlebehaviorlab-2.1.0}/third_party/sam2_backend/sam2/configs/sam2/sam2_hiera_s.yaml +0 -0
  71. {singlebehaviorlab-2.0.0 → singlebehaviorlab-2.1.0}/third_party/sam2_backend/sam2/configs/sam2/sam2_hiera_t.yaml +0 -0
  72. {singlebehaviorlab-2.0.0 → singlebehaviorlab-2.1.0}/third_party/sam2_backend/sam2/configs/sam2.1/sam2.1_hiera_b+.yaml +0 -0
  73. {singlebehaviorlab-2.0.0 → singlebehaviorlab-2.1.0}/third_party/sam2_backend/sam2/configs/sam2.1/sam2.1_hiera_l.yaml +0 -0
  74. {singlebehaviorlab-2.0.0 → singlebehaviorlab-2.1.0}/third_party/sam2_backend/sam2/configs/sam2.1/sam2.1_hiera_s.yaml +0 -0
  75. {singlebehaviorlab-2.0.0 → singlebehaviorlab-2.1.0}/third_party/sam2_backend/sam2/configs/sam2.1/sam2.1_hiera_t.yaml +0 -0
  76. {singlebehaviorlab-2.0.0 → singlebehaviorlab-2.1.0}/third_party/sam2_backend/sam2/configs/sam2.1_training/sam2.1_hiera_b+_MOSE_finetune.yaml +0 -0
  77. {singlebehaviorlab-2.0.0 → singlebehaviorlab-2.1.0}/third_party/sam2_backend/sam2/modeling/__init__.py +0 -0
  78. {singlebehaviorlab-2.0.0 → singlebehaviorlab-2.1.0}/third_party/sam2_backend/sam2/modeling/backbones/__init__.py +0 -0
  79. {singlebehaviorlab-2.0.0 → singlebehaviorlab-2.1.0}/third_party/sam2_backend/sam2/modeling/backbones/hieradet.py +0 -0
  80. {singlebehaviorlab-2.0.0 → singlebehaviorlab-2.1.0}/third_party/sam2_backend/sam2/modeling/backbones/image_encoder.py +0 -0
  81. {singlebehaviorlab-2.0.0 → singlebehaviorlab-2.1.0}/third_party/sam2_backend/sam2/modeling/backbones/utils.py +0 -0
  82. {singlebehaviorlab-2.0.0 → singlebehaviorlab-2.1.0}/third_party/sam2_backend/sam2/modeling/memory_attention.py +0 -0
  83. {singlebehaviorlab-2.0.0 → singlebehaviorlab-2.1.0}/third_party/sam2_backend/sam2/modeling/memory_encoder.py +0 -0
  84. {singlebehaviorlab-2.0.0 → singlebehaviorlab-2.1.0}/third_party/sam2_backend/sam2/modeling/position_encoding.py +0 -0
  85. {singlebehaviorlab-2.0.0 → singlebehaviorlab-2.1.0}/third_party/sam2_backend/sam2/modeling/sam/__init__.py +0 -0
  86. {singlebehaviorlab-2.0.0 → singlebehaviorlab-2.1.0}/third_party/sam2_backend/sam2/modeling/sam/mask_decoder.py +0 -0
  87. {singlebehaviorlab-2.0.0 → singlebehaviorlab-2.1.0}/third_party/sam2_backend/sam2/modeling/sam/prompt_encoder.py +0 -0
  88. {singlebehaviorlab-2.0.0 → singlebehaviorlab-2.1.0}/third_party/sam2_backend/sam2/modeling/sam/transformer.py +0 -0
  89. {singlebehaviorlab-2.0.0 → singlebehaviorlab-2.1.0}/third_party/sam2_backend/sam2/modeling/sam2_base.py +0 -0
  90. {singlebehaviorlab-2.0.0 → singlebehaviorlab-2.1.0}/third_party/sam2_backend/sam2/modeling/sam2_utils.py +0 -0
  91. {singlebehaviorlab-2.0.0 → singlebehaviorlab-2.1.0}/third_party/sam2_backend/sam2/sam2_hiera_b+.yaml +0 -0
  92. {singlebehaviorlab-2.0.0 → singlebehaviorlab-2.1.0}/third_party/sam2_backend/sam2/sam2_hiera_l.yaml +0 -0
  93. {singlebehaviorlab-2.0.0 → singlebehaviorlab-2.1.0}/third_party/sam2_backend/sam2/sam2_hiera_s.yaml +0 -0
  94. {singlebehaviorlab-2.0.0 → singlebehaviorlab-2.1.0}/third_party/sam2_backend/sam2/sam2_hiera_t.yaml +0 -0
  95. {singlebehaviorlab-2.0.0 → singlebehaviorlab-2.1.0}/third_party/sam2_backend/sam2/sam2_image_predictor.py +0 -0
  96. {singlebehaviorlab-2.0.0 → singlebehaviorlab-2.1.0}/third_party/sam2_backend/sam2/sam2_video_predictor.py +0 -0
  97. {singlebehaviorlab-2.0.0 → singlebehaviorlab-2.1.0}/third_party/sam2_backend/sam2/sam2_video_predictor_legacy.py +0 -0
  98. {singlebehaviorlab-2.0.0 → singlebehaviorlab-2.1.0}/third_party/sam2_backend/sam2/utils/__init__.py +0 -0
  99. {singlebehaviorlab-2.0.0 → singlebehaviorlab-2.1.0}/third_party/sam2_backend/sam2/utils/amg.py +0 -0
  100. {singlebehaviorlab-2.0.0 → singlebehaviorlab-2.1.0}/third_party/sam2_backend/sam2/utils/misc.py +0 -0
  101. {singlebehaviorlab-2.0.0 → singlebehaviorlab-2.1.0}/third_party/sam2_backend/sam2/utils/transforms.py +0 -0
  102. {singlebehaviorlab-2.0.0 → singlebehaviorlab-2.1.0}/third_party/videoprism_backend/videoprism/__init__.py +0 -0
  103. {singlebehaviorlab-2.0.0 → singlebehaviorlab-2.1.0}/third_party/videoprism_backend/videoprism/encoders.py +0 -0
  104. {singlebehaviorlab-2.0.0 → singlebehaviorlab-2.1.0}/third_party/videoprism_backend/videoprism/layers.py +0 -0
  105. {singlebehaviorlab-2.0.0 → singlebehaviorlab-2.1.0}/third_party/videoprism_backend/videoprism/models.py +0 -0
  106. {singlebehaviorlab-2.0.0 → singlebehaviorlab-2.1.0}/third_party/videoprism_backend/videoprism/tokenizers.py +0 -0
  107. {singlebehaviorlab-2.0.0 → singlebehaviorlab-2.1.0}/third_party/videoprism_backend/videoprism/utils.py +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: singlebehaviorlab
3
- Version: 2.0.0
3
+ Version: 2.1.0
4
4
  Summary: Semi-automated behavioral video annotation, training, and analysis tool
5
5
  Author: Almir Aljovic
6
6
  Maintainer: Almir Aljovic
@@ -164,6 +164,43 @@ singlebehaviorlab
164
164
 
165
165
  Equivalent module form: `python -m singlebehaviorlab`.
166
166
 
167
+ ### Headless / server use (CLI)
168
+
169
+ The same `singlebehaviorlab` command also runs as a headless CLI when a subcommand is supplied. Use it to run the GPU-heavy pipeline steps on a remote machine without opening the GUI:
170
+
171
+ ```bash
172
+ # Train a classifier from an experiment directory
173
+ singlebehaviorlab train --experiment /path/to/my_experiment --profile balanced
174
+
175
+ # Run a trained model on a long video
176
+ singlebehaviorlab infer --experiment /path/to/my_experiment \
177
+ --model /path/to/my_experiment/models/behavior_heads/model.pt \
178
+ --video /data/recording.mp4 \
179
+ --out /data/recording_inference.json
180
+
181
+ # Extract VideoPrism embeddings from a video + mask
182
+ singlebehaviorlab register --experiment /path/to/my_experiment \
183
+ --video /data/recording.mp4 \
184
+ --mask /data/recording_masks.h5 \
185
+ --out /data/recording_matrix.npz
186
+
187
+ # Cluster an embedding matrix (loadable via the GUI's "Load Analysis State")
188
+ singlebehaviorlab cluster --matrix /data/recording_matrix.npz \
189
+ --metadata /data/recording_matrix_metadata.npz \
190
+ --out /data/recording_clusters.pkl
191
+
192
+ # Run SAM2 tracking from a prompts JSON exported in the Segmentation tab
193
+ singlebehaviorlab segment --video /data/recording.mp4 \
194
+ --prompts /data/prompts.json \
195
+ --out /data/recording_masks.h5
196
+ ```
197
+
198
+ Run `singlebehaviorlab <command> --help` for the full flag list on each subcommand. The GUI-only steps (labeling, refinement review, cluster inspection) still require the graphical interface; the CLI covers the batch-processing steps where no human input is needed.
199
+
200
+ > **Full CLI reference:** [**CLI.md**](CLI.md) — detailed per-command docs, file-format reference, Python API, and troubleshooting.
201
+ >
202
+ > **Notebook demos:** [**demo/**](demo/) — two Jupyter notebooks walking through behavior sequencing and segmentation/clustering end-to-end. Drop your own demo video + prompts into `demo/data/` and step through the cells.
203
+
167
204
  ---
168
205
 
169
206
  ## 4. First Launch
@@ -97,6 +97,43 @@ singlebehaviorlab
97
97
 
98
98
  Equivalent module form: `python -m singlebehaviorlab`.
99
99
 
100
+ ### Headless / server use (CLI)
101
+
102
+ The same `singlebehaviorlab` command also runs as a headless CLI when a subcommand is supplied. Use it to run the GPU-heavy pipeline steps on a remote machine without opening the GUI:
103
+
104
+ ```bash
105
+ # Train a classifier from an experiment directory
106
+ singlebehaviorlab train --experiment /path/to/my_experiment --profile balanced
107
+
108
+ # Run a trained model on a long video
109
+ singlebehaviorlab infer --experiment /path/to/my_experiment \
110
+ --model /path/to/my_experiment/models/behavior_heads/model.pt \
111
+ --video /data/recording.mp4 \
112
+ --out /data/recording_inference.json
113
+
114
+ # Extract VideoPrism embeddings from a video + mask
115
+ singlebehaviorlab register --experiment /path/to/my_experiment \
116
+ --video /data/recording.mp4 \
117
+ --mask /data/recording_masks.h5 \
118
+ --out /data/recording_matrix.npz
119
+
120
+ # Cluster an embedding matrix (loadable via the GUI's "Load Analysis State")
121
+ singlebehaviorlab cluster --matrix /data/recording_matrix.npz \
122
+ --metadata /data/recording_matrix_metadata.npz \
123
+ --out /data/recording_clusters.pkl
124
+
125
+ # Run SAM2 tracking from a prompts JSON exported in the Segmentation tab
126
+ singlebehaviorlab segment --video /data/recording.mp4 \
127
+ --prompts /data/prompts.json \
128
+ --out /data/recording_masks.h5
129
+ ```
130
+
131
+ Run `singlebehaviorlab <command> --help` for the full flag list on each subcommand. The GUI-only steps (labeling, refinement review, cluster inspection) still require the graphical interface; the CLI covers the batch-processing steps where no human input is needed.
132
+
133
+ > **Full CLI reference:** [**CLI.md**](CLI.md) — detailed per-command docs, file-format reference, Python API, and troubleshooting.
134
+ >
135
+ > **Notebook demos:** [**demo/**](demo/) — two Jupyter notebooks walking through behavior sequencing and segmentation/clustering end-to-end. Drop your own demo video + prompts into `demo/data/` and step through the cells.
136
+
100
137
  ---
101
138
 
102
139
  ## 4. First Launch
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
4
4
 
5
5
  [project]
6
6
  name = "singlebehaviorlab"
7
- version = "2.0.0"
7
+ version = "2.1.0"
8
8
  description = "Semi-automated behavioral video annotation, training, and analysis tool"
9
9
  readme = "README.md"
10
10
  license = { file = "LICENSE" }
@@ -54,7 +54,7 @@ Issues = "https://github.com/alms93/SingleBehaviorLab/issues"
54
54
  test = ["pytest", "pytest-cov"]
55
55
 
56
56
  [project.scripts]
57
- singlebehaviorlab = "singlebehaviorlab.__main__:main"
57
+ singlebehaviorlab = "singlebehaviorlab.cli:main"
58
58
 
59
59
  [tool.pytest.ini_options]
60
60
  testpaths = ["tests"]
@@ -0,0 +1,56 @@
1
+ """SingleBehaviorLab — behavioral video annotation and training tool.
2
+
3
+ A short Python API re-exports the backend pipeline functions at the package
4
+ top level, so the typical usage is::
5
+
6
+ import singlebehaviorlab as sbl
7
+
8
+ sbl.segment(video, prompts, out_masks)
9
+ sbl.register(video, out_masks, out_matrix)
10
+ sbl.cluster(out_matrix, out_pkl, metadata_path=out_metadata)
11
+ sbl.plot_umap_clusters(out_pkl, show=True, save="umap.pdf")
12
+
13
+ sbl.train(experiment_dir, profile="balanced")
14
+ sbl.infer(model, video, out_json, experiment_dir=experiment_dir)
15
+
16
+ The re-exports use lazy attribute loading (PEP 562 ``__getattr__``) so that
17
+ ``import singlebehaviorlab`` does not pull in torch, jax, tensorflow, sam2,
18
+ or videoprism. Each symbol triggers its underlying backend module only on
19
+ first access.
20
+ """
21
+
22
+ __version__ = "2.1.0"
23
+ __author__ = "Almir Aljovic"
24
+
25
+ # Mapping of public name → (backend module, attribute name).
26
+ _PUBLIC_API = {
27
+ "segment": ("singlebehaviorlab.backend.segmentation", "run_sam2_segmentation"),
28
+ "load_prompts_json": ("singlebehaviorlab.backend.segmentation", "load_prompts_json"),
29
+ "save_prompts_json": ("singlebehaviorlab.backend.segmentation", "save_prompts_json"),
30
+ "register": ("singlebehaviorlab.backend.registration", "run_registration"),
31
+ "RegistrationParams": ("singlebehaviorlab.backend.registration", "RegistrationParams"),
32
+ "cluster": ("singlebehaviorlab.backend.clustering", "run_clustering"),
33
+ "ClusteringParams": ("singlebehaviorlab.backend.clustering", "ClusteringParams"),
34
+ "plot_umap_clusters": ("singlebehaviorlab.backend.clustering", "plot_umap_clusters"),
35
+ "infer": ("singlebehaviorlab.backend.inference", "run_inference_on_video"),
36
+ "train": ("singlebehaviorlab.backend.training_runner", "run_training_session"),
37
+ "load_config": ("singlebehaviorlab.config", "load_config"),
38
+ }
39
+
40
+ __all__ = ["__version__", "__author__", *sorted(_PUBLIC_API)]
41
+
42
+
43
def __getattr__(name):
    """Resolve a lazily re-exported symbol on first attribute access (PEP 562)."""
    import importlib

    try:
        target_module, target_attr = _PUBLIC_API[name]
    except KeyError as err:
        raise AttributeError(f"module 'singlebehaviorlab' has no attribute {name!r}") from err
    resolved = getattr(importlib.import_module(target_module), target_attr)
    # Cache the resolved object in module globals so later accesses bypass
    # __getattr__ entirely.
    globals()[name] = resolved
    return resolved
53
+
54
+
55
def __dir__():
    """Advertise the lazy re-exports alongside the regular module globals."""
    return sorted(set(globals()) | set(_PUBLIC_API))
@@ -7,71 +7,23 @@ Runs when invoked as:
7
7
  """
8
8
 
9
9
  import logging
10
- import sys
11
10
  import os
11
+ import sys
12
12
 
13
13
  os.environ.setdefault("TF_CPP_MIN_LOG_LEVEL", "3")
14
14
  os.environ.setdefault("TF_ENABLE_ONEDNN_OPTS", "0")
15
15
  os.environ.setdefault("GRPC_VERBOSITY", "ERROR")
16
16
  os.environ.setdefault("GLOG_minloglevel", "2")
17
17
 
18
- # Let JAX grow GPU memory on demand and leave headroom for PyTorch.
19
18
  os.environ["XLA_PYTHON_CLIENT_PREALLOCATE"] = "false"
20
19
  os.environ["XLA_PYTHON_CLIENT_MEM_FRACTION"] = "0.45"
21
- # Fall back to driver JIT compilation when ptxas/nvlink is unavailable.
22
20
  os.environ["XLA_FLAGS"] = "--xla_gpu_unsafe_fallback_to_driver_on_ptxas_not_found"
23
21
 
24
- import yaml
25
- from singlebehaviorlab._paths import get_default_config_path, get_experiments_dir
26
- from singlebehaviorlab.gui.main_window import MainWindow
27
-
28
-
29
- def load_config(config_path: str = None) -> dict:
30
- """Load configuration from YAML file."""
31
- if config_path is None:
32
- config_path = str(get_default_config_path())
33
-
34
- if os.path.exists(config_path):
35
- with open(config_path, 'r') as f:
36
- config = yaml.safe_load(f) or {}
37
- else:
38
- config = {}
39
-
40
- # For path keys that are missing or blank, resolve relative to the
41
- # experiments directory (pip install) or the package parent (source install).
42
- from singlebehaviorlab._paths import get_package_dir
43
- base_dir = str(get_package_dir().parent)
44
-
45
- defaults = {
46
- "data_dir": os.path.join(base_dir, "data"),
47
- "raw_videos_dir": os.path.join(base_dir, "data", "raw_videos"),
48
- "clips_dir": os.path.join(base_dir, "data", "clips"),
49
- "annotations_dir": os.path.join(base_dir, "data", "annotations"),
50
- "models_dir": os.path.join(base_dir, "models", "behavior_heads"),
51
- "backbone_dir": os.path.join(base_dir, "models", "videoprism_backbone"),
52
- "annotation_file": os.path.join(base_dir, "data", "annotations", "annotations.json"),
53
- }
54
-
55
- for key, value in defaults.items():
56
- if not config.get(key):
57
- config[key] = value
58
- elif not os.path.isabs(config[key]):
59
- config[key] = os.path.join(base_dir, config[key])
22
+ from singlebehaviorlab.config import load_config # noqa: E402 (re-exported for backward compat)
60
23
 
61
- experiments_dir = str(get_experiments_dir())
62
- if not config.get("experiments_dir"):
63
- config["experiments_dir"] = experiments_dir
64
- os.makedirs(config["experiments_dir"], exist_ok=True)
65
24
 
66
- config["config_path"] = config_path
67
- config.setdefault("experiment_name", None)
68
- config.setdefault("experiment_path", None)
69
-
70
- return config
71
-
72
-
73
- def main():
74
- """Application entry point."""
25
+ def run_gui_app():
26
+ """Launch the PyQt6 graphical interface."""
75
27
  logging.basicConfig(
76
28
  level=logging.WARNING,
77
29
  format="%(levelname)s [%(name)s] %(message)s",
@@ -79,6 +31,7 @@ def main():
79
31
  from PyQt6.QtWidgets import (
80
32
  QApplication, QDialog, QVBoxLayout, QPushButton, QLabel,
81
33
  )
34
+ from singlebehaviorlab.gui.main_window import MainWindow
82
35
 
83
36
  app = QApplication(sys.argv)
84
37
  app.setApplicationName("SingleBehaviorLab")
@@ -126,5 +79,10 @@ def main():
126
79
  sys.exit(app.exec())
127
80
 
128
81
 
82
def main():
    """Console-script entry point; delegates to the unified CLI dispatcher."""
    # Local import keeps singlebehaviorlab.cli (and its dependencies) out of
    # this module's import-time cost until the entry point actually runs.
    from singlebehaviorlab.cli import main as cli_main
    cli_main()
85
+
86
+
129
87
  if __name__ == "__main__":
130
88
  main()
@@ -0,0 +1,316 @@
1
+ """Headless UMAP + Leiden/HDBSCAN clustering for the CLI.
2
+
3
+ Produces a pickle file that the GUI Clustering tab loads via the existing
4
+ "Load Analysis State" action. The state schema matches
5
+ ``clustering_widget._save_analysis_state``.
6
+ """
7
+
8
+ from __future__ import annotations
9
+
10
+ import os
11
+ import pickle
12
+ from dataclasses import dataclass
13
+ from datetime import datetime
14
+ from pathlib import Path
15
+ from typing import Any, Callable, Optional, Union
16
+
17
+ import numpy as np
18
+ import pandas as pd
19
+
20
+ __all__ = ["ClusteringParams", "run_clustering", "plot_umap_clusters"]
21
+
22
+
23
@dataclass
class ClusteringParams:
    """Knobs for ``run_clustering``. Defaults mirror the GUI sliders."""

    # Clustering backend selected in run_clustering: "leiden" or "hdbscan".
    method: str = "leiden"
    # --- UMAP embedding ---
    n_components: int = 2
    n_neighbors: int = 15
    min_dist: float = 0.1
    # Feature scaling applied before UMAP/clustering (see _normalize).
    normalization: str = "standard"  # standard | minmax | l2 | none
    # --- Leiden ---
    leiden_resolution: float = 1.0
    leiden_k: int = 15  # neighbor count for the kNN graph fed to Leiden
    # --- HDBSCAN ---
    min_cluster_size: int = 10
    min_samples: int = 5
    hdbscan_epsilon: float = 0.0  # passed as cluster_selection_epsilon
37
+
38
+
39
def _load_matrix_metadata(
    matrix_path: str,
    metadata_path: Optional[str],
) -> tuple[pd.DataFrame, Optional[pd.DataFrame]]:
    """Load the feature matrix and, optionally, the per-sample metadata table.

    Args:
        matrix_path: ``.npz`` (keys: ``matrix``, ``feature_names``, and
            ``snippet_ids`` or legacy ``span_ids``), ``.parquet``, or CSV.
        metadata_path: Optional ``.npz``/``.parquet``/CSV metadata file.

    Returns:
        ``(matrix_df, metadata_df)`` where ``matrix_df`` is features × samples
        and ``metadata_df`` is ``None`` when no metadata path was given.
    """
    if matrix_path.endswith(".npz"):
        npz = np.load(matrix_path, allow_pickle=True)
        matrix = npz["matrix"]
        feature_names = npz["feature_names"]
        # Prefer "snippet_ids"; fall back to the legacy "span_ids" key.
        # Use explicit membership tests instead of NpzFile.get(): the Mapping
        # interface (and hence .get) is not available on all NumPy versions.
        if "snippet_ids" in npz:
            snippet_ids = npz["snippet_ids"]
        elif "span_ids" in npz:
            snippet_ids = npz["span_ids"]
        else:
            # Synthesize stable 1-based sample ids when neither key exists.
            snippet_ids = np.array([f"snippet{i + 1}" for i in range(matrix.shape[1])])
        matrix_df = pd.DataFrame(matrix, index=feature_names, columns=snippet_ids)
    elif matrix_path.endswith(".parquet"):
        matrix_df = pd.read_parquet(matrix_path, engine="pyarrow")
    else:
        matrix_df = pd.read_csv(matrix_path, index_col=0)

    metadata_df: Optional[pd.DataFrame] = None
    if metadata_path:
        if metadata_path.endswith(".npz"):
            meta_npz = np.load(metadata_path, allow_pickle=True)
            metadata_df = pd.DataFrame(meta_npz["metadata"], columns=meta_npz["columns"])
        elif metadata_path.endswith(".parquet"):
            metadata_df = pd.read_parquet(metadata_path, engine="pyarrow")
        else:
            metadata_df = pd.read_csv(metadata_path)
    return matrix_df, metadata_df
66
+
67
+
68
def _normalize(X: pd.DataFrame, method: str) -> pd.DataFrame:
    """Sanitize non-finite values, then scale features per *method*.

    ``method`` is matched case-insensitively: "standard", "minmax", "l2",
    or "none"/"raw" (no scaling). Raises ``ValueError`` for anything else.
    """
    # Map ±inf to NaN, then zero-fill, so the scalers never see non-finite values.
    cleaned = X.replace([np.inf, -np.inf], np.nan).fillna(0.0)
    key = method.lower()
    if key in ("none", "raw"):
        scaled = cleaned.values
    elif key == "standard":
        from sklearn.preprocessing import StandardScaler
        scaled = StandardScaler().fit_transform(cleaned)
    elif key == "minmax":
        from sklearn.preprocessing import MinMaxScaler
        scaled = MinMaxScaler().fit_transform(cleaned)
    elif key == "l2":
        from sklearn.preprocessing import Normalizer
        scaled = Normalizer(norm="l2").fit_transform(cleaned)
    else:
        raise ValueError(f"Unknown normalization method: {key}")
    return pd.DataFrame(scaled, index=cleaned.index, columns=cleaned.columns)
85
+
86
+
87
def _run_umap(
    data: pd.DataFrame,
    n_components: int,
    n_neighbors: int,
    min_dist: float,
) -> np.ndarray:
    """Embed *data* (samples × features) with UMAP and return the coordinates."""
    import umap

    mapper = umap.UMAP(
        n_components=n_components,
        n_neighbors=n_neighbors,
        min_dist=min_dist,
        random_state=42,  # fixed seed for reproducible layouts
    )
    return mapper.fit_transform(data.values)
101
+
102
+
103
def _run_leiden(data: pd.DataFrame, leiden_k: int, resolution: float) -> np.ndarray:
    """Leiden community detection on a kNN graph of the samples.

    Args:
        data: Samples × features frame (already normalized upstream).
        leiden_k: Neighbor count for the kNN connectivity graph.
        resolution: RBConfiguration resolution parameter (higher → more clusters).

    Returns:
        One integer cluster label per sample (partition membership).
    """
    from sklearn.neighbors import kneighbors_graph
    import igraph as ig
    import leidenalg as la

    # Sparse kNN connectivity matrix; nonzero() yields one (i, j) per stored edge.
    knn = kneighbors_graph(data.values, n_neighbors=leiden_k, mode="connectivity", include_self=False)
    sources, targets = knn.nonzero()
    edges = list(zip(sources.tolist(), targets.tolist()))
    # NOTE(review): mutual neighbors contribute both (i, j) and (j, i), so the
    # undirected graph may contain duplicate edges (a multigraph) — confirm
    # this matches the GUI's clustering path before deduplicating.
    graph = ig.Graph(n=data.shape[0], edges=edges, directed=False)
    partition = la.find_partition(
        graph,
        la.RBConfigurationVertexPartition,
        resolution_parameter=resolution,
    )
    return np.array(partition.membership)
118
+
119
+
120
def _run_hdbscan(data: pd.DataFrame, params: ClusteringParams) -> np.ndarray:
    """Density-based clustering of the samples; labels below 0 mark noise points."""
    import hdbscan

    model = hdbscan.HDBSCAN(
        min_cluster_size=params.min_cluster_size,
        min_samples=params.min_samples,
        cluster_selection_epsilon=params.hdbscan_epsilon,
    )
    return model.fit_predict(data.values)
128
+
129
+
130
def run_clustering(
    matrix_path: str | os.PathLike[str],
    output_path: str | os.PathLike[str],
    *,
    metadata_path: Optional[str | os.PathLike[str]] = None,
    params: Optional[ClusteringParams] = None,
    log_fn: Optional[Callable[[str], None]] = None,
) -> str:
    """Cluster a feature matrix and write a GUI-loadable analysis pickle.

    Pipeline: load matrix (+ optional metadata) → normalize → UMAP embed →
    Leiden or HDBSCAN cluster → pickle a state dict matching the schema of
    ``clustering_widget._save_analysis_state``.

    Args:
        matrix_path: ``.npz``/``.parquet``/CSV feature matrix (features × samples).
        output_path: Destination ``.pkl``; parent directories are created.
        metadata_path: Optional per-sample metadata table.
        params: Clustering knobs; defaults to ``ClusteringParams()``.
        log_fn: Optional callable receiving human-readable progress strings.

    Returns:
        The written pickle path as a string.

    Raises:
        ValueError: If ``params.method`` is neither "leiden" nor "hdbscan"
            (matched case-insensitively).
    """
    params = params or ClusteringParams()

    matrix_path_str = str(Path(matrix_path).expanduser().resolve())
    metadata_path_str: Optional[str] = None
    if metadata_path:
        metadata_path_str = str(Path(metadata_path).expanduser().resolve())

    output_path_obj = Path(output_path).expanduser().resolve()
    output_path_obj.parent.mkdir(parents=True, exist_ok=True)

    def _log(msg: str) -> None:
        # No-op unless the caller supplied a log sink.
        if log_fn:
            log_fn(msg)

    _log(f"Loading matrix: {matrix_path_str}")
    matrix_df, metadata_df = _load_matrix_metadata(matrix_path_str, metadata_path_str)
    _log(f"Matrix shape: {matrix_df.shape[0]} features × {matrix_df.shape[1]} samples")

    # Transpose to samples × features — the orientation UMAP and the
    # clusterers expect.
    processed = _normalize(matrix_df.T, params.normalization)
    _log(f"Processed shape: {processed.shape} (samples × features); normalization={params.normalization}")

    _log(
        f"Running UMAP (n_neighbors={params.n_neighbors}, "
        f"min_dist={params.min_dist}, n_components={params.n_components})"
    )
    embedding = _run_umap(
        processed,
        n_components=params.n_components,
        n_neighbors=params.n_neighbors,
        min_dist=params.min_dist,
    )

    # Lower-case once so the method string is matched case-insensitively,
    # consistent with _normalize's handling of its method argument.
    method = params.method.lower()
    if method == "leiden":
        _log(f"Running Leiden clustering (k={params.leiden_k}, resolution={params.leiden_resolution})")
        clusters = _run_leiden(processed, params.leiden_k, params.leiden_resolution)
    elif method == "hdbscan":
        _log(
            f"Running HDBSCAN (min_cluster_size={params.min_cluster_size}, "
            f"min_samples={params.min_samples}, epsilon={params.hdbscan_epsilon})"
        )
        clusters = _run_hdbscan(processed, params)
    else:
        raise ValueError(f"Unknown clustering method: {params.method}")

    unique_clusters = sorted(set(int(c) for c in clusters))
    _log(f"Clusters found: {len(unique_clusters)} (labels: {unique_clusters})")

    # Optional snippet-id → clip-path map so the GUI can play source clips;
    # only entries whose clip file actually exists on disk are kept.
    snippet_to_clip_map: dict[str, str] = {}
    if metadata_df is not None and "clip_path" in metadata_df.columns:
        snippet_col = (
            "snippet"
            if "snippet" in metadata_df.columns
            else ("span_id" if "span_id" in metadata_df.columns else None)
        )
        if snippet_col is not None:
            for _, row in metadata_df.iterrows():
                snippet_id = str(row.get(snippet_col, "")).strip()
                clip_path_val = str(row.get("clip_path", "")).strip()
                if snippet_id and clip_path_val and os.path.exists(clip_path_val):
                    snippet_to_clip_map[snippet_id] = clip_path_val
            _log(f"Built snippet→clip map with {len(snippet_to_clip_map)} entries from metadata.")

    # Key set and "version" must stay in sync with the GUI's analysis-state
    # loader (see module docstring).
    state = {
        "matrix_data": matrix_df,
        "metadata": metadata_df,
        "processed_data": processed,
        "embedding": embedding,
        "clusters": clusters,
        "selected_features": list(matrix_df.index),
        "snippet_to_clip_map": snippet_to_clip_map,
        "metadata_file_path": metadata_path_str,
        "timestamp": datetime.now().strftime("%Y%m%d_%H%M%S"),
        "version": "1.0",
    }

    with open(output_path_obj, "wb") as f:
        pickle.dump(state, f)
    _log(f"Wrote analysis state: {output_path_obj}")
    return str(output_path_obj)
221
+
222
+
223
def plot_umap_clusters(
    state: Union[dict[str, Any], str, os.PathLike[str]],
    *,
    show: bool = False,
    save: Optional[str | os.PathLike[str]] = None,
    title: Optional[str] = None,
    figsize: tuple[float, float] = (8.0, 6.0),
    point_size: float = 6.0,
):
    """Plot the UMAP embedding produced by ``run_clustering``.

    Follows the scanpy ``sc.pl.umap`` convention: pass ``show=True`` to pop up
    an interactive window, pass ``save='foo.pdf'`` to write the figure to
    disk (format inferred from the extension). Both can be combined.

    Args:
        state: Either an analysis-state dict (with ``embedding`` and
            ``clusters`` keys) or the path to a ``.pkl`` file produced by
            ``run_clustering``.
        show: Call ``plt.show()`` to open an interactive window.
        save: Destination path for the rendered figure. PDF, PNG, and SVG
            are all supported.
        title: Optional plot title.
        figsize: Figure size in inches.
        point_size: Scatter marker size.

    Returns:
        The matplotlib ``Figure`` object so the caller can further customise
        or embed it.

    Raises:
        TypeError: If ``state`` is neither a dict nor a path.
        ValueError: If the state lacks a valid embedding or cluster labels.
    """
    import matplotlib
    # Headless safety: force a non-interactive backend only when the caller
    # did not ask for a window and the current backend would need one.
    if not show and matplotlib.get_backend().lower() not in {"agg", "pdf", "svg", "ps"}:
        matplotlib.use("Agg")
    import matplotlib.pyplot as plt

    if isinstance(state, (str, os.PathLike)):
        # NOTE: unpickling executes arbitrary code; only load .pkl files you
        # produced yourself (e.g. via run_clustering).
        with open(state, "rb") as f:
            state = pickle.load(f)
    if not isinstance(state, dict):
        raise TypeError("state must be a dict or a path to a clustering .pkl file.")

    # Bug fix: np.asarray(None) yields a 0-d object array, never None, so the
    # previous `is None` guards were dead and a missing "clusters" key crashed
    # with TypeError from len() instead of a clear ValueError. Validate the
    # raw values before converting.
    raw_embedding = state.get("embedding")
    raw_clusters = state.get("clusters")
    if raw_embedding is None:
        raise ValueError("Analysis state is missing a valid 2D+ UMAP embedding.")
    if raw_clusters is None:
        raise ValueError("Cluster labels are missing or the wrong length.")
    embedding = np.asarray(raw_embedding)
    clusters = np.asarray(raw_clusters)
    if embedding.ndim != 2 or embedding.shape[1] < 2:
        raise ValueError("Analysis state is missing a valid 2D+ UMAP embedding.")
    if len(clusters) != embedding.shape[0]:
        raise ValueError("Cluster labels are missing or the wrong length.")

    unique_labels = sorted({int(c) for c in clusters})
    non_noise = [c for c in unique_labels if c >= 0]  # negative labels are noise
    cmap = plt.get_cmap("tab20", max(len(non_noise), 1))

    fig, ax = plt.subplots(figsize=figsize)
    noise_mask = clusters < 0
    if np.any(noise_mask):
        # Draw noise first, in gray, so real clusters render on top of it.
        ax.scatter(
            embedding[noise_mask, 0],
            embedding[noise_mask, 1],
            s=point_size,
            color="lightgray",
            alpha=0.5,
            label="Noise",
            linewidths=0,
        )
    for i, c in enumerate(non_noise):
        mask = clusters == c
        ax.scatter(
            embedding[mask, 0],
            embedding[mask, 1],
            s=point_size,
            color=cmap(i % cmap.N),  # wrap hues if there are more than 20 clusters
            alpha=0.85,
            label=f"Cluster {c}",
            linewidths=0,
        )

    ax.set_xlabel("UMAP1")
    ax.set_ylabel("UMAP2")
    if title:
        ax.set_title(title)
    else:
        ax.set_title(f"UMAP + {'Noise + ' if np.any(noise_mask) else ''}{len(non_noise)} clusters")
    ax.set_aspect("equal", adjustable="datalim")
    ax.legend(loc="best", fontsize="x-small", framealpha=0.85, markerscale=1.5)
    fig.tight_layout()

    if save:
        save_path = Path(save).expanduser().resolve()
        save_path.parent.mkdir(parents=True, exist_ok=True)
        fig.savefig(save_path, bbox_inches="tight")
    if show:
        plt.show()

    return fig