canns 0.13.1__py3-none-any.whl → 0.14.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- canns/analyzer/data/__init__.py +5 -1
- canns/analyzer/data/asa/__init__.py +27 -12
- canns/analyzer/data/asa/cohospace.py +336 -10
- canns/analyzer/data/asa/config.py +3 -0
- canns/analyzer/data/asa/embedding.py +48 -45
- canns/analyzer/data/asa/path.py +104 -2
- canns/analyzer/data/asa/plotting.py +88 -19
- canns/analyzer/data/asa/tda.py +11 -4
- canns/analyzer/data/cell_classification/__init__.py +97 -0
- canns/analyzer/data/cell_classification/core/__init__.py +26 -0
- canns/analyzer/data/cell_classification/core/grid_cells.py +633 -0
- canns/analyzer/data/cell_classification/core/grid_modules_leiden.py +288 -0
- canns/analyzer/data/cell_classification/core/head_direction.py +347 -0
- canns/analyzer/data/cell_classification/core/spatial_analysis.py +431 -0
- canns/analyzer/data/cell_classification/io/__init__.py +5 -0
- canns/analyzer/data/cell_classification/io/matlab_loader.py +417 -0
- canns/analyzer/data/cell_classification/utils/__init__.py +39 -0
- canns/analyzer/data/cell_classification/utils/circular_stats.py +383 -0
- canns/analyzer/data/cell_classification/utils/correlation.py +318 -0
- canns/analyzer/data/cell_classification/utils/geometry.py +442 -0
- canns/analyzer/data/cell_classification/utils/image_processing.py +416 -0
- canns/analyzer/data/cell_classification/visualization/__init__.py +19 -0
- canns/analyzer/data/cell_classification/visualization/grid_plots.py +292 -0
- canns/analyzer/data/cell_classification/visualization/hd_plots.py +200 -0
- canns/analyzer/metrics/__init__.py +2 -1
- canns/analyzer/visualization/core/config.py +46 -4
- canns/data/__init__.py +6 -1
- canns/data/datasets.py +154 -1
- canns/data/loaders.py +37 -0
- canns/pipeline/__init__.py +13 -9
- canns/pipeline/__main__.py +6 -0
- canns/pipeline/asa/runner.py +105 -41
- canns/pipeline/asa_gui/__init__.py +68 -0
- canns/pipeline/asa_gui/__main__.py +6 -0
- canns/pipeline/asa_gui/analysis_modes/__init__.py +42 -0
- canns/pipeline/asa_gui/analysis_modes/base.py +39 -0
- canns/pipeline/asa_gui/analysis_modes/batch_mode.py +21 -0
- canns/pipeline/asa_gui/analysis_modes/cohomap_mode.py +56 -0
- canns/pipeline/asa_gui/analysis_modes/cohospace_mode.py +194 -0
- canns/pipeline/asa_gui/analysis_modes/decode_mode.py +52 -0
- canns/pipeline/asa_gui/analysis_modes/fr_mode.py +81 -0
- canns/pipeline/asa_gui/analysis_modes/frm_mode.py +92 -0
- canns/pipeline/asa_gui/analysis_modes/gridscore_mode.py +123 -0
- canns/pipeline/asa_gui/analysis_modes/pathcompare_mode.py +199 -0
- canns/pipeline/asa_gui/analysis_modes/tda_mode.py +112 -0
- canns/pipeline/asa_gui/app.py +29 -0
- canns/pipeline/asa_gui/controllers/__init__.py +6 -0
- canns/pipeline/asa_gui/controllers/analysis_controller.py +59 -0
- canns/pipeline/asa_gui/controllers/preprocess_controller.py +89 -0
- canns/pipeline/asa_gui/core/__init__.py +15 -0
- canns/pipeline/asa_gui/core/cache.py +14 -0
- canns/pipeline/asa_gui/core/runner.py +1936 -0
- canns/pipeline/asa_gui/core/state.py +324 -0
- canns/pipeline/asa_gui/core/worker.py +260 -0
- canns/pipeline/asa_gui/main_window.py +184 -0
- canns/pipeline/asa_gui/models/__init__.py +7 -0
- canns/pipeline/asa_gui/models/config.py +14 -0
- canns/pipeline/asa_gui/models/job.py +31 -0
- canns/pipeline/asa_gui/models/presets.py +21 -0
- canns/pipeline/asa_gui/resources/__init__.py +16 -0
- canns/pipeline/asa_gui/resources/dark.qss +167 -0
- canns/pipeline/asa_gui/resources/light.qss +163 -0
- canns/pipeline/asa_gui/resources/styles.qss +130 -0
- canns/pipeline/asa_gui/utils/__init__.py +1 -0
- canns/pipeline/asa_gui/utils/formatters.py +15 -0
- canns/pipeline/asa_gui/utils/io_adapters.py +40 -0
- canns/pipeline/asa_gui/utils/validators.py +41 -0
- canns/pipeline/asa_gui/views/__init__.py +1 -0
- canns/pipeline/asa_gui/views/help_content.py +171 -0
- canns/pipeline/asa_gui/views/pages/__init__.py +6 -0
- canns/pipeline/asa_gui/views/pages/analysis_page.py +565 -0
- canns/pipeline/asa_gui/views/pages/preprocess_page.py +492 -0
- canns/pipeline/asa_gui/views/panels/__init__.py +1 -0
- canns/pipeline/asa_gui/views/widgets/__init__.py +21 -0
- canns/pipeline/asa_gui/views/widgets/artifacts_tab.py +44 -0
- canns/pipeline/asa_gui/views/widgets/drop_zone.py +80 -0
- canns/pipeline/asa_gui/views/widgets/file_list.py +27 -0
- canns/pipeline/asa_gui/views/widgets/gridscore_tab.py +308 -0
- canns/pipeline/asa_gui/views/widgets/help_dialog.py +27 -0
- canns/pipeline/asa_gui/views/widgets/image_tab.py +50 -0
- canns/pipeline/asa_gui/views/widgets/image_viewer.py +97 -0
- canns/pipeline/asa_gui/views/widgets/log_box.py +16 -0
- canns/pipeline/asa_gui/views/widgets/pathcompare_tab.py +200 -0
- canns/pipeline/asa_gui/views/widgets/popup_combo.py +25 -0
- canns/pipeline/gallery/__init__.py +15 -5
- canns/pipeline/gallery/__main__.py +11 -0
- canns/pipeline/gallery/app.py +705 -0
- canns/pipeline/gallery/runner.py +790 -0
- canns/pipeline/gallery/state.py +51 -0
- canns/pipeline/gallery/styles.tcss +123 -0
- canns/pipeline/launcher.py +81 -0
- {canns-0.13.1.dist-info → canns-0.14.0.dist-info}/METADATA +11 -1
- canns-0.14.0.dist-info/RECORD +163 -0
- canns-0.14.0.dist-info/entry_points.txt +5 -0
- canns/pipeline/_base.py +0 -50
- canns-0.13.1.dist-info/RECORD +0 -89
- canns-0.13.1.dist-info/entry_points.txt +0 -3
- {canns-0.13.1.dist-info → canns-0.14.0.dist-info}/WHEEL +0 -0
- {canns-0.13.1.dist-info → canns-0.14.0.dist-info}/licenses/LICENSE +0 -0
canns/data/datasets.py
CHANGED

@@ -38,6 +38,7 @@ DEFAULT_DATA_DIR = Path.home() / ".canns" / "data"
 # URLs for datasets on Hugging Face
 HUGGINGFACE_REPO = "canns-team/data-analysis-datasets"
 BASE_URL = f"https://huggingface.co/datasets/{HUGGINGFACE_REPO}/resolve/main/"
+LEFT_RIGHT_DATASET_DIR = "Left_Right_data_of"

 # Dataset registry with metadata
 DATASETS = {
@@ -68,6 +69,16 @@ DATASETS = {
         "sha256": None,
         "url": f"{BASE_URL}grid_2.npz",
     },
+    "left_right_data_of": {
+        "filename": LEFT_RIGHT_DATASET_DIR,
+        "description": "ASA type data from Left-Right sweep paper",
+        "size_mb": 604.0,
+        "format": "directory",
+        "usage": "ASA analysis, left-right sweep sessions",
+        "sha256": None,
+        "url": f"{BASE_URL}{LEFT_RIGHT_DATASET_DIR}/",
+        "is_collection": True,
+    },
 }


@@ -130,7 +141,10 @@ def list_datasets() -> None:
     print("=" * 60)

     for key, info in DATASETS.items():
-        status = "Available" if info["url"] else "Setup required"
+        if info.get("is_collection"):
+            status = "Collection (use session getter)"
+        else:
+            status = "Available" if info["url"] else "Setup required"
         print(f"\nDataset: {key}")
         print(f"  File: {info['filename']}")
         print(f"  Size: {info['size_mb']} MB")
@@ -162,6 +176,11 @@ def download_dataset(dataset_key: str, force: bool = False) -> Path | None:

     info = DATASETS[dataset_key]

+    if info.get("is_collection"):
+        print(f"{dataset_key} is a dataset collection.")
+        print("Use get_left_right_data_session(session_id) to download a session.")
+        return None
+
     if not info["url"]:
         print(f"{dataset_key} not yet available for download")
         print("Please use setup_local_datasets() to copy from local repository")
@@ -213,6 +232,10 @@ def get_dataset_path(dataset_key: str, auto_setup: bool = True) -> Path | None:
     if dataset_key not in DATASETS:
         print(f"Unknown dataset: {dataset_key}")
         return None
+    if DATASETS[dataset_key].get("is_collection"):
+        print(f"{dataset_key} is a dataset collection.")
+        print("Use get_left_right_data_session(session_id) to access session files.")
+        return None

     data_dir = get_data_dir()
     filepath = data_dir / DATASETS[dataset_key]["filename"]
@@ -236,6 +259,136 @@ def get_dataset_path(dataset_key: str, auto_setup: bool = True) -> Path | None:
         return None


+def get_left_right_data_session(
+    session_id: str, auto_download: bool = True, force: bool = False
+) -> dict[str, Path | list[Path] | None] | None:
+    """
+    Download and return files for a Left_Right_data_of session.
+
+    Parameters
+    ----------
+    session_id : str
+        Session folder name, e.g. "24365_2".
+    auto_download : bool
+        Whether to download missing files automatically.
+    force : bool
+        Whether to force re-download of existing files.
+
+    Returns
+    -------
+    dict or None
+        Mapping with keys: "manifest", "full_file", "module_files".
+    """
+    if not session_id:
+        raise ValueError("session_id must be non-empty")
+
+    session_dir = get_data_dir() / LEFT_RIGHT_DATASET_DIR / session_id
+    session_dir.mkdir(parents=True, exist_ok=True)
+
+    manifest_filename = f"{session_id}_ASA_manifest.json"
+    manifest_url = f"{BASE_URL}{LEFT_RIGHT_DATASET_DIR}/{session_id}/{manifest_filename}"
+    manifest_path = session_dir / manifest_filename
+
+    if auto_download and (force or not manifest_path.exists()):
+        if not download_file_with_progress(manifest_url, manifest_path):
+            print(f"Failed to download manifest for session {session_id}")
+            return None
+
+    if not manifest_path.exists():
+        print(f"Manifest not found for session {session_id}")
+        return None
+
+    import json
+
+    with open(manifest_path) as f:
+        manifest = json.load(f)
+
+    full_file = manifest.get("full_file")
+    module_files = manifest.get("module_files", [])
+    requested_files: list[str] = []
+
+    if isinstance(full_file, str):
+        requested_files.append(Path(full_file).name)
+
+    if isinstance(module_files, list):
+        for module_file in module_files:
+            if isinstance(module_file, str):
+                requested_files.append(Path(module_file).name)
+
+    # De-duplicate while preserving order
+    seen: set[str] = set()
+    unique_files: list[str] = []
+    for filename in requested_files:
+        if filename and filename not in seen:
+            seen.add(filename)
+            unique_files.append(filename)
+
+    for filename in unique_files:
+        file_path = session_dir / filename
+        if auto_download and (force or not file_path.exists()):
+            file_url = f"{BASE_URL}{LEFT_RIGHT_DATASET_DIR}/{session_id}/{filename}"
+            if not download_file_with_progress(file_url, file_path):
+                print(f"Failed to download {filename} for session {session_id}")
+                return None
+
+    return {
+        "manifest": manifest_path,
+        "full_file": session_dir / Path(full_file).name if isinstance(full_file, str) else None,
+        "module_files": [
+            session_dir / Path(module_file).name
+            for module_file in module_files
+            if isinstance(module_file, str)
+        ],
+    }
+
+
+def get_left_right_npz(
+    session_id: str, filename: str, auto_download: bool = True, force: bool = False
+) -> Path | None:
+    """
+    Download and return a specific Left_Right_data_of NPZ file.
+
+    Parameters
+    ----------
+    session_id : str
+        Session folder name, e.g. "26034_3".
+    filename : str
+        File name inside the session folder, e.g.
+        "26034_3_ASA_mec_gridModule02_n104_cm.npz".
+    auto_download : bool
+        Whether to download the file if missing.
+    force : bool
+        Whether to force re-download of existing files.
+
+    Returns
+    -------
+    Path or None
+        Path to the requested file if available, None otherwise.
+    """
+    if not session_id:
+        raise ValueError("session_id must be non-empty")
+    if not filename:
+        raise ValueError("filename must be non-empty")
+
+    safe_name = Path(filename).name
+    session_dir = get_data_dir() / LEFT_RIGHT_DATASET_DIR / session_id
+    session_dir.mkdir(parents=True, exist_ok=True)
+
+    file_path = session_dir / safe_name
+    if file_path.exists() and not force:
+        return file_path
+
+    if not auto_download:
+        return None
+
+    file_url = f"{BASE_URL}{LEFT_RIGHT_DATASET_DIR}/{session_id}/{safe_name}"
+    if not download_file_with_progress(file_url, file_path):
+        print(f"Failed to download {safe_name} for session {session_id}")
+        return None
+
+    return file_path
+
+
 def detect_file_type(filepath: Path) -> str:
     """Detect file type based on extension."""
     suffix = filepath.suffix.lower()
canns/data/loaders.py
CHANGED

@@ -211,6 +211,43 @@ def load_grid_data(
         return None


+def load_left_right_npz(
+    session_id: str, filename: str, auto_download: bool = True, force: bool = False
+) -> dict[str, Any] | None:
+    """
+    Load a Left_Right_data_of NPZ file.
+
+    Parameters
+    ----------
+    session_id : str
+        Session folder name, e.g. "26034_3".
+    filename : str
+        File name inside the session folder.
+    auto_download : bool
+        Whether to download the file if missing.
+    force : bool
+        Whether to force re-download of existing files.
+
+    Returns
+    -------
+    dict or None
+        Dictionary of npz arrays if successful, None otherwise.
+    """
+    try:
+        path = _datasets.get_left_right_npz(
+            session_id=session_id,
+            filename=filename,
+            auto_download=auto_download,
+            force=force,
+        )
+        if path is None:
+            return None
+        return dict(np.load(path, allow_pickle=True))
+    except Exception as e:
+        print(f"Failed to load Left-Right npz {session_id}/{filename}: {e}")
+        return None
+
+
 def validate_roi_data(data: np.ndarray) -> bool:
     """
     Validate ROI data format for 1D CANN analysis.
canns/pipeline/__init__.py
CHANGED

@@ -1,17 +1,21 @@
-"""
-CANNs Pipeline Module
+"""CANNs pipeline entrypoints."""

-High-level pipelines for common analysis workflows, designed to make CANN models
-accessible to experimental researchers without requiring detailed knowledge of
-the underlying implementations.
-"""
-
-from ._base import Pipeline
 from .asa import ASAApp
 from .asa import main as asa_main
+from .gallery import GalleryApp
+from .gallery import main as gallery_main
+from .launcher import main as launcher_main
+
+try:
+    from .asa_gui import ASAGuiApp
+except Exception:  # PySide6 may be missing
+    ASAGuiApp = None  # type: ignore

 __all__ = [
-    "Pipeline",
     "ASAApp",
     "asa_main",
+    "GalleryApp",
+    "gallery_main",
+    "launcher_main",
+    "ASAGuiApp",
 ]
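Since `_base.py` and the `Pipeline` export are removed and `ASAGuiApp` is bound to `None` when PySide6 is not installed, callers should guard on the symbol rather than catching an ImportError. A small sketch of that check:

```python
from canns.pipeline import ASAGuiApp

if ASAGuiApp is None:
    # The optional GUI dependency (PySide6) is missing.
    print("ASA GUI unavailable; install the GUI extra, e.g. pip install canns[gui]")
else:
    # Constructing the window needs a running QApplication;
    # canns.pipeline.asa_gui.main() performs that setup (see the new module below).
    from canns.pipeline.asa_gui import main as gui_main

    raise SystemExit(gui_main())
```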
canns/pipeline/asa/runner.py
CHANGED

@@ -496,6 +496,7 @@ class PipelineRunner:
             do_shuffle=params.get("do_shuffle", False),
             num_shuffles=params.get("num_shuffles", 1000),
             progress_bar=False,
+            standardize=False,
         )

         log_callback("Computing persistent homology...")
@@ -682,9 +683,11 @@
     ) -> dict[str, Path]:
         """Run path comparison visualization."""
         from canns.analyzer.data.asa import (
-
+            align_coords_to_position_1d,
+            align_coords_to_position_2d,
             apply_angle_scale,
-
+            plot_path_compare_1d,
+            plot_path_compare_2d,
         )
         from canns.analyzer.data.asa.path import (
             find_coords_matrix,
@@ -731,7 +734,7 @@
             idx = max(0, dim - 1)
             if idx >= coords_raw.shape[1]:
                 raise ProcessingError(f"dim out of range for coords shape {coords_raw.shape}")
-
+            coords1 = coords_raw[:, idx]
         else:
             idx1 = max(0, dim1 - 1)
             idx2 = max(0, dim2 - 1)
@@ -753,15 +756,26 @@
         times_box = None

         log_callback("Aligning decoded coordinates to position...")
-
-
-
-
-
-
-
-
-
+        if dim_mode == "1d":
+            t_use, x_use, y_use, coords_use, _ = align_coords_to_position_1d(
+                t_full=t_full,
+                x_full=x_full,
+                y_full=y_full,
+                coords1=coords1,
+                use_box=use_box,
+                times_box=times_box,
+                interp_to_full=interp_full,
+            )
+        else:
+            t_use, x_use, y_use, coords_use, _ = align_coords_to_position_2d(
+                t_full=t_full,
+                x_full=x_full,
+                y_full=y_full,
+                coords2=coords2,
+                use_box=use_box,
+                times_box=times_box,
+                interp_to_full=interp_full,
+            )
         scale = str(angle_scale) if str(angle_scale) in {"rad", "deg", "unit", "auto"} else "rad"
         coords_use = apply_angle_scale(coords_use, scale)

@@ -809,8 +823,12 @@
             return {"path_compare": out_path}

         log_callback("Generating path comparison...")
-
-
+        if dim_mode == "1d":
+            config = PlotConfigs.path_compare_1d(show=False, save_path=str(out_path))
+            plot_path_compare_1d(x_use, y_use, coords_use, config=config)
+        else:
+            config = PlotConfigs.path_compare_2d(show=False, save_path=str(out_path))
+            plot_path_compare_2d(x_use, y_use, coords_use, config=config)

         self._write_cache_meta(self._stage_cache_path(out_dir), {"hash": stage_hash})
         return {"path_compare": out_path}
@@ -820,9 +838,12 @@
     ) -> dict[str, Path]:
         """Run cohomology space visualization."""
         from canns.analyzer.data.asa import (
-
-
-
+            plot_cohospace_neuron_1d,
+            plot_cohospace_neuron_2d,
+            plot_cohospace_population_1d,
+            plot_cohospace_population_2d,
+            plot_cohospace_trajectory_1d,
+            plot_cohospace_trajectory_2d,
         )
         from canns.analyzer.data.asa.cohospace import (
             plot_cohospace_neuron_skewed,
@@ -865,8 +886,7 @@
             idx = max(0, dim - 1)
             if idx >= arr.shape[1]:
                 raise ProcessingError(f"dim out of range for coords shape {arr.shape}")
-
-            return np.hstack([one, np.zeros_like(one)])
+            return arr[:, idx]
         idx1 = max(0, dim1 - 1)
         idx2 = max(0, dim2 - 1)
         if idx1 >= arr.shape[1] or idx2 >= arr.shape[1]:
@@ -915,15 +935,29 @@

         log_callback("Plotting cohomology space trajectory...")
         traj_path = out_dir / "cohospace_trajectory.png"
-
-
+        if dim_mode == "1d":
+            traj_cfg = PlotConfigs.cohospace_trajectory_1d(show=False, save_path=str(traj_path))
+            plot_cohospace_trajectory_1d(
+                coords=coords2,
+                times=None,
+                subsample=subsample,
+                config=traj_cfg,
+            )
+        else:
+            traj_cfg = PlotConfigs.cohospace_trajectory_2d(show=False, save_path=str(traj_path))
+            plot_cohospace_trajectory_2d(
+                coords=coords2,
+                times=None,
+                subsample=subsample,
+                config=traj_cfg,
+            )
         artifacts["trajectory"] = traj_path

         neuron_id = params.get("neuron_id", None)
         if neuron_id is not None and view in {"both", "single"}:
             log_callback(f"Plotting neuron {neuron_id}...")
             neuron_path = out_dir / f"cohospace_neuron_{neuron_id}.png"
-            if unfold == "skew":
+            if unfold == "skew" and dim_mode != "1d":
                 plot_cohospace_neuron_skewed(
                     coords=coordsbox2,
                     activity=activity,
@@ -936,22 +970,37 @@
                     n_tiles=skew_tiles,
                 )
             else:
-
-
-
-
-
-
-
-
-
+                if dim_mode == "1d":
+                    neuron_cfg = PlotConfigs.cohospace_neuron_1d(
+                        show=False, save_path=str(neuron_path)
+                    )
+                    plot_cohospace_neuron_1d(
+                        coords=coordsbox2,
+                        activity=activity,
+                        neuron_id=int(neuron_id),
+                        mode=mode,
+                        top_percent=top_percent,
+                        config=neuron_cfg,
+                    )
+                else:
+                    neuron_cfg = PlotConfigs.cohospace_neuron_2d(
+                        show=False, save_path=str(neuron_path)
+                    )
+                    plot_cohospace_neuron_2d(
+                        coords=coordsbox2,
+                        activity=activity,
+                        neuron_id=int(neuron_id),
+                        mode=mode,
+                        top_percent=top_percent,
+                        config=neuron_cfg,
+                    )
             artifacts["neuron"] = neuron_path

         if view in {"both", "population"}:
             log_callback("Plotting population activity...")
             pop_path = out_dir / "cohospace_population.png"
             neuron_ids = list(range(activity.shape[1]))
-            if unfold == "skew":
+            if unfold == "skew" and dim_mode != "1d":
                 plot_cohospace_population_skewed(
                     coords=coords2,
                     activity=activity,
@@ -964,15 +1013,30 @@
                     n_tiles=skew_tiles,
                 )
             else:
-
-
-
-
-
-
-
-
-
+                if dim_mode == "1d":
+                    pop_cfg = PlotConfigs.cohospace_population_1d(
+                        show=False, save_path=str(pop_path)
+                    )
+                    plot_cohospace_population_1d(
+                        coords=coords2,
+                        activity=activity,
+                        neuron_ids=neuron_ids,
+                        mode=mode,
+                        top_percent=top_percent,
+                        config=pop_cfg,
+                    )
+                else:
+                    pop_cfg = PlotConfigs.cohospace_population_2d(
+                        show=False, save_path=str(pop_path)
+                    )
+                    plot_cohospace_population_2d(
+                        coords=coords2,
+                        activity=activity,
+                        neuron_ids=neuron_ids,
+                        mode=mode,
+                        top_percent=top_percent,
+                        config=pop_cfg,
+                    )
             artifacts["population"] = pop_path

         self._write_cache_meta(meta_path, {"hash": stage_hash})
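The new 1-D branches above reduce the decoded cohomology coordinates to a single column before alignment and plotting, while the 2-D path keeps a coordinate pair. A toy sketch of that column selection with the same 1-based `dim`/`dim1`/`dim2` convention; the 2-D pairing shown here is an assumption for illustration, since the diff only shows the 1-D assignment:

```python
import numpy as np

# Decoded circular coordinates: one column per cohomology dimension (toy data).
coords_raw = np.random.default_rng(0).random((1000, 3))

dim_mode = "1d"            # "1d" selects one angle; anything else uses a pair
dim, dim1, dim2 = 1, 1, 2  # 1-based indices, as in the pipeline parameters

if dim_mode == "1d":
    idx = max(0, dim - 1)
    coords1 = coords_raw[:, idx]            # shape (T,), a single circular coordinate
else:
    idx1, idx2 = max(0, dim1 - 1), max(0, dim2 - 1)
    coords2 = coords_raw[:, [idx1, idx2]]   # shape (T, 2); assumed pairing, not shown in the diff
```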
canns/pipeline/asa_gui/__init__.py
ADDED

@@ -0,0 +1,68 @@
+"""ASA GUI - PySide6-based graphical interface for Attractor Structure Analyzer."""
+
+from __future__ import annotations
+
+import sys
+import os
+import importlib.util
+
+__all__ = ["main", "ASAGuiApp"]
+
+_pyside6_missing = importlib.util.find_spec("PySide6") is None
+if _pyside6_missing:
+    if "pytest" in sys.modules or "PYTEST_CURRENT_TEST" in os.environ:
+        try:  # pragma: no cover - only used in CI/test runs
+            import pytest
+
+            pytest.skip("PySide6 is not installed; skipping asa_gui module.", allow_module_level=True)
+        except Exception:
+            pass
+
+
+def main() -> int:
+    """Entry point for canns-gui command."""
+    try:
+        from PySide6.QtWidgets import QApplication
+    except ImportError as e:
+        print(
+            "PySide6 is not installed. Please install with: pip install canns[gui]",
+            file=sys.stderr,
+        )
+        raise SystemExit(1) from e
+
+    from pathlib import Path
+
+    from PySide6.QtGui import QGuiApplication, QIcon
+
+    from .app import ASAGuiApp
+
+    app = QApplication(sys.argv)
+    app.setOrganizationName("canns")
+    app.setApplicationName("ASA GUI")
+    app.setApplicationDisplayName("ASA GUI")
+    QGuiApplication.setApplicationDisplayName("ASA GUI")
+
+    base = Path(__file__).resolve().parents[4] / "images"
+    logo_path = base / "logo_256.png"
+    if not logo_path.exists():
+        logo_path = base / "logo.svg"
+    if not logo_path.exists():
+        logo_path = base / "logo.ico"
+    icon = QIcon(str(logo_path)) if logo_path.exists() else QIcon()
+    if not icon.isNull():
+        app.setWindowIcon(icon)
+
+    window = ASAGuiApp()
+    if not icon.isNull():
+        window.setWindowIcon(icon)
+    window.show()
+    return app.exec()
+
+
+# Lazy import for ASAGuiApp
+def __getattr__(name: str):
+    if name == "ASAGuiApp":
+        from .app import ASAGuiApp
+
+        return ASAGuiApp
+    raise AttributeError(f"module {__name__!r} has no attribute {name!r}")
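This module backs the new `canns-gui` console script declared in `entry_points.txt`. A sketch of launching it programmatically, assuming the optional PySide6 extra is installed:

```python
# Equivalent to running the `canns-gui` console script.
from canns.pipeline.asa_gui import main

raise SystemExit(main())
```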
canns/pipeline/asa_gui/analysis_modes/__init__.py
ADDED

@@ -0,0 +1,42 @@
+"""Analysis modes for ASA GUI."""
+
+from .base import AbstractAnalysisMode
+from .batch_mode import BatchMode
+from .cohomap_mode import CohoMapMode
+from .cohospace_mode import CohoSpaceMode
+from .decode_mode import DecodeMode
+from .fr_mode import FRMode
+from .frm_mode import FRMMode
+from .gridscore_mode import GridScoreInspectMode, GridScoreMode
+from .pathcompare_mode import PathCompareMode
+from .tda_mode import TDAMode
+
+__all__ = [
+    "AbstractAnalysisMode",
+    "BatchMode",
+    "CohoMapMode",
+    "CohoSpaceMode",
+    "DecodeMode",
+    "FRMode",
+    "FRMMode",
+    "GridScoreMode",
+    "GridScoreInspectMode",
+    "PathCompareMode",
+    "TDAMode",
+    "get_analysis_modes",
+]
+
+
+def get_analysis_modes() -> list[AbstractAnalysisMode]:
+    return [
+        TDAMode(),
+        DecodeMode(),
+        CohoMapMode(),
+        PathCompareMode(),
+        CohoSpaceMode(),
+        FRMode(),
+        FRMMode(),
+        GridScoreMode(),
+        GridScoreInspectMode(),
+        BatchMode(),
+    ]
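`get_analysis_modes()` is the registry the GUI iterates to build its mode list. A small sketch of consuming it outside the GUI, e.g. to list the available modes (building the actual parameter widgets would additionally require a running QApplication):

```python
from canns.pipeline.asa_gui.analysis_modes import get_analysis_modes

for mode in get_analysis_modes():
    # Each mode exposes a machine name, a display label, and widget/params hooks.
    print(f"{mode.name:12s} -> {mode.display_name}")
```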
canns/pipeline/asa_gui/analysis_modes/base.py
ADDED

@@ -0,0 +1,39 @@
+"""Base classes for analysis modes."""
+
+from __future__ import annotations
+
+from abc import ABC, abstractmethod
+from typing import Any
+
+from PySide6.QtCore import Qt
+from PySide6.QtWidgets import QFormLayout, QWidget
+
+
+class AbstractAnalysisMode(ABC):
+    name: str
+    display_name: str
+
+    @abstractmethod
+    def create_params_widget(self) -> QWidget:
+        """Create and return the parameter editor widget."""
+
+    @abstractmethod
+    def collect_params(self) -> dict[str, Any]:
+        """Collect parameters from the widget into a dict."""
+
+    def apply_preset(self, preset: str) -> None:
+        """Apply preset hints (grid/hd) to parameters."""
+        return None
+
+    def apply_ranges(self, neuron_count: int | None, total_steps: int | None) -> None:
+        """Apply neuron/time ranges based on loaded data."""
+        return None
+
+
+def configure_form_layout(form: QFormLayout) -> None:
+    """Apply consistent spacing/alignment for analysis parameter forms."""
+    form.setLabelAlignment(Qt.AlignRight | Qt.AlignVCenter)
+    form.setFormAlignment(Qt.AlignTop | Qt.AlignLeft)
+    form.setHorizontalSpacing(12)
+    form.setVerticalSpacing(6)
+    form.setFieldGrowthPolicy(QFormLayout.AllNonFixedFieldsGrow)
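A minimal sketch of a custom mode built on `AbstractAnalysisMode`; the spin-box field and the `neuron_id` parameter are hypothetical, chosen only to illustrate the `create_params_widget` / `collect_params` / `apply_ranges` contract and the shared `configure_form_layout` helper:

```python
from typing import Any

from PySide6.QtWidgets import QFormLayout, QSpinBox, QWidget

from canns.pipeline.asa_gui.analysis_modes.base import (
    AbstractAnalysisMode,
    configure_form_layout,
)


class ExampleMode(AbstractAnalysisMode):
    name = "example"
    display_name = "Example"

    def create_params_widget(self) -> QWidget:
        widget = QWidget()
        form = QFormLayout(widget)
        configure_form_layout(form)      # shared spacing/alignment defaults
        self._neuron_id = QSpinBox()     # hypothetical parameter field
        self._neuron_id.setRange(0, 9999)
        form.addRow("Neuron ID", self._neuron_id)
        return widget

    def collect_params(self) -> dict[str, Any]:
        return {"neuron_id": self._neuron_id.value()}

    def apply_ranges(self, neuron_count: int | None, total_steps: int | None) -> None:
        if neuron_count is not None:
            self._neuron_id.setRange(0, max(0, neuron_count - 1))
```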
canns/pipeline/asa_gui/analysis_modes/batch_mode.py
ADDED

@@ -0,0 +1,21 @@
+"""Batch analysis mode placeholder."""
+
+from __future__ import annotations
+
+from PySide6.QtWidgets import QLabel, QVBoxLayout, QWidget
+
+from .base import AbstractAnalysisMode
+
+
+class BatchMode(AbstractAnalysisMode):
+    name = "batch"
+    display_name = "Batch"
+
+    def create_params_widget(self) -> QWidget:
+        widget = QWidget()
+        layout = QVBoxLayout(widget)
+        layout.addWidget(QLabel("Batch mode parameters will be added here."))
+        return widget
+
+    def collect_params(self) -> dict:
+        return {}