spec-kitty-cli 0.12.1 (spec_kitty_cli-0.12.1-py3-none-any.whl)
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- spec_kitty_cli-0.12.1.dist-info/METADATA +1767 -0
- spec_kitty_cli-0.12.1.dist-info/RECORD +242 -0
- spec_kitty_cli-0.12.1.dist-info/WHEEL +4 -0
- spec_kitty_cli-0.12.1.dist-info/entry_points.txt +2 -0
- spec_kitty_cli-0.12.1.dist-info/licenses/LICENSE +21 -0
- specify_cli/__init__.py +171 -0
- specify_cli/acceptance.py +627 -0
- specify_cli/agent_utils/README.md +157 -0
- specify_cli/agent_utils/__init__.py +9 -0
- specify_cli/agent_utils/status.py +356 -0
- specify_cli/cli/__init__.py +6 -0
- specify_cli/cli/commands/__init__.py +46 -0
- specify_cli/cli/commands/accept.py +189 -0
- specify_cli/cli/commands/agent/__init__.py +22 -0
- specify_cli/cli/commands/agent/config.py +382 -0
- specify_cli/cli/commands/agent/context.py +191 -0
- specify_cli/cli/commands/agent/feature.py +1057 -0
- specify_cli/cli/commands/agent/release.py +11 -0
- specify_cli/cli/commands/agent/tasks.py +1253 -0
- specify_cli/cli/commands/agent/workflow.py +801 -0
- specify_cli/cli/commands/context.py +246 -0
- specify_cli/cli/commands/dashboard.py +85 -0
- specify_cli/cli/commands/implement.py +973 -0
- specify_cli/cli/commands/init.py +827 -0
- specify_cli/cli/commands/init_help.py +62 -0
- specify_cli/cli/commands/merge.py +755 -0
- specify_cli/cli/commands/mission.py +240 -0
- specify_cli/cli/commands/ops.py +265 -0
- specify_cli/cli/commands/orchestrate.py +640 -0
- specify_cli/cli/commands/repair.py +175 -0
- specify_cli/cli/commands/research.py +165 -0
- specify_cli/cli/commands/sync.py +364 -0
- specify_cli/cli/commands/upgrade.py +249 -0
- specify_cli/cli/commands/validate_encoding.py +186 -0
- specify_cli/cli/commands/validate_tasks.py +186 -0
- specify_cli/cli/commands/verify.py +310 -0
- specify_cli/cli/helpers.py +123 -0
- specify_cli/cli/step_tracker.py +91 -0
- specify_cli/cli/ui.py +192 -0
- specify_cli/core/__init__.py +53 -0
- specify_cli/core/agent_context.py +311 -0
- specify_cli/core/config.py +96 -0
- specify_cli/core/context_validation.py +362 -0
- specify_cli/core/dependency_graph.py +351 -0
- specify_cli/core/git_ops.py +129 -0
- specify_cli/core/multi_parent_merge.py +323 -0
- specify_cli/core/paths.py +260 -0
- specify_cli/core/project_resolver.py +110 -0
- specify_cli/core/stale_detection.py +263 -0
- specify_cli/core/tool_checker.py +79 -0
- specify_cli/core/utils.py +43 -0
- specify_cli/core/vcs/__init__.py +114 -0
- specify_cli/core/vcs/detection.py +341 -0
- specify_cli/core/vcs/exceptions.py +85 -0
- specify_cli/core/vcs/git.py +1304 -0
- specify_cli/core/vcs/jujutsu.py +1208 -0
- specify_cli/core/vcs/protocol.py +285 -0
- specify_cli/core/vcs/types.py +249 -0
- specify_cli/core/version_checker.py +261 -0
- specify_cli/core/worktree.py +506 -0
- specify_cli/dashboard/__init__.py +28 -0
- specify_cli/dashboard/diagnostics.py +204 -0
- specify_cli/dashboard/handlers/__init__.py +17 -0
- specify_cli/dashboard/handlers/api.py +143 -0
- specify_cli/dashboard/handlers/base.py +65 -0
- specify_cli/dashboard/handlers/features.py +390 -0
- specify_cli/dashboard/handlers/router.py +81 -0
- specify_cli/dashboard/handlers/static.py +50 -0
- specify_cli/dashboard/lifecycle.py +541 -0
- specify_cli/dashboard/scanner.py +437 -0
- specify_cli/dashboard/server.py +123 -0
- specify_cli/dashboard/static/dashboard/dashboard.css +722 -0
- specify_cli/dashboard/static/dashboard/dashboard.js +1424 -0
- specify_cli/dashboard/static/spec-kitty.png +0 -0
- specify_cli/dashboard/templates/__init__.py +36 -0
- specify_cli/dashboard/templates/index.html +258 -0
- specify_cli/doc_generators.py +621 -0
- specify_cli/doc_state.py +408 -0
- specify_cli/frontmatter.py +384 -0
- specify_cli/gap_analysis.py +915 -0
- specify_cli/gitignore_manager.py +300 -0
- specify_cli/guards.py +145 -0
- specify_cli/legacy_detector.py +83 -0
- specify_cli/manifest.py +286 -0
- specify_cli/merge/__init__.py +63 -0
- specify_cli/merge/executor.py +653 -0
- specify_cli/merge/forecast.py +215 -0
- specify_cli/merge/ordering.py +126 -0
- specify_cli/merge/preflight.py +230 -0
- specify_cli/merge/state.py +185 -0
- specify_cli/merge/status_resolver.py +354 -0
- specify_cli/mission.py +654 -0
- specify_cli/missions/documentation/command-templates/implement.md +309 -0
- specify_cli/missions/documentation/command-templates/plan.md +275 -0
- specify_cli/missions/documentation/command-templates/review.md +344 -0
- specify_cli/missions/documentation/command-templates/specify.md +206 -0
- specify_cli/missions/documentation/command-templates/tasks.md +189 -0
- specify_cli/missions/documentation/mission.yaml +113 -0
- specify_cli/missions/documentation/templates/divio/explanation-template.md +192 -0
- specify_cli/missions/documentation/templates/divio/howto-template.md +168 -0
- specify_cli/missions/documentation/templates/divio/reference-template.md +179 -0
- specify_cli/missions/documentation/templates/divio/tutorial-template.md +146 -0
- specify_cli/missions/documentation/templates/generators/jsdoc.json.template +18 -0
- specify_cli/missions/documentation/templates/generators/sphinx-conf.py.template +36 -0
- specify_cli/missions/documentation/templates/plan-template.md +269 -0
- specify_cli/missions/documentation/templates/release-template.md +222 -0
- specify_cli/missions/documentation/templates/spec-template.md +172 -0
- specify_cli/missions/documentation/templates/task-prompt-template.md +140 -0
- specify_cli/missions/documentation/templates/tasks-template.md +159 -0
- specify_cli/missions/research/command-templates/merge.md +388 -0
- specify_cli/missions/research/command-templates/plan.md +125 -0
- specify_cli/missions/research/command-templates/review.md +144 -0
- specify_cli/missions/research/command-templates/tasks.md +225 -0
- specify_cli/missions/research/mission.yaml +115 -0
- specify_cli/missions/research/templates/data-model-template.md +33 -0
- specify_cli/missions/research/templates/plan-template.md +161 -0
- specify_cli/missions/research/templates/research/evidence-log.csv +18 -0
- specify_cli/missions/research/templates/research/source-register.csv +18 -0
- specify_cli/missions/research/templates/research-template.md +35 -0
- specify_cli/missions/research/templates/spec-template.md +64 -0
- specify_cli/missions/research/templates/task-prompt-template.md +148 -0
- specify_cli/missions/research/templates/tasks-template.md +114 -0
- specify_cli/missions/software-dev/command-templates/accept.md +75 -0
- specify_cli/missions/software-dev/command-templates/analyze.md +183 -0
- specify_cli/missions/software-dev/command-templates/checklist.md +286 -0
- specify_cli/missions/software-dev/command-templates/clarify.md +157 -0
- specify_cli/missions/software-dev/command-templates/constitution.md +432 -0
- specify_cli/missions/software-dev/command-templates/dashboard.md +101 -0
- specify_cli/missions/software-dev/command-templates/implement.md +41 -0
- specify_cli/missions/software-dev/command-templates/merge.md +383 -0
- specify_cli/missions/software-dev/command-templates/plan.md +171 -0
- specify_cli/missions/software-dev/command-templates/review.md +32 -0
- specify_cli/missions/software-dev/command-templates/specify.md +321 -0
- specify_cli/missions/software-dev/command-templates/tasks.md +566 -0
- specify_cli/missions/software-dev/mission.yaml +100 -0
- specify_cli/missions/software-dev/templates/plan-template.md +132 -0
- specify_cli/missions/software-dev/templates/spec-template.md +116 -0
- specify_cli/missions/software-dev/templates/task-prompt-template.md +140 -0
- specify_cli/missions/software-dev/templates/tasks-template.md +159 -0
- specify_cli/orchestrator/__init__.py +75 -0
- specify_cli/orchestrator/agent_config.py +224 -0
- specify_cli/orchestrator/agents/__init__.py +170 -0
- specify_cli/orchestrator/agents/augment.py +112 -0
- specify_cli/orchestrator/agents/base.py +243 -0
- specify_cli/orchestrator/agents/claude.py +112 -0
- specify_cli/orchestrator/agents/codex.py +106 -0
- specify_cli/orchestrator/agents/copilot.py +137 -0
- specify_cli/orchestrator/agents/cursor.py +139 -0
- specify_cli/orchestrator/agents/gemini.py +115 -0
- specify_cli/orchestrator/agents/kilocode.py +94 -0
- specify_cli/orchestrator/agents/opencode.py +132 -0
- specify_cli/orchestrator/agents/qwen.py +96 -0
- specify_cli/orchestrator/config.py +455 -0
- specify_cli/orchestrator/executor.py +642 -0
- specify_cli/orchestrator/integration.py +1230 -0
- specify_cli/orchestrator/monitor.py +898 -0
- specify_cli/orchestrator/scheduler.py +832 -0
- specify_cli/orchestrator/state.py +508 -0
- specify_cli/orchestrator/testing/__init__.py +122 -0
- specify_cli/orchestrator/testing/availability.py +346 -0
- specify_cli/orchestrator/testing/fixtures.py +684 -0
- specify_cli/orchestrator/testing/paths.py +218 -0
- specify_cli/plan_validation.py +107 -0
- specify_cli/scripts/debug-dashboard-scan.py +61 -0
- specify_cli/scripts/tasks/acceptance_support.py +695 -0
- specify_cli/scripts/tasks/task_helpers.py +506 -0
- specify_cli/scripts/tasks/tasks_cli.py +848 -0
- specify_cli/scripts/validate_encoding.py +180 -0
- specify_cli/task_metadata_validation.py +274 -0
- specify_cli/tasks_support.py +447 -0
- specify_cli/template/__init__.py +47 -0
- specify_cli/template/asset_generator.py +206 -0
- specify_cli/template/github_client.py +334 -0
- specify_cli/template/manager.py +193 -0
- specify_cli/template/renderer.py +99 -0
- specify_cli/templates/AGENTS.md +190 -0
- specify_cli/templates/POWERSHELL_SYNTAX.md +229 -0
- specify_cli/templates/agent-file-template.md +35 -0
- specify_cli/templates/checklist-template.md +42 -0
- specify_cli/templates/claudeignore-template +58 -0
- specify_cli/templates/command-templates/accept.md +141 -0
- specify_cli/templates/command-templates/analyze.md +253 -0
- specify_cli/templates/command-templates/checklist.md +352 -0
- specify_cli/templates/command-templates/clarify.md +224 -0
- specify_cli/templates/command-templates/constitution.md +432 -0
- specify_cli/templates/command-templates/dashboard.md +175 -0
- specify_cli/templates/command-templates/implement.md +190 -0
- specify_cli/templates/command-templates/merge.md +374 -0
- specify_cli/templates/command-templates/plan.md +171 -0
- specify_cli/templates/command-templates/research.md +88 -0
- specify_cli/templates/command-templates/review.md +510 -0
- specify_cli/templates/command-templates/specify.md +321 -0
- specify_cli/templates/command-templates/status.md +92 -0
- specify_cli/templates/command-templates/tasks.md +199 -0
- specify_cli/templates/git-hooks/pre-commit +22 -0
- specify_cli/templates/git-hooks/pre-commit-agent-check +37 -0
- specify_cli/templates/git-hooks/pre-commit-encoding-check +142 -0
- specify_cli/templates/plan-template.md +108 -0
- specify_cli/templates/spec-template.md +118 -0
- specify_cli/templates/task-prompt-template.md +165 -0
- specify_cli/templates/tasks-template.md +161 -0
- specify_cli/templates/vscode-settings.json +13 -0
- specify_cli/text_sanitization.py +225 -0
- specify_cli/upgrade/__init__.py +18 -0
- specify_cli/upgrade/detector.py +239 -0
- specify_cli/upgrade/metadata.py +182 -0
- specify_cli/upgrade/migrations/__init__.py +65 -0
- specify_cli/upgrade/migrations/base.py +80 -0
- specify_cli/upgrade/migrations/m_0_10_0_python_only.py +359 -0
- specify_cli/upgrade/migrations/m_0_10_12_constitution_cleanup.py +99 -0
- specify_cli/upgrade/migrations/m_0_10_14_update_implement_slash_command.py +176 -0
- specify_cli/upgrade/migrations/m_0_10_1_populate_slash_commands.py +174 -0
- specify_cli/upgrade/migrations/m_0_10_2_update_slash_commands.py +172 -0
- specify_cli/upgrade/migrations/m_0_10_6_workflow_simplification.py +174 -0
- specify_cli/upgrade/migrations/m_0_10_8_fix_memory_structure.py +252 -0
- specify_cli/upgrade/migrations/m_0_10_9_repair_templates.py +168 -0
- specify_cli/upgrade/migrations/m_0_11_0_workspace_per_wp.py +182 -0
- specify_cli/upgrade/migrations/m_0_11_1_improved_workflow_templates.py +173 -0
- specify_cli/upgrade/migrations/m_0_11_1_update_implement_slash_command.py +160 -0
- specify_cli/upgrade/migrations/m_0_11_2_improved_workflow_templates.py +173 -0
- specify_cli/upgrade/migrations/m_0_11_3_workflow_agent_flag.py +114 -0
- specify_cli/upgrade/migrations/m_0_12_0_documentation_mission.py +155 -0
- specify_cli/upgrade/migrations/m_0_12_1_remove_kitty_specs_from_gitignore.py +183 -0
- specify_cli/upgrade/migrations/m_0_2_0_specify_to_kittify.py +80 -0
- specify_cli/upgrade/migrations/m_0_4_8_gitignore_agents.py +118 -0
- specify_cli/upgrade/migrations/m_0_5_0_encoding_hooks.py +141 -0
- specify_cli/upgrade/migrations/m_0_6_5_commands_rename.py +169 -0
- specify_cli/upgrade/migrations/m_0_6_7_ensure_missions.py +228 -0
- specify_cli/upgrade/migrations/m_0_7_2_worktree_commands_dedup.py +89 -0
- specify_cli/upgrade/migrations/m_0_7_3_update_scripts.py +114 -0
- specify_cli/upgrade/migrations/m_0_8_0_remove_active_mission.py +82 -0
- specify_cli/upgrade/migrations/m_0_8_0_worktree_agents_symlink.py +148 -0
- specify_cli/upgrade/migrations/m_0_9_0_frontmatter_only_lanes.py +346 -0
- specify_cli/upgrade/migrations/m_0_9_1_complete_lane_migration.py +656 -0
- specify_cli/upgrade/migrations/m_0_9_2_research_mission_templates.py +221 -0
- specify_cli/upgrade/registry.py +121 -0
- specify_cli/upgrade/runner.py +284 -0
- specify_cli/validators/__init__.py +14 -0
- specify_cli/validators/paths.py +154 -0
- specify_cli/validators/research.py +428 -0
- specify_cli/verify_enhanced.py +270 -0
- specify_cli/workspace_context.py +224 -0
specify_cli/dashboard/scanner.py
@@ -0,0 +1,437 @@
"""Feature scanning helpers for the Spec Kitty dashboard."""

from __future__ import annotations

import json
import logging
import os
import re
from pathlib import Path
from typing import Any, Dict, List, Optional

from specify_cli.legacy_detector import is_legacy_format
from specify_cli.template import parse_frontmatter
from specify_cli.text_sanitization import sanitize_file

logger = logging.getLogger(__name__)

__all__ = [
    "format_path_for_display",
    "gather_feature_paths",
    "get_feature_artifacts",
    "get_workflow_status",
    "read_file_resilient",
    "resolve_feature_dir",
    "scan_all_features",
    "scan_feature_kanban",
]


def read_file_resilient(
    file_path: Path, *, auto_fix: bool = True
) -> tuple[Optional[str], Optional[str]]:
    """Read a file with resilience to encoding errors.

    This function attempts to read a file as UTF-8, and if that fails:
    1. Tries alternative encodings (cp1252, latin-1)
    2. Optionally auto-fixes the file by sanitizing and re-saving as UTF-8
    3. Returns clear error messages for the dashboard to display

    Args:
        file_path: Path to the file to read
        auto_fix: If True, automatically sanitize and fix encoding errors

    Returns:
        Tuple of (content, error_message)
        - content: File content if successful, None if failed
        - error_message: None if successful, error description if failed

    Examples:
        >>> from pathlib import Path
        >>> content, error = read_file_resilient(Path("good-file.md"))
        >>> content is not None
        True
        >>> error is None
        True
    """
    if not file_path.exists():
        return None, f"File not found: {file_path.name}"

    try:
        # Try strict UTF-8 first
        content = file_path.read_text(encoding="utf-8-sig")
        return content, None
    except UnicodeDecodeError as exc:
        # Log the encoding error
        logger.warning(
            f"UTF-8 decoding failed for {file_path.name} at byte {exc.start}: {exc.reason}"
        )

        if not auto_fix:
            return None, (
                f"Encoding error in {file_path.name} at byte {exc.start}. "
                f"File contains non-UTF-8 characters (possibly Windows-1252 smart quotes). "
                f"Run 'spec-kitty validate-encoding --fix' to repair."
            )

        # Attempt auto-fix
        try:
            logger.info(f"Attempting to auto-fix encoding for {file_path.name}")
            was_modified, error = sanitize_file(file_path, backup=True, dry_run=False)

            if error:
                return None, error

            if was_modified:
                # Read the fixed file
                content = file_path.read_text(encoding="utf-8-sig")
                logger.info(f"Successfully fixed encoding for {file_path.name}")
                return content, None
            else:
                # Shouldn't happen, but handle it
                return None, f"Auto-fix failed for {file_path.name}: no changes made"

        except Exception as fix_exc:
            logger.error(f"Auto-fix failed for {file_path.name}: {fix_exc}")
            return None, (
                f"Encoding error in {file_path.name} and auto-fix failed: {fix_exc}. "
                f"Manually repair the file or run 'spec-kitty validate-encoding --fix'."
            )
    except Exception as exc:
        logger.error(f"Unexpected error reading {file_path.name}: {exc}")
        return None, f"Error reading {file_path.name}: {exc}"


def format_path_for_display(path_str: Optional[str]) -> Optional[str]:
    """Return a human-readable path that shortens the user's home directory."""
    if not path_str:
        return path_str

    try:
        path = Path(path_str).expanduser()
    except (TypeError, ValueError):
        return path_str

    try:
        resolved = path.resolve()
    except Exception:
        resolved = path

    try:
        home = Path.home().resolve()
    except Exception:
        home = Path.home()

    try:
        relative = resolved.relative_to(home)
    except ValueError:
        return str(resolved)

    relative_str = str(relative)
    if relative_str in {"", "."}:
        return "~"
    return f"~{os.sep}{relative_str}"


def work_package_sort_key(task: Dict[str, Any]) -> tuple:
    """Provide a natural sort key for work package identifiers."""
    work_id = str(task.get("id", "")).strip()
    if not work_id:
        return ((), "")

    number_parts = [int(part.lstrip("0") or "0") for part in re.findall(r"\d+", work_id)]
    return (tuple(number_parts), work_id.lower())


def _get_artifact_info(path: Path) -> Dict[str, any]:
    """Get artifact information including existence, mtime, and size."""
    if not path.exists():
        return {"exists": False, "mtime": None, "size": None}

    stat = path.stat()
    return {
        "exists": True,
        "mtime": stat.st_mtime,
        "size": stat.st_size if path.is_file() else None,
    }


def get_feature_artifacts(feature_dir: Path) -> Dict[str, Dict[str, any]]:
    """Return which artifacts exist for a feature with modification info."""
    return {
        "constitution": _get_artifact_info(feature_dir / "constitution.md"),
        "spec": _get_artifact_info(feature_dir / "spec.md"),
        "plan": _get_artifact_info(feature_dir / "plan.md"),
        "tasks": _get_artifact_info(feature_dir / "tasks.md"),
        "research": _get_artifact_info(feature_dir / "research.md"),
        "quickstart": _get_artifact_info(feature_dir / "quickstart.md"),
        "data_model": _get_artifact_info(feature_dir / "data-model.md"),
        "contracts": _get_artifact_info(feature_dir / "contracts"),
        "checklists": _get_artifact_info(feature_dir / "checklists"),
        "kanban": _get_artifact_info(feature_dir / "tasks"),
    }


def get_workflow_status(artifacts: Dict[str, Dict[str, any]]) -> Dict[str, str]:
    """Determine workflow progression status."""
    has_spec = artifacts.get("spec", {}).get("exists", False)
    has_plan = artifacts.get("plan", {}).get("exists", False)
    has_tasks = artifacts.get("tasks", {}).get("exists", False)
    has_kanban = artifacts.get("kanban", {}).get("exists", False)

    workflow: Dict[str, str] = {}

    if not has_spec:
        workflow.update(
            {"specify": "pending", "plan": "pending", "tasks": "pending", "implement": "pending"}
        )
        return workflow
    workflow["specify"] = "complete"

    if not has_plan:
        workflow.update({"plan": "pending", "tasks": "pending", "implement": "pending"})
        return workflow
    workflow["plan"] = "complete"

    if not has_tasks:
        workflow.update({"tasks": "pending", "implement": "pending"})
        return workflow
    workflow["tasks"] = "complete"

    workflow["implement"] = "in_progress" if has_kanban else "pending"
    return workflow


def gather_feature_paths(project_dir: Path) -> Dict[str, Path]:
    """Collect candidate feature directories from root and worktrees.

    Main repo (kitty-specs/) paths take priority over worktree copies.
    Worktrees may have stale data from when they were created, so the
    main repo should be the source of truth for feature status.
    """
    feature_paths: Dict[str, Path] = {}

    # First scan worktrees (lower priority - may have stale data)
    worktrees_root = project_dir / ".worktrees"
    if worktrees_root.exists():
        for worktree_dir in worktrees_root.iterdir():
            if not worktree_dir.is_dir():
                continue
            wt_specs = worktree_dir / "kitty-specs"
            if not wt_specs.exists():
                continue
            for feature_dir in wt_specs.iterdir():
                if feature_dir.is_dir():
                    feature_paths[feature_dir.name] = feature_dir

    # Then scan main repo (higher priority - source of truth)
    # This will overwrite any worktree paths with the same feature name
    root_specs = project_dir / "kitty-specs"
    if root_specs.exists():
        for feature_dir in root_specs.iterdir():
            if feature_dir.is_dir():
                feature_paths[feature_dir.name] = feature_dir

    return feature_paths


def resolve_feature_dir(project_dir: Path, feature_id: str) -> Optional[Path]:
    """Resolve the on-disk directory for the requested feature."""
    feature_paths = gather_feature_paths(project_dir)
    return feature_paths.get(feature_id)


def _count_wps_by_lane_frontmatter(tasks_dir: Path) -> Dict[str, int]:
    """Count work packages by lane from frontmatter (new format)."""
    counts = {"planned": 0, "doing": 0, "for_review": 0, "done": 0}

    if not tasks_dir.exists():
        return counts

    for wp_file in tasks_dir.glob("WP*.md"):
        content, error = read_file_resilient(wp_file, auto_fix=True)
        if content is None:
            continue

        frontmatter, _, _ = parse_frontmatter(content)
        lane = frontmatter.get("lane", "planned") if isinstance(frontmatter, dict) else "planned"
        if lane in counts:
            counts[lane] += 1

    return counts


def scan_all_features(project_dir: Path) -> List[Dict[str, Any]]:
    """Scan all features and return metadata."""
    features: List[Dict[str, Any]] = []
    feature_paths = gather_feature_paths(project_dir)

    for feature_id, feature_dir in feature_paths.items():
        if not (re.match(r"^\d+", feature_dir.name) or (feature_dir / "tasks").exists()):
            continue

        friendly_name = feature_dir.name
        meta_data: Dict[str, Any] | None = None
        meta_path = feature_dir / "meta.json"
        if meta_path.exists():
            try:
                meta_data = json.loads(meta_path.read_text(encoding="utf-8-sig"))
                potential_name = meta_data.get("friendly_name")
                if isinstance(potential_name, str) and potential_name.strip():
                    friendly_name = potential_name.strip()
            except json.JSONDecodeError:
                meta_data = None

        artifacts = get_feature_artifacts(feature_dir)
        workflow = get_workflow_status(artifacts)

        kanban_stats = {"total": 0, "planned": 0, "doing": 0, "for_review": 0, "done": 0}
        if artifacts["kanban"]:
            tasks_dir = feature_dir / "tasks"
            use_legacy = is_legacy_format(feature_dir)

            if use_legacy:
                # Legacy format: count WPs in lane subdirectories
                for lane in ["planned", "doing", "for_review", "done"]:
                    lane_dir = tasks_dir / lane
                    if lane_dir.exists():
                        count = len(list(lane_dir.rglob("WP*.md")))
                        kanban_stats[lane] = count
                        kanban_stats["total"] += count
            else:
                # New format: count WPs by frontmatter lane
                lane_counts = _count_wps_by_lane_frontmatter(tasks_dir)
                for lane, count in lane_counts.items():
                    kanban_stats[lane] = count
                    kanban_stats["total"] += count

        worktree_root = project_dir / ".worktrees"
        worktree_path = worktree_root / feature_dir.name
        worktree_exists = worktree_path.exists()

        features.append(
            {
                "id": feature_id,
                "name": friendly_name,
                "path": str(feature_dir.relative_to(project_dir)),
                "artifacts": artifacts,
                "workflow": workflow,
                "kanban_stats": kanban_stats,
                "meta": meta_data or {},
                "worktree": {
                    "path": format_path_for_display(str(worktree_path)),
                    "exists": worktree_exists,
                },
            }
        )

    features.sort(key=lambda f: f["id"], reverse=True)
    return features


def _process_wp_file(
    prompt_file: Path,
    project_dir: Path,
    default_lane: str,
) -> Optional[Dict[str, Any]]:
    """Process a single WP file and return task data or None on error."""
    content, error = read_file_resilient(prompt_file, auto_fix=True)

    if content is None:
        logger.error(f"Failed to read {prompt_file.name}: {error}")
        return {
            "id": prompt_file.stem,
            "title": f"⚠️ Encoding Error: {prompt_file.name}",
            "lane": default_lane,
            "subtasks": [],
            "agent": "",
            "assignee": "",
            "phase": "",
            "prompt_markdown": f"**Encoding Error**\n\n{error}",
            "prompt_path": str(prompt_file.relative_to(project_dir))
            if prompt_file.is_relative_to(project_dir)
            else str(prompt_file),
            "encoding_error": True,
        }

    frontmatter, prompt_body, _ = parse_frontmatter(content)

    if not isinstance(frontmatter, dict) or "work_package_id" not in frontmatter:
        return None

    title_match = re.search(r"^#\s+Work Package Prompt:\s+(.+)$", content, re.MULTILINE)
    title = title_match.group(1) if title_match else prompt_file.stem

    return {
        "id": frontmatter.get("work_package_id", prompt_file.stem),
        "title": title,
        "lane": frontmatter.get("lane", default_lane),
        "subtasks": frontmatter.get("subtasks", []),
        "agent": frontmatter.get("agent", ""),
        "assignee": frontmatter.get("assignee", ""),
        "phase": frontmatter.get("phase", ""),
        "prompt_markdown": prompt_body.strip(),
        "prompt_path": str(prompt_file.relative_to(project_dir))
        if prompt_file.is_relative_to(project_dir)
        else str(prompt_file),
    }


def scan_feature_kanban(project_dir: Path, feature_id: str) -> Dict[str, List[Dict[str, Any]]]:
    """Scan kanban board for a specific feature.

    Supports both legacy (directory-based) and new (frontmatter-based) lane formats.
    """
    feature_dir = resolve_feature_dir(project_dir, feature_id)
    lanes: Dict[str, List[Dict[str, Any]]] = {
        "planned": [],
        "doing": [],
        "for_review": [],
        "done": [],
    }

    if feature_dir is None or not feature_dir.exists():
        return lanes

    tasks_dir = feature_dir / "tasks"
    if not tasks_dir.exists():
        return lanes

    use_legacy = is_legacy_format(feature_dir)

    if use_legacy:
        # Legacy format: scan lane subdirectories
        for lane in lanes.keys():
            lane_dir = tasks_dir / lane
            if not lane_dir.exists():
                continue

            for prompt_file in lane_dir.rglob("WP*.md"):
                try:
                    task_data = _process_wp_file(prompt_file, project_dir, lane)
                    if task_data is not None:
                        lanes[lane].append(task_data)
                except Exception as exc:
                    logger.error(f"Unexpected error processing {prompt_file.name}: {exc}")
                    continue

            lanes[lane].sort(key=work_package_sort_key)
    else:
        # New format: scan flat tasks/ directory, lane from frontmatter
        for prompt_file in tasks_dir.glob("WP*.md"):
            try:
                task_data = _process_wp_file(prompt_file, project_dir, "planned")
                if task_data is not None:
                    lane = task_data.get("lane", "planned")
                    if lane not in lanes:
                        lane = "planned"
                    lanes[lane].append(task_data)
            except Exception as exc:
                logger.error(f"Unexpected error processing {prompt_file.name}: {exc}")
                continue

    # Sort all lanes
    for lane in lanes.keys():
        lanes[lane].sort(key=work_package_sort_key)

    return lanes
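For orientation, here is a minimal driver sketch showing how the scanner helpers above could be exercised outside the dashboard. The project path and feature id are hypothetical placeholders; this snippet is not part of the packaged module.

# Illustrative usage sketch -- project path and feature id are hypothetical.
from pathlib import Path

from specify_cli.dashboard.scanner import scan_all_features, scan_feature_kanban

# Hypothetical project root containing a kitty-specs/ directory.
project = Path("~/projects/example-project").expanduser()

# Summarize every feature found in the main repo (and .worktrees copies).
for feature in scan_all_features(project):
    stats = feature["kanban_stats"]
    print(f"{feature['id']}: {stats['done']}/{stats['total']} work packages done")

# Inspect the per-lane kanban view for one feature.
lanes = scan_feature_kanban(project, "001-example-feature")
for lane, tasks in lanes.items():
    print(lane, [task["id"] for task in tasks])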
specify_cli/dashboard/server.py
@@ -0,0 +1,123 @@
"""Dashboard HTTP server bootstrap utilities."""

from __future__ import annotations

import socket
import subprocess
import sys
import textwrap
import threading
from http.server import HTTPServer
from pathlib import Path
from typing import Optional, Tuple

from .handlers.router import DashboardRouter

__all__ = ["find_free_port", "start_dashboard", "run_dashboard_server"]


def find_free_port(start_port: int = 9237, max_attempts: int = 100) -> int:
    """
    Find an available port starting from start_port.

    Uses a dual check (connect + bind) to avoid collisions with busy ports.
    """
    for port in range(start_port, start_port + max_attempts):
        try:
            test_sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            test_sock.settimeout(0.1)
            if test_sock.connect_ex(('127.0.0.1', port)) == 0:
                test_sock.close()
                continue
            test_sock.close()
        except OSError:
            pass

        try:
            with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock:
                sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
                sock.bind(('127.0.0.1', port))
                return port
        except OSError:
            continue

    raise RuntimeError(f"Could not find free port in range {start_port}-{start_port + max_attempts}")


def _build_handler_class(project_dir: Path, project_token: Optional[str]) -> type[DashboardRouter]:
    return type(
        'DashboardHandler',
        (DashboardRouter,),
        {
            'project_dir': str(project_dir),
            'project_token': project_token,
        },
    )


def run_dashboard_server(project_dir: Path, port: int, project_token: Optional[str]) -> None:
    """Run the dashboard server forever (used by detached child processes)."""
    handler_class = _build_handler_class(project_dir, project_token)
    server = HTTPServer(('127.0.0.1', port), handler_class)
    server.serve_forever()


def _background_script(project_dir: Path, port: int, project_token: Optional[str]) -> str:
    repo_root = Path(__file__).resolve().parents[2]
    return textwrap.dedent(
        f"""
        import sys
        from pathlib import Path
        repo_root = Path({repr(str(repo_root))})
        # Always insert at position 0 to ensure correct spec-kitty version takes priority
        # over any other paths in PYTHONPATH or .pth files
        sys.path.insert(0, str(repo_root))
        from specify_cli.dashboard.server import run_dashboard_server
        run_dashboard_server(Path({repr(str(project_dir))}), {port}, {repr(project_token)})
        """
    )


def start_dashboard(
    project_dir: Path,
    port: Optional[int] = None,
    background_process: bool = False,
    project_token: Optional[str] = None,
) -> Tuple[int, Optional[int]]:
    """
    Start the dashboard server.

    Returns tuple(port, pid). When background_process=True, pid is the process ID
    of the detached child process. When background_process=False, pid is None.

    Args:
        project_dir: Path to the project directory
        port: Port number (auto-selected if None)
        background_process: If True, run as detached subprocess; if False, run in thread
        project_token: Security token for the dashboard

    Returns:
        Tuple[port, pid]: Port number and process ID (None if threaded mode)
    """
    if port is None:
        port = find_free_port()

    project_dir_abs = project_dir.resolve()

    if background_process:
        script = _background_script(project_dir_abs, port, project_token)
        proc = subprocess.Popen(
            [sys.executable, '-c', script],
            stdout=subprocess.DEVNULL,
            stderr=subprocess.DEVNULL,
            stdin=subprocess.DEVNULL,
            start_new_session=True,
        )
        return port, proc.pid

    handler_class = _build_handler_class(project_dir_abs, project_token)
    server = HTTPServer(('127.0.0.1', port), handler_class)

    thread = threading.Thread(target=server.serve_forever, daemon=True)
    thread.start()
    return port, None
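A brief usage sketch for the bootstrap API above, assuming it is called from the CLI layer; the working directory and token value are hypothetical placeholders, not values defined by the package.

# Illustrative usage sketch -- the token string below is a hypothetical placeholder.
from pathlib import Path

from specify_cli.dashboard.server import start_dashboard

# Threaded mode: the server runs in a daemon thread of the current process (pid is None).
port, pid = start_dashboard(Path("."), background_process=False)
print(f"Dashboard at http://127.0.0.1:{port}")

# Detached mode: a child Python process is spawned and its pid returned.
port, pid = start_dashboard(Path("."), background_process=True, project_token="example-token")
print(f"Background dashboard at http://127.0.0.1:{port}, pid {pid}")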