htmlgraph 0.20.1__py3-none-any.whl → 0.27.5__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- htmlgraph/.htmlgraph/.session-warning-state.json +6 -0
- htmlgraph/.htmlgraph/agents.json +72 -0
- htmlgraph/.htmlgraph/htmlgraph.db +0 -0
- htmlgraph/__init__.py +51 -1
- htmlgraph/__init__.pyi +123 -0
- htmlgraph/agent_detection.py +26 -10
- htmlgraph/agent_registry.py +2 -1
- htmlgraph/analytics/__init__.py +8 -1
- htmlgraph/analytics/cli.py +86 -20
- htmlgraph/analytics/cost_analyzer.py +391 -0
- htmlgraph/analytics/cost_monitor.py +664 -0
- htmlgraph/analytics/cost_reporter.py +675 -0
- htmlgraph/analytics/cross_session.py +617 -0
- htmlgraph/analytics/dependency.py +10 -6
- htmlgraph/analytics/pattern_learning.py +771 -0
- htmlgraph/analytics/session_graph.py +707 -0
- htmlgraph/analytics/strategic/__init__.py +80 -0
- htmlgraph/analytics/strategic/cost_optimizer.py +611 -0
- htmlgraph/analytics/strategic/pattern_detector.py +876 -0
- htmlgraph/analytics/strategic/preference_manager.py +709 -0
- htmlgraph/analytics/strategic/suggestion_engine.py +747 -0
- htmlgraph/analytics/work_type.py +67 -27
- htmlgraph/analytics_index.py +53 -20
- htmlgraph/api/__init__.py +3 -0
- htmlgraph/api/cost_alerts_websocket.py +416 -0
- htmlgraph/api/main.py +2498 -0
- htmlgraph/api/static/htmx.min.js +1 -0
- htmlgraph/api/static/style-redesign.css +1344 -0
- htmlgraph/api/static/style.css +1079 -0
- htmlgraph/api/templates/dashboard-redesign.html +1366 -0
- htmlgraph/api/templates/dashboard.html +794 -0
- htmlgraph/api/templates/partials/activity-feed-hierarchical.html +326 -0
- htmlgraph/api/templates/partials/activity-feed.html +1100 -0
- htmlgraph/api/templates/partials/agents-redesign.html +317 -0
- htmlgraph/api/templates/partials/agents.html +317 -0
- htmlgraph/api/templates/partials/event-traces.html +373 -0
- htmlgraph/api/templates/partials/features-kanban-redesign.html +509 -0
- htmlgraph/api/templates/partials/features.html +578 -0
- htmlgraph/api/templates/partials/metrics-redesign.html +346 -0
- htmlgraph/api/templates/partials/metrics.html +346 -0
- htmlgraph/api/templates/partials/orchestration-redesign.html +443 -0
- htmlgraph/api/templates/partials/orchestration.html +198 -0
- htmlgraph/api/templates/partials/spawners.html +375 -0
- htmlgraph/api/templates/partials/work-items.html +613 -0
- htmlgraph/api/websocket.py +538 -0
- htmlgraph/archive/__init__.py +24 -0
- htmlgraph/archive/bloom.py +234 -0
- htmlgraph/archive/fts.py +297 -0
- htmlgraph/archive/manager.py +583 -0
- htmlgraph/archive/search.py +244 -0
- htmlgraph/atomic_ops.py +560 -0
- htmlgraph/attribute_index.py +2 -1
- htmlgraph/bounded_paths.py +539 -0
- htmlgraph/builders/base.py +57 -2
- htmlgraph/builders/bug.py +19 -3
- htmlgraph/builders/chore.py +19 -3
- htmlgraph/builders/epic.py +19 -3
- htmlgraph/builders/feature.py +27 -3
- htmlgraph/builders/insight.py +2 -1
- htmlgraph/builders/metric.py +2 -1
- htmlgraph/builders/pattern.py +2 -1
- htmlgraph/builders/phase.py +19 -3
- htmlgraph/builders/spike.py +29 -3
- htmlgraph/builders/track.py +42 -1
- htmlgraph/cigs/__init__.py +81 -0
- htmlgraph/cigs/autonomy.py +385 -0
- htmlgraph/cigs/cost.py +475 -0
- htmlgraph/cigs/messages_basic.py +472 -0
- htmlgraph/cigs/messaging.py +365 -0
- htmlgraph/cigs/models.py +771 -0
- htmlgraph/cigs/pattern_storage.py +427 -0
- htmlgraph/cigs/patterns.py +503 -0
- htmlgraph/cigs/posttool_analyzer.py +234 -0
- htmlgraph/cigs/reporter.py +818 -0
- htmlgraph/cigs/tracker.py +317 -0
- htmlgraph/cli/.htmlgraph/.session-warning-state.json +6 -0
- htmlgraph/cli/.htmlgraph/agents.json +72 -0
- htmlgraph/cli/.htmlgraph/htmlgraph.db +0 -0
- htmlgraph/cli/__init__.py +42 -0
- htmlgraph/cli/__main__.py +6 -0
- htmlgraph/cli/analytics.py +1424 -0
- htmlgraph/cli/base.py +685 -0
- htmlgraph/cli/constants.py +206 -0
- htmlgraph/cli/core.py +954 -0
- htmlgraph/cli/main.py +147 -0
- htmlgraph/cli/models.py +475 -0
- htmlgraph/cli/templates/__init__.py +1 -0
- htmlgraph/cli/templates/cost_dashboard.py +399 -0
- htmlgraph/cli/work/__init__.py +239 -0
- htmlgraph/cli/work/browse.py +115 -0
- htmlgraph/cli/work/features.py +568 -0
- htmlgraph/cli/work/orchestration.py +676 -0
- htmlgraph/cli/work/report.py +728 -0
- htmlgraph/cli/work/sessions.py +466 -0
- htmlgraph/cli/work/snapshot.py +559 -0
- htmlgraph/cli/work/tracks.py +486 -0
- htmlgraph/cli_commands/__init__.py +1 -0
- htmlgraph/cli_commands/feature.py +195 -0
- htmlgraph/cli_framework.py +115 -0
- htmlgraph/collections/__init__.py +2 -0
- htmlgraph/collections/base.py +197 -14
- htmlgraph/collections/bug.py +2 -1
- htmlgraph/collections/chore.py +2 -1
- htmlgraph/collections/epic.py +2 -1
- htmlgraph/collections/feature.py +2 -1
- htmlgraph/collections/insight.py +2 -1
- htmlgraph/collections/metric.py +2 -1
- htmlgraph/collections/pattern.py +2 -1
- htmlgraph/collections/phase.py +2 -1
- htmlgraph/collections/session.py +194 -0
- htmlgraph/collections/spike.py +13 -2
- htmlgraph/collections/task_delegation.py +241 -0
- htmlgraph/collections/todo.py +14 -1
- htmlgraph/collections/traces.py +487 -0
- htmlgraph/config/cost_models.json +56 -0
- htmlgraph/config.py +190 -0
- htmlgraph/context_analytics.py +2 -1
- htmlgraph/converter.py +116 -7
- htmlgraph/cost_analysis/__init__.py +5 -0
- htmlgraph/cost_analysis/analyzer.py +438 -0
- htmlgraph/dashboard.html +2246 -248
- htmlgraph/dashboard.html.backup +6592 -0
- htmlgraph/dashboard.html.bak +7181 -0
- htmlgraph/dashboard.html.bak2 +7231 -0
- htmlgraph/dashboard.html.bak3 +7232 -0
- htmlgraph/db/__init__.py +38 -0
- htmlgraph/db/queries.py +790 -0
- htmlgraph/db/schema.py +1788 -0
- htmlgraph/decorators.py +317 -0
- htmlgraph/dependency_models.py +2 -1
- htmlgraph/deploy.py +26 -27
- htmlgraph/docs/API_REFERENCE.md +841 -0
- htmlgraph/docs/HTTP_API.md +750 -0
- htmlgraph/docs/INTEGRATION_GUIDE.md +752 -0
- htmlgraph/docs/ORCHESTRATION_PATTERNS.md +717 -0
- htmlgraph/docs/README.md +532 -0
- htmlgraph/docs/__init__.py +77 -0
- htmlgraph/docs/docs_version.py +55 -0
- htmlgraph/docs/metadata.py +93 -0
- htmlgraph/docs/migrations.py +232 -0
- htmlgraph/docs/template_engine.py +143 -0
- htmlgraph/docs/templates/_sections/cli_reference.md.j2 +52 -0
- htmlgraph/docs/templates/_sections/core_concepts.md.j2 +29 -0
- htmlgraph/docs/templates/_sections/sdk_basics.md.j2 +69 -0
- htmlgraph/docs/templates/base_agents.md.j2 +78 -0
- htmlgraph/docs/templates/example_user_override.md.j2 +47 -0
- htmlgraph/docs/version_check.py +163 -0
- htmlgraph/edge_index.py +2 -1
- htmlgraph/error_handler.py +544 -0
- htmlgraph/event_log.py +86 -37
- htmlgraph/event_migration.py +2 -1
- htmlgraph/file_watcher.py +12 -8
- htmlgraph/find_api.py +2 -1
- htmlgraph/git_events.py +67 -9
- htmlgraph/hooks/.htmlgraph/.session-warning-state.json +6 -0
- htmlgraph/hooks/.htmlgraph/agents.json +72 -0
- htmlgraph/hooks/.htmlgraph/index.sqlite +0 -0
- htmlgraph/hooks/__init__.py +8 -0
- htmlgraph/hooks/bootstrap.py +169 -0
- htmlgraph/hooks/cigs_pretool_enforcer.py +354 -0
- htmlgraph/hooks/concurrent_sessions.py +208 -0
- htmlgraph/hooks/context.py +350 -0
- htmlgraph/hooks/drift_handler.py +525 -0
- htmlgraph/hooks/event_tracker.py +790 -99
- htmlgraph/hooks/git_commands.py +175 -0
- htmlgraph/hooks/installer.py +5 -1
- htmlgraph/hooks/orchestrator.py +327 -76
- htmlgraph/hooks/orchestrator_reflector.py +31 -4
- htmlgraph/hooks/post_tool_use_failure.py +32 -7
- htmlgraph/hooks/post_tool_use_handler.py +257 -0
- htmlgraph/hooks/posttooluse.py +92 -19
- htmlgraph/hooks/pretooluse.py +527 -7
- htmlgraph/hooks/prompt_analyzer.py +637 -0
- htmlgraph/hooks/session_handler.py +668 -0
- htmlgraph/hooks/session_summary.py +395 -0
- htmlgraph/hooks/state_manager.py +504 -0
- htmlgraph/hooks/subagent_detection.py +202 -0
- htmlgraph/hooks/subagent_stop.py +369 -0
- htmlgraph/hooks/task_enforcer.py +99 -4
- htmlgraph/hooks/validator.py +212 -91
- htmlgraph/ids.py +2 -1
- htmlgraph/learning.py +125 -100
- htmlgraph/mcp_server.py +2 -1
- htmlgraph/models.py +217 -18
- htmlgraph/operations/README.md +62 -0
- htmlgraph/operations/__init__.py +79 -0
- htmlgraph/operations/analytics.py +339 -0
- htmlgraph/operations/bootstrap.py +289 -0
- htmlgraph/operations/events.py +244 -0
- htmlgraph/operations/fastapi_server.py +231 -0
- htmlgraph/operations/hooks.py +350 -0
- htmlgraph/operations/initialization.py +597 -0
- htmlgraph/operations/initialization.py.backup +228 -0
- htmlgraph/operations/server.py +303 -0
- htmlgraph/orchestration/__init__.py +58 -0
- htmlgraph/orchestration/claude_launcher.py +179 -0
- htmlgraph/orchestration/command_builder.py +72 -0
- htmlgraph/orchestration/headless_spawner.py +281 -0
- htmlgraph/orchestration/live_events.py +377 -0
- htmlgraph/orchestration/model_selection.py +327 -0
- htmlgraph/orchestration/plugin_manager.py +140 -0
- htmlgraph/orchestration/prompts.py +137 -0
- htmlgraph/orchestration/spawner_event_tracker.py +383 -0
- htmlgraph/orchestration/spawners/__init__.py +16 -0
- htmlgraph/orchestration/spawners/base.py +194 -0
- htmlgraph/orchestration/spawners/claude.py +173 -0
- htmlgraph/orchestration/spawners/codex.py +435 -0
- htmlgraph/orchestration/spawners/copilot.py +294 -0
- htmlgraph/orchestration/spawners/gemini.py +471 -0
- htmlgraph/orchestration/subprocess_runner.py +36 -0
- htmlgraph/{orchestration.py → orchestration/task_coordination.py} +16 -8
- htmlgraph/orchestration.md +563 -0
- htmlgraph/orchestrator-system-prompt-optimized.txt +863 -0
- htmlgraph/orchestrator.py +2 -1
- htmlgraph/orchestrator_config.py +357 -0
- htmlgraph/orchestrator_mode.py +115 -4
- htmlgraph/parallel.py +2 -1
- htmlgraph/parser.py +86 -6
- htmlgraph/path_query.py +608 -0
- htmlgraph/pattern_matcher.py +636 -0
- htmlgraph/pydantic_models.py +476 -0
- htmlgraph/quality_gates.py +350 -0
- htmlgraph/query_builder.py +2 -1
- htmlgraph/query_composer.py +509 -0
- htmlgraph/reflection.py +443 -0
- htmlgraph/refs.py +344 -0
- htmlgraph/repo_hash.py +512 -0
- htmlgraph/repositories/__init__.py +292 -0
- htmlgraph/repositories/analytics_repository.py +455 -0
- htmlgraph/repositories/analytics_repository_standard.py +628 -0
- htmlgraph/repositories/feature_repository.py +581 -0
- htmlgraph/repositories/feature_repository_htmlfile.py +668 -0
- htmlgraph/repositories/feature_repository_memory.py +607 -0
- htmlgraph/repositories/feature_repository_sqlite.py +858 -0
- htmlgraph/repositories/filter_service.py +620 -0
- htmlgraph/repositories/filter_service_standard.py +445 -0
- htmlgraph/repositories/shared_cache.py +621 -0
- htmlgraph/repositories/shared_cache_memory.py +395 -0
- htmlgraph/repositories/track_repository.py +552 -0
- htmlgraph/repositories/track_repository_htmlfile.py +619 -0
- htmlgraph/repositories/track_repository_memory.py +508 -0
- htmlgraph/repositories/track_repository_sqlite.py +711 -0
- htmlgraph/sdk/__init__.py +398 -0
- htmlgraph/sdk/__init__.pyi +14 -0
- htmlgraph/sdk/analytics/__init__.py +19 -0
- htmlgraph/sdk/analytics/engine.py +155 -0
- htmlgraph/sdk/analytics/helpers.py +178 -0
- htmlgraph/sdk/analytics/registry.py +109 -0
- htmlgraph/sdk/base.py +484 -0
- htmlgraph/sdk/constants.py +216 -0
- htmlgraph/sdk/core.pyi +308 -0
- htmlgraph/sdk/discovery.py +120 -0
- htmlgraph/sdk/help/__init__.py +12 -0
- htmlgraph/sdk/help/mixin.py +699 -0
- htmlgraph/sdk/mixins/__init__.py +15 -0
- htmlgraph/sdk/mixins/attribution.py +113 -0
- htmlgraph/sdk/mixins/mixin.py +410 -0
- htmlgraph/sdk/operations/__init__.py +12 -0
- htmlgraph/sdk/operations/mixin.py +427 -0
- htmlgraph/sdk/orchestration/__init__.py +17 -0
- htmlgraph/sdk/orchestration/coordinator.py +203 -0
- htmlgraph/sdk/orchestration/spawner.py +204 -0
- htmlgraph/sdk/planning/__init__.py +19 -0
- htmlgraph/sdk/planning/bottlenecks.py +93 -0
- htmlgraph/sdk/planning/mixin.py +211 -0
- htmlgraph/sdk/planning/parallel.py +186 -0
- htmlgraph/sdk/planning/queue.py +210 -0
- htmlgraph/sdk/planning/recommendations.py +87 -0
- htmlgraph/sdk/planning/smart_planning.py +319 -0
- htmlgraph/sdk/session/__init__.py +19 -0
- htmlgraph/sdk/session/continuity.py +57 -0
- htmlgraph/sdk/session/handoff.py +110 -0
- htmlgraph/sdk/session/info.py +309 -0
- htmlgraph/sdk/session/manager.py +103 -0
- htmlgraph/sdk/strategic/__init__.py +26 -0
- htmlgraph/sdk/strategic/mixin.py +563 -0
- htmlgraph/server.py +295 -107
- htmlgraph/session_hooks.py +300 -0
- htmlgraph/session_manager.py +285 -3
- htmlgraph/session_registry.py +587 -0
- htmlgraph/session_state.py +436 -0
- htmlgraph/session_warning.py +2 -1
- htmlgraph/sessions/__init__.py +23 -0
- htmlgraph/sessions/handoff.py +756 -0
- htmlgraph/system_prompts.py +450 -0
- htmlgraph/templates/orchestration-view.html +350 -0
- htmlgraph/track_builder.py +33 -1
- htmlgraph/track_manager.py +38 -0
- htmlgraph/transcript.py +18 -5
- htmlgraph/validation.py +115 -0
- htmlgraph/watch.py +2 -1
- htmlgraph/work_type_utils.py +2 -1
- {htmlgraph-0.20.1.data → htmlgraph-0.27.5.data}/data/htmlgraph/dashboard.html +2246 -248
- {htmlgraph-0.20.1.dist-info → htmlgraph-0.27.5.dist-info}/METADATA +95 -64
- htmlgraph-0.27.5.dist-info/RECORD +337 -0
- {htmlgraph-0.20.1.dist-info → htmlgraph-0.27.5.dist-info}/entry_points.txt +1 -1
- htmlgraph/cli.py +0 -4839
- htmlgraph/sdk.py +0 -2359
- htmlgraph-0.20.1.dist-info/RECORD +0 -118
- {htmlgraph-0.20.1.data → htmlgraph-0.27.5.data}/data/htmlgraph/styles.css +0 -0
- {htmlgraph-0.20.1.data → htmlgraph-0.27.5.data}/data/htmlgraph/templates/AGENTS.md.template +0 -0
- {htmlgraph-0.20.1.data → htmlgraph-0.27.5.data}/data/htmlgraph/templates/CLAUDE.md.template +0 -0
- {htmlgraph-0.20.1.data → htmlgraph-0.27.5.data}/data/htmlgraph/templates/GEMINI.md.template +0 -0
- {htmlgraph-0.20.1.dist-info → htmlgraph-0.27.5.dist-info}/WHEEL +0 -0
htmlgraph/cli.py
DELETED
@@ -1,4839 +0,0 @@
#!/usr/bin/env python3
"""
HtmlGraph CLI.

Usage:
    htmlgraph serve [--port PORT] [--dir DIR]
    htmlgraph init [DIR]
    htmlgraph status [--dir DIR]
    htmlgraph query SELECTOR [--dir DIR]

Session Management:
    htmlgraph session start [--id ID] [--agent AGENT]
    htmlgraph session end ID [--notes NOTES] [--recommend NEXT] [--blocker BLOCKER]
    htmlgraph session list
    htmlgraph session start-info [--agent AGENT] [--format json]  # Optimized session start (1 call)
    htmlgraph session handoff [--session-id ID] [--notes NOTES] [--recommend NEXT] [--blocker BLOCKER] [--show]
    htmlgraph activity TOOL SUMMARY [--session ID] [--files FILE...]

Feature Management:
    htmlgraph feature start ID
    htmlgraph feature complete ID
    htmlgraph feature primary ID
    htmlgraph feature claim ID
    htmlgraph feature release ID
    htmlgraph feature auto-release

Track Management (Conductor-Style Planning):
    htmlgraph track new TITLE [--priority PRIORITY]
    htmlgraph track list
    htmlgraph track spec TRACK_ID TITLE
    htmlgraph track plan TRACK_ID TITLE
    htmlgraph track delete TRACK_ID

Analytics:
    htmlgraph analytics                           # Project-wide analytics
    htmlgraph analytics --session-id SESSION_ID   # Single session analysis
    htmlgraph analytics --recent N                # Analyze recent N sessions
"""

import argparse
import os
import subprocess
import sys
from datetime import datetime
from pathlib import Path
from typing import Any


def create_json_response(
    command: str,
    data: dict | list,
    success: bool = True,
    metadata: dict | None = None,
    warnings: list[str] | None = None,
    errors: list[str] | None = None,
) -> dict:
    """Create standardized JSON response for CLI commands."""
    import htmlgraph

    return {
        "success": success,
        "timestamp": datetime.now().isoformat(),
        "version": htmlgraph.__version__,
        "command": command,
        "data": data,
        "metadata": metadata or {},
        "warnings": warnings or [],
        "errors": errors or [],
    }


def cmd_install_gemini_extension(args: argparse.Namespace) -> None:
    """Install the Gemini CLI extension from the bundled package files."""
    import htmlgraph

    # Find the extension path in the installed package
    package_dir = Path(htmlgraph.__file__).parent
    extension_dir = package_dir / "extensions" / "gemini"

    if not extension_dir.exists():
        print(f"Error: Gemini extension not found at {extension_dir}", file=sys.stderr)
        print(
            "The extension may not be bundled with this version of htmlgraph.",
            file=sys.stderr,
        )
        sys.exit(1)

    print(f"Installing Gemini extension from: {extension_dir}")

    # Run gemini extensions install with the bundled path
    try:
        result = subprocess.run(
            ["gemini", "extensions", "install", str(extension_dir), "--consent"],
            capture_output=True,
            text=True,
            check=True,
        )
        print(result.stdout)
        print("\n✅ Gemini extension installed successfully!")
        print("\nTo verify installation:")
        print("  gemini extensions list")
    except subprocess.CalledProcessError as e:
        print(f"Error installing extension: {e.stderr}", file=sys.stderr)
        sys.exit(1)
    except FileNotFoundError:
        print("Error: 'gemini' command not found.", file=sys.stderr)
        print("Please install Gemini CLI first:", file=sys.stderr)
        print("  npm install -g @google/gemini-cli", file=sys.stderr)
        sys.exit(1)


def cmd_serve(args: argparse.Namespace) -> None:
    """Start the HtmlGraph server."""
    from htmlgraph.server import serve

    serve(
        port=args.port,
        graph_dir=args.graph_dir,
        static_dir=args.static_dir,
        host=args.host,
        watch=not args.no_watch,
        auto_port=args.auto_port,
    )


def cmd_init(args: argparse.Namespace) -> None:
    """Initialize a new .htmlgraph directory."""
    import shutil

    from htmlgraph.analytics_index import AnalyticsIndex
    from htmlgraph.server import HtmlGraphAPIHandler

    # Interactive setup wizard
    if args.interactive:
        print("=== HtmlGraph Interactive Setup ===\n")

        # Get project name
        default_name = Path(args.dir).resolve().name
        project_name = input(f"Project name [{default_name}]: ").strip() or default_name

        # Get agent name
        agent_name = input("Your agent name [claude]: ").strip() or "claude"

        # Ask about git hooks
        install_hooks_response = (
            input("Install git hooks for automatic tracking? [Y/n]: ").strip().lower()
        )
        args.install_hooks = install_hooks_response != "n"

        # Ask about documentation generation
        gen_docs_response = (
            input("Generate AGENTS.md, CLAUDE.md, GEMINI.md? [Y/n]: ").strip().lower()
        )
        generate_docs = gen_docs_response != "n"

        print()
    else:
        # Non-interactive defaults
        project_name = Path(args.dir).resolve().name
        agent_name = "claude"
        generate_docs = True  # Always generate in non-interactive mode

    graph_dir = Path(args.dir) / ".htmlgraph"
    graph_dir.mkdir(parents=True, exist_ok=True)

    for collection in HtmlGraphAPIHandler.COLLECTIONS:
        (graph_dir / collection).mkdir(exist_ok=True)

    # Event stream directory (Git-friendly source of truth)
    events_dir = graph_dir / "events"
    events_dir.mkdir(exist_ok=True)
    if not args.no_events_keep:
        keep = events_dir / ".gitkeep"
        if not keep.exists():
            keep.write_text("", encoding="utf-8")

    # Copy stylesheet
    styles_src = Path(__file__).parent / "styles.css"
    styles_dest = graph_dir / "styles.css"
    if styles_src.exists() and not styles_dest.exists():
        styles_dest.write_text(styles_src.read_text())

    # Create default index.html if not exists
    index_path = Path(args.dir) / "index.html"
    if not index_path.exists():
        create_default_index(index_path)

    # Create analytics cache DB (rebuildable; typically gitignored)
    if not args.no_index:
        try:
            AnalyticsIndex(graph_dir / "index.sqlite").ensure_schema()
        except Exception:
            # Never fail init because of analytics cache.
            pass

    def ensure_gitignore_entries(project_dir: Path, lines: list[str]) -> None:
        if args.no_update_gitignore:
            return
        gitignore_path = project_dir / ".gitignore"
        existing = ""
        if gitignore_path.exists():
            try:
                existing = gitignore_path.read_text(encoding="utf-8")
            except Exception:
                existing = ""
        existing_lines = set(existing.splitlines())
        missing = [ln for ln in lines if ln not in existing_lines]
        if not missing:
            return
        block = "\n".join(
            ["", "# HtmlGraph analytics index (rebuildable cache)", *missing, ""]
            if "# HtmlGraph analytics index (rebuildable cache)" not in existing_lines
            else ["", *missing, ""]
        )
        try:
            gitignore_path.write_text(existing + block, encoding="utf-8")
        except Exception:
            # Don't fail init on .gitignore issues.
            pass

    ensure_gitignore_entries(
        Path(args.dir),
        [
            ".htmlgraph/index.sqlite",
            ".htmlgraph/index.sqlite-wal",
            ".htmlgraph/index.sqlite-shm",
            ".htmlgraph/git-hook-errors.log",
        ],
    )

    # Ensure versioned hook scripts exist (installation into .git/hooks is optional)
    hooks_dir = graph_dir / "hooks"
    hooks_dir.mkdir(exist_ok=True)

    # Hook templates (used when htmlgraph is installed without this repo layout).
    post_commit = """#!/bin/bash
#
# HtmlGraph Post-Commit Hook
# Logs Git commit events for agent-agnostic continuity tracking
#

set +e
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(cd "$SCRIPT_DIR/../.." && pwd)"
cd "$PROJECT_ROOT" || exit 0

if [ ! -d ".htmlgraph" ]; then
    exit 0
fi

if ! command -v htmlgraph &> /dev/null; then
    if command -v python3 &> /dev/null; then
        python3 -m htmlgraph.git_events commit &> /dev/null &
    fi
    exit 0
fi

htmlgraph git-event commit &> /dev/null &
exit 0
"""

    post_checkout = """#!/bin/bash
#
# HtmlGraph Post-Checkout Hook
# Logs branch switches / checkouts for continuity tracking
#

set +e
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(cd "$SCRIPT_DIR/../.." && pwd)"
cd "$PROJECT_ROOT" || exit 0

if [ ! -d ".htmlgraph" ]; then
    exit 0
fi

OLD_HEAD="$1"
NEW_HEAD="$2"
FLAG="$3"

if ! command -v htmlgraph &> /dev/null; then
    if command -v python3 &> /dev/null; then
        python3 -m htmlgraph.git_events checkout "$OLD_HEAD" "$NEW_HEAD" "$FLAG" &> /dev/null &
    fi
    exit 0
fi

htmlgraph git-event checkout "$OLD_HEAD" "$NEW_HEAD" "$FLAG" &> /dev/null &
exit 0
"""

    post_merge = """#!/bin/bash
#
# HtmlGraph Post-Merge Hook
# Logs successful merges for continuity tracking
#

set +e
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(cd "$SCRIPT_DIR/../.." && pwd)"
cd "$PROJECT_ROOT" || exit 0

if [ ! -d ".htmlgraph" ]; then
    exit 0
fi

SQUASH_FLAG="$1"

if ! command -v htmlgraph &> /dev/null; then
    if command -v python3 &> /dev/null; then
        python3 -m htmlgraph.git_events merge "$SQUASH_FLAG" &> /dev/null &
    fi
    exit 0
fi

htmlgraph git-event merge "$SQUASH_FLAG" &> /dev/null &
exit 0
"""

    pre_push = """#!/bin/bash
#
# HtmlGraph Pre-Push Hook
# Logs pushes for continuity tracking / team boundary events
#

set +e
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(cd "$SCRIPT_DIR/../.." && pwd)"
cd "$PROJECT_ROOT" || exit 0

if [ ! -d ".htmlgraph" ]; then
    exit 0
fi

REMOTE_NAME="$1"
REMOTE_URL="$2"
UPDATES="$(cat)"

if ! command -v htmlgraph &> /dev/null; then
    if command -v python3 &> /dev/null; then
        printf "%s" "$UPDATES" | python3 -m htmlgraph.git_events push "$REMOTE_NAME" "$REMOTE_URL" &> /dev/null &
    fi
    exit 0
fi

printf "%s" "$UPDATES" | htmlgraph git-event push "$REMOTE_NAME" "$REMOTE_URL" &> /dev/null &
exit 0
"""

    pre_commit = """#!/bin/bash
#
# HtmlGraph Pre-Commit Hook
# 1. BLOCKS direct edits to .htmlgraph/ (AI agents must use SDK)
# 2. Reminds developers to create/start features for non-trivial work
#
# To disable feature reminder: git config htmlgraph.precommit false
# To bypass blocking once: git commit --no-verify (NOT RECOMMENDED)

# Check if HtmlGraph is initialized
if [ ! -d ".htmlgraph" ]; then
    # Not an HtmlGraph project, skip silently
    exit 0
fi

# Redirect output to stderr (standard for git hooks)
exec 1>&2

# ============================================================
# BLOCKING CHECK: Direct edits to .htmlgraph/ files
# AI agents must use SDK, not direct file edits
# ============================================================
HTMLGRAPH_FILES=$(git diff --cached --name-only --diff-filter=ACMR | grep "^\\.htmlgraph/" || true)

if [ -n "$HTMLGRAPH_FILES" ]; then
    echo ""
    echo "❌ BLOCKED: Direct edits to .htmlgraph/ files"
    echo "────────────────────────────────────────────────────────────────"
    echo ""
    echo "Modified files:"
    echo "$HTMLGRAPH_FILES" | while read -r file; do
        echo "  - $file"
    done
    echo ""
    echo "AI agents must use SDK, not direct file edits."
    echo "See AGENTS.md line 3: 'AI agents must NEVER edit .htmlgraph/ HTML files directly'"
    echo ""
    echo "Use SDK instead:"
    echo "  from htmlgraph import SDK"
    echo "  sdk = SDK()"
    echo "  sdk.features.complete('feature-id')  # Mark feature done"
    echo "  sdk.features.create('Title')  # Create new feature"
    echo ""
    echo "Or CLI:"
    echo "  uv run htmlgraph feature complete <id>"
    echo "  uv run htmlgraph feature create 'Title'"
    echo ""
    echo "To bypass (NOT RECOMMENDED): git commit --no-verify"
    echo "────────────────────────────────────────────────────────────────"
    echo ""
    exit 1
fi

# ============================================================
# REMINDER CHECK: Feature tracking (non-blocking)
# ============================================================
# Check if reminder is disabled via config
if [ "$(git config --type=bool htmlgraph.precommit)" = "false" ]; then
    exit 0
fi

# Fast check for in-progress features using grep (avoids Python startup)
ACTIVE_COUNT=$(find .htmlgraph/features -name "*.html" -exec grep -l 'data-status="in-progress"' {} \\; 2>/dev/null | wc -l | tr -d ' ')

# If we have active features and htmlgraph CLI is available, get details
if [ "$ACTIVE_COUNT" -gt 0 ] && command -v htmlgraph &> /dev/null; then
    ACTIVE_FEATURES=$(htmlgraph feature list --status in-progress 2>/dev/null)
else
    ACTIVE_FEATURES=""
fi

if [ "$ACTIVE_COUNT" -gt 0 ]; then
    echo ""
    echo "✅ HtmlGraph: $ACTIVE_COUNT active feature(s)"
    echo ""
    echo "$ACTIVE_FEATURES"
    echo ""
else
    echo ""
    echo "⚠️  HtmlGraph Feature Reminder"
    echo "────────────────────────────────────────────────────────────────"
    echo "No active features found. Did you forget to start one?"
    echo ""
    echo "Quick decision:"
    echo "  • >30 min work? → Create feature"
    echo "  • 3+ files? → Create feature"
    echo "  • Simple fix? → Direct commit OK"
    echo ""
    echo "To disable: git config htmlgraph.precommit false"
    echo "────────────────────────────────────────────────────────────────"
    echo ""
fi

exit 0
"""

    def ensure_hook_file(hook_name: str, hook_content: str) -> Path:
        hook_dest = hooks_dir / f"{hook_name}.sh"
        if not hook_dest.exists():
            hook_dest.write_text(hook_content)
        try:
            hook_dest.chmod(0o755)
        except Exception:
            pass
        return hook_dest

    hook_files = {
        "pre-commit": ensure_hook_file("pre-commit", pre_commit),
        "post-commit": ensure_hook_file("post-commit", post_commit),
        "post-checkout": ensure_hook_file("post-checkout", post_checkout),
        "post-merge": ensure_hook_file("post-merge", post_merge),
        "pre-push": ensure_hook_file("pre-push", pre_push),
    }

    # Generate documentation files from templates
    if generate_docs:

        def render_template(
            template_path: Path, replacements: dict[str, str]
        ) -> str | None:
            """Render a template file with variable replacements."""
            if not template_path.exists():
                return None
            content = template_path.read_text(encoding="utf-8")
            for key, value in replacements.items():
                content = content.replace(f"{{{{{key}}}}}", value)
            return content

        templates_dir = Path(__file__).parent / "templates"
        project_dir = Path(args.dir)

        # Get version
        try:
            from htmlgraph import __version__

            version = __version__
        except:
            version = "unknown"

        replacements = {
            "PROJECT_NAME": project_name,
            "AGENT_NAME": agent_name,
            "VERSION": version,
        }

        # Generate AGENTS.md
        agents_template = templates_dir / "AGENTS.md.template"
        agents_dest = project_dir / "AGENTS.md"
        if agents_template.exists() and not agents_dest.exists():
            content = render_template(agents_template, replacements)
            if content:
                agents_dest.write_text(content, encoding="utf-8")
                print(f"✓ Generated: {agents_dest}")

        # Generate CLAUDE.md
        claude_template = templates_dir / "CLAUDE.md.template"
        claude_dest = project_dir / "CLAUDE.md"
        if claude_template.exists() and not claude_dest.exists():
            content = render_template(claude_template, replacements)
            if content:
                claude_dest.write_text(content, encoding="utf-8")
                print(f"✓ Generated: {claude_dest}")

        # Generate GEMINI.md
        gemini_template = templates_dir / "GEMINI.md.template"
        gemini_dest = project_dir / "GEMINI.md"
        if gemini_template.exists() and not gemini_dest.exists():
            content = render_template(gemini_template, replacements)
            if content:
                gemini_dest.write_text(content, encoding="utf-8")
                print(f"✓ Generated: {gemini_dest}")

    print(f"\nInitialized HtmlGraph in {graph_dir}")
    print(f"Collections: {', '.join(HtmlGraphAPIHandler.COLLECTIONS)}")
    print("\nStart server with: htmlgraph serve")
    if not args.no_index:
        print(
            f"Analytics cache: {graph_dir / 'index.sqlite'} (rebuildable; typically gitignored)"
        )
    print(f"Events: {events_dir}/ (append-only JSONL)")

    # Install Git hooks if requested
    if args.install_hooks:
        git_dir = Path(args.dir) / ".git"
        if not git_dir.exists():
            print("\n⚠️  Warning: No .git directory found. Git hooks not installed.")
            print("   Initialize git first: git init")
            return

        def install_hook(
            hook_name: str, hook_dest: Path, hook_content: str | None
        ) -> None:
            """
            Install one Git hook:
            - Ensure `.htmlgraph/hooks/<hook>.sh` exists (copy template if present; else inline)
            - Install to `.git/hooks/<hook>` (symlink or chained wrapper if existing)
            """
            # Try to copy a template from this repo layout (dev), otherwise inline.
            hook_src = (
                Path(__file__).parent.parent.parent.parent
                / ".htmlgraph"
                / "hooks"
                / f"{hook_name}.sh"
            )
            if hook_src.exists() and hook_src.resolve() != hook_dest.resolve():
                shutil.copy(hook_src, hook_dest)
            elif not hook_dest.exists():
                if not hook_content:
                    raise RuntimeError(f"Missing hook content for {hook_name}")
                hook_dest.write_text(hook_content)
            # Ensure executable (covers the case where the file already existed)
            try:
                hook_dest.chmod(0o755)
            except Exception:
                pass

            git_hook_path = git_dir / "hooks" / hook_name

            if git_hook_path.exists():
                print(f"\n⚠️  Existing {hook_name} hook found")
                backup_path = git_hook_path.with_suffix(".existing")
                if not backup_path.exists():
                    shutil.copy(git_hook_path, backup_path)
                    print(f"   Backed up to: {backup_path}")

                chain_content = f'''#!/bin/bash
# Chained hook - runs existing hook then HtmlGraph hook

if [ -f "{backup_path}" ]; then
    "{backup_path}" || exit $?
fi

if [ -f "{hook_dest}" ]; then
    "{hook_dest}" || true
fi
'''
                git_hook_path.write_text(chain_content)
                git_hook_path.chmod(0o755)
                print(f"   Installed chained hook at: {git_hook_path}")
                return

            try:
                git_hook_path.symlink_to(hook_dest.resolve())
                print("\n✓ Git hooks installed")
                print(f"   {hook_name}: {git_hook_path} -> {hook_dest}")
            except OSError:
                shutil.copy(hook_dest, git_hook_path)
                git_hook_path.chmod(0o755)
                print("\n✓ Git hooks installed")
                print(f"   {hook_name}: {git_hook_path}")

        install_hook("pre-commit", hook_files["pre-commit"], pre_commit)
        install_hook("post-commit", hook_files["post-commit"], post_commit)
        install_hook("post-checkout", hook_files["post-checkout"], post_checkout)
        install_hook("post-merge", hook_files["post-merge"], post_merge)
        install_hook("pre-push", hook_files["pre-push"], pre_push)

        print("\nGit events will now be logged to HtmlGraph automatically.")


def cmd_install_hooks(args: argparse.Namespace) -> None:
    """Install Git hooks for automatic tracking."""
    from pathlib import Path

    from htmlgraph.hooks import AVAILABLE_HOOKS
    from htmlgraph.hooks.installer import HookConfig, HookInstaller

    project_dir = Path(args.project_dir).resolve()

    # Load configuration
    config_path = project_dir / ".htmlgraph" / "hooks-config.json"
    config = HookConfig(config_path)

    # Handle configuration changes
    if args.enable:
        if args.enable not in AVAILABLE_HOOKS:
            print(f"Error: Unknown hook '{args.enable}'")
            print(f"Available hooks: {', '.join(AVAILABLE_HOOKS)}")
            return
        config.enable_hook(args.enable)
        config.save()
        print(f"✓ Enabled hook '{args.enable}' in configuration")
        return

    if args.disable:
        if args.disable not in AVAILABLE_HOOKS:
            print(f"Error: Unknown hook '{args.disable}'")
            print(f"Available hooks: {', '.join(AVAILABLE_HOOKS)}")
            return
        config.disable_hook(args.disable)
        config.save()
        print(f"✓ Disabled hook '{args.disable}' in configuration")
        return

    # Override symlink preference if --use-copy is set
    if args.use_copy:
        config.config["use_symlinks"] = False

    # Create installer
    installer = HookInstaller(project_dir, config)

    # Validate environment
    is_valid, error_msg = installer.validate_environment()
    if not is_valid:
        print(f"❌ {error_msg}")
        return

    # List hooks status
    if args.list:
        print("\nGit Hooks Installation Status")
        print("=" * 60)

        status = installer.list_hooks()
        for hook_name, info in status.items():
            status_icon = "✓" if info["installed"] else "✗"
            enabled_icon = "🟢" if info["enabled"] else "🔴"

            print(f"\n{enabled_icon} {hook_name} ({status_icon} installed)")
            print(f"  Enabled in config: {info['enabled']}")
            print(f"  Versioned (.htmlgraph/hooks/): {info['versioned']}")
            print(f"  Installed (.git/hooks/): {info['installed']}")

            if info["is_symlink"]:
                our_hook = "✓" if info.get("our_hook", False) else "✗"
                print(f"  Type: Symlink ({our_hook} ours)")
                print(f"  Target: {info.get('symlink_target', 'unknown')}")
            elif info["installed"]:
                print("  Type: Copied file")

        print("\n" + "=" * 60)
        print(f"\nConfiguration: {config_path}")
        print("Use 'htmlgraph install-hooks --enable <hook>' to enable")
        print("Use 'htmlgraph install-hooks --disable <hook>' to disable")
        return

    # Uninstall a hook
    if args.uninstall:
        if args.uninstall not in AVAILABLE_HOOKS:
            print(f"Error: Unknown hook '{args.uninstall}'")
            print(f"Available hooks: {', '.join(AVAILABLE_HOOKS)}")
            return

        success, message = installer.uninstall_hook(args.uninstall)
        if success:
            print(f"✓ {message}")
        else:
            print(f"❌ {message}")
        return

    # Install hooks
    print("\n🔧 Installing Git hooks for HtmlGraph\n")
    print(f"Project: {project_dir}")
    print(f"Configuration: {config_path}")

    if args.dry_run:
        print("\n[DRY RUN MODE - No changes will be made]\n")

    results = installer.install_all_hooks(force=args.force, dry_run=args.dry_run)

    # Display results
    success_count = 0
    failure_count = 0

    for hook_name, (success, message) in results.items():
        if success:
            success_count += 1
            print(f"✓ {message}")
        else:
            failure_count += 1
            print(f"❌ {message}")

    print("\n" + "=" * 60)
    print(f"Summary: {success_count} installed, {failure_count} failed")

    if not args.dry_run:
        print(f"\nConfiguration saved to: {config_path}")
        print("\nGit events will now be logged to HtmlGraph automatically.")
        print("\nManagement commands:")
        print("  htmlgraph install-hooks --list               # Show status")
        print("  htmlgraph install-hooks --uninstall <hook>   # Remove hook")
        print("  htmlgraph install-hooks --enable <hook>      # Enable hook")
        print("  htmlgraph install-hooks --disable <hook>     # Disable hook")


def cmd_status(args: argparse.Namespace) -> None:
    """Show status of the graph."""
    import json
    from collections import Counter

    from htmlgraph.sdk import SDK

    # Use SDK to query all collections
    sdk = SDK(directory=args.graph_dir)

    total = 0
    by_status: Counter[str] = Counter()
    by_collection = {}

    # All available collections
    collections = [
        "features",
        "bugs",
        "chores",
        "spikes",
        "epics",
        "phases",
        "sessions",
        "tracks",
        "agents",
    ]

    for coll_name in collections:
        coll = getattr(sdk, coll_name)
        try:
            nodes = coll.all()
            count = len(nodes)
            if count > 0:
                by_collection[coll_name] = count
                total += count

                # Count by status
                for node in nodes:
                    status = getattr(node, "status", "unknown")
                    by_status[status] += 1
        except Exception:
            # Collection might not exist yet
            pass

    # Output based on format flag
    if args.format == "json":
        response = create_json_response(
            command="status",
            data={
                "total_nodes": total,
                "by_collection": dict(sorted(by_collection.items())),
                "by_status": dict(sorted(by_status.items())),
            },
            metadata={"graph_dir": args.graph_dir},
        )
        print(json.dumps(response, indent=2))
    else:
        # Text output (default)
        if not args.quiet:
            print(f"HtmlGraph Status: {args.graph_dir}")
            print(f"{'=' * 40}")

        print(f"Total nodes: {total}")

        if not args.quiet:
            print("\nBy Collection:")
            for coll, count in sorted(by_collection.items()):
                print(f"  {coll}: {count}")
            print("\nBy Status:")
            for status, count in sorted(by_status.items()):
                print(f"  {status}: {count}")

        # Verbose output
        if args.verbose >= 1:
            print("\n--- Verbose Details ---")
            print(f"Graph directory: {args.graph_dir}")
            print(f"Collections scanned: {len(collections)}")
            print(f"Collections with data: {len(by_collection)}")

        if args.verbose >= 2:
            print("\nAll collections checked:")
            for coll_name in collections:
                count = by_collection.get(coll_name, 0)
                marker = "✓" if count > 0 else "✗"
                print(f"  {marker} {coll_name}: {count}")


def cmd_debug(args: argparse.Namespace) -> None:
    """Show debugging resources and system diagnostics."""
    import os
    from pathlib import Path

    from htmlgraph.sdk import SDK

    print("🔍 HtmlGraph Debugging Resources\n")
    print("=" * 60)

    # Documentation
    print("\n📚 Documentation:")
    print("  - DEBUGGING.md - Complete debugging guide")
    print("  - AGENTS.md - SDK and agent documentation")
    print("  - CLAUDE.md - Project workflow")

    # Debugging Agents
    print("\n🤖 Debugging Agents:")
    agents_dir = Path("packages/claude-plugin/agents")
    if agents_dir.exists():
        print(f"  - {agents_dir}/researcher.md")
        print(f"  - {agents_dir}/debugger.md")
        print(f"  - {agents_dir}/test-runner.md")
    else:
        print("  - researcher.md - Research documentation before implementing")
        print("  - debugger.md - Systematic error analysis")
        print("  - test-runner.md - Quality gates and validation")

    # Diagnostic Commands
    print("\n🛠️ Diagnostic Commands:")
    print("  htmlgraph status - Show current graph state")
    print("  htmlgraph feature list - List all features")
    print("  htmlgraph session list - List all sessions")
    print("  htmlgraph analytics - Project analytics")

    # Current System Status
    print("\n📊 Current Status:")
    print(f"  Graph directory: {args.graph_dir}")

    graph_path = Path(args.graph_dir)
    if graph_path.exists():
        print("  Status: ✅ Initialized")

        # Try to get quick stats
        try:
            sdk = SDK(directory=args.graph_dir)

            # Count features
            features = sdk.features.all()
            print(f"  Features: {len(features)}")

            # Count sessions
            sessions = sdk.sessions.all()
            print(f"  Sessions: {len(sessions)}")

            # Count other collections
            for coll_name in ["bugs", "chores", "spikes", "epics", "phases", "tracks"]:
                try:
                    coll = getattr(sdk, coll_name)
                    nodes = coll.all()
                    if len(nodes) > 0:
                        print(f"  {coll_name.capitalize()}: {len(nodes)}")
                except Exception:
                    pass

        except Exception as e:
            print(f"  Warning: Could not load graph data: {e}")
    else:
        print("  Status: ⚠️ Not initialized")
        print("  Run 'htmlgraph init' to create .htmlgraph directory")

    # Environment Info
    print("\n🔧 Environment:")
    print(f"  Python: {sys.version.split()[0]}")
    print(f"  Working dir: {os.getcwd()}")

    # Check for common files
    print("\n📁 Project Files:")
    for filename in ["pyproject.toml", "package.json", ".git", "README.md"]:
        exists = "✅" if Path(filename).exists() else "❌"
        print(f"  {exists} {filename}")

    print("\n" + "=" * 60)
    print("For more help: https://github.com/Shakes-tzd/htmlgraph")
    print()


def cmd_query(args: argparse.Namespace) -> None:
    """Query nodes with CSS selector."""
    import json

    from htmlgraph.converter import node_to_dict
    from htmlgraph.graph import HtmlGraph

    graph_dir = Path(args.graph_dir)
    if not graph_dir.exists():
        print(f"Error: {graph_dir} not found.", file=sys.stderr)
        sys.exit(1)

    results = []
    for collection_dir in graph_dir.iterdir():
        if collection_dir.is_dir() and not collection_dir.name.startswith("."):
            graph = HtmlGraph(collection_dir, auto_load=True)
            for node in graph.query(args.selector):
                data = node_to_dict(node)
                data["_collection"] = collection_dir.name
                results.append(data)

    if args.format == "json":
        print(json.dumps(results, indent=2, default=str))
    else:
        for result in results:  # type: dict[str, Any]
            status = result.get("status", "?")
            priority = result.get("priority", "?")
            print(
                f"[{result['_collection']}] {result['id']}: {result['title']} ({status}, {priority})"
            )


# =============================================================================
# Session Management Commands
# =============================================================================


def cmd_session_start(args: argparse.Namespace) -> None:
    """Start a new session."""
    import json

    from htmlgraph.sdk import SDK

    sdk = SDK(directory=args.graph_dir, agent=args.agent)
    session = sdk.start_session(session_id=args.id, title=args.title, agent=args.agent)

    if args.format == "json":
        from htmlgraph.converter import session_to_dict

        print(json.dumps(session_to_dict(session), indent=2))
    else:
        print(f"Session started: {session.id}")
        print(f"  Agent: {session.agent}")
        print(f"  Started: {session.started_at.isoformat()}")
        if session.title:
            print(f"  Title: {session.title}")


def cmd_session_end(args: argparse.Namespace) -> None:
    """End a session."""
    import json

    from htmlgraph.sdk import SDK

    sdk = SDK(directory=args.graph_dir)
    blockers = args.blocker if args.blocker else None
    session = sdk.end_session(
        args.id,
        handoff_notes=args.notes,
        recommended_next=args.recommend,
        blockers=blockers,
    )

    if session is None:
        print(f"Error: Session '{args.id}' not found.", file=sys.stderr)
        sys.exit(1)

    if args.format == "json":
        from htmlgraph.converter import session_to_dict

        print(json.dumps(session_to_dict(session), indent=2))
    else:
        print(f"Session ended: {session.id}")
        print(f"  Duration: {session.ended_at - session.started_at}")
        print(f"  Events: {session.event_count}")
        if session.worked_on:
            print(f"  Worked on: {', '.join(session.worked_on)}")


def cmd_session_handoff(args: argparse.Namespace) -> None:
    """Set or show session handoff context."""
    import json

    from htmlgraph.sdk import SDK

    sdk = SDK(directory=args.graph_dir, agent=args.agent)

    if args.show:
        # For showing, we might still need direct manager access or add more methods to SDK
        # But for now, let's keep using SessionManager logic via SDK property if needed
        # or implement show logic here using SDK collections

        # Use session_manager.get_session() to get Session objects (not Node)
        if args.session_id:
            session = sdk.session_manager.get_session(args.session_id)
        else:
            # Need "last ended session" - SDK doesn't expose this yet.
            # Fallback to session_manager logic exposed on SDK
            session = sdk.session_manager.get_last_ended_session(agent=args.agent)

        if not session:
            if args.format == "json":
                print(json.dumps({}))
            else:
                print("No handoff context found.")
            return

        if args.format == "json":
            from htmlgraph.converter import session_to_dict

            print(json.dumps(session_to_dict(session), indent=2))
        else:
            print(f"Session: {session.id}")
            if session.handoff_notes:
                print(f"Notes: {session.handoff_notes}")
            if session.recommended_next:
                print(f"Recommended next: {session.recommended_next}")
            if session.blockers:
                print(f"Blockers: {', '.join(session.blockers)}")
        return

    # Setting handoff
    if not (args.notes or args.recommend or args.blocker):
        print(
            "Error: Provide --notes, --recommend, or --blocker (or use --show).",
            file=sys.stderr,
        )
        sys.exit(1)

    handoff_result = sdk.set_session_handoff(
        session_id=args.session_id,  # Optional, defaults to active
        handoff_notes=args.notes,
        recommended_next=args.recommend,
        blockers=args.blocker if args.blocker else None,
    )

    if handoff_result is None:
        if args.session_id:
            print(f"Error: Session '{args.session_id}' not found.", file=sys.stderr)
        else:
            print(
                "Error: No active session found. Provide --session-id.",
                file=sys.stderr,
            )
        sys.exit(1)

    if args.format == "json":
        print(json.dumps(handoff_result, indent=2))
    else:
        print(f"Session handoff updated: {handoff_result.get('id', 'unknown')}")


def cmd_session_list(args: argparse.Namespace) -> None:
    """List all sessions."""
    import json

    from htmlgraph.converter import SessionConverter

    sessions_dir = Path(args.graph_dir) / "sessions"
    if not sessions_dir.exists():
        print("No sessions found.")
        return

    converter = SessionConverter(sessions_dir)
    sessions = converter.load_all()

    # Sort by started_at descending (handle mixed tz-aware/naive datetimes)
    def sort_key(s: Any) -> Any:
        ts = s.started_at
        # Make naive datetimes comparable by assuming UTC
        if ts.tzinfo is None:
            return ts.replace(tzinfo=None)
        return ts.replace(tzinfo=None)  # Compare as naive for sorting

    sessions.sort(key=sort_key, reverse=True)

    if args.format == "json":
        from htmlgraph.converter import session_to_dict

        print(json.dumps([session_to_dict(s) for s in sessions], indent=2))
    else:
        if not sessions:
            print("No sessions found.")
            return

        print(f"{'ID':<30} {'Status':<10} {'Agent':<15} {'Events':<8} {'Started'}")
        print("=" * 90)
        for session in sessions:
            started = session.started_at.strftime("%Y-%m-%d %H:%M")
            print(
                f"{session.id:<30} {session.status:<10} {session.agent:<15} {session.event_count:<8} {started}"
            )


def cmd_session_start_info(args: argparse.Namespace) -> None:
    """Get comprehensive session start information (optimized for AI agents)."""
    import json

    from htmlgraph.sdk import SDK

    sdk = SDK(directory=args.graph_dir, agent=args.agent)

    info = sdk.get_session_start_info(
        include_git_log=not args.no_git,
        git_log_count=args.git_count,
        analytics_top_n=args.top_n,
        analytics_max_agents=args.max_agents,
    )

    if args.format == "json":
        print(json.dumps(info, indent=2, default=str))
    else:
        # Human-readable format
        status = info["status"]
        print("=" * 80)
        print("SESSION START INFO")
        print("=" * 80)

        # Project status
        print(f"\nProject: {status.get('project_name', 'HtmlGraph')}")
        print(f"Total nodes: {status.get('total_nodes', 0)}")
        print(f"In progress: {status.get('in_progress_count', 0)}")
        print(f"Completed: {status.get('done_count', 0)}")

        # Active work item (validation status)
        active_work = info.get("active_work")
        print("\nACTIVE WORK:")
        if active_work:
            # Determine type symbol
            type_symbol = {
                "feature": "✨",
                "bug": "🐛",
                "spike": "🔍",
                "chore": "🔧",
                "epic": "🎯",
            }.get(active_work.get("type"), "📋")

            # Build progress info
            steps_total = active_work.get("steps_total", 0)
            steps_completed = active_work.get("steps_completed", 0)
            progress_str = (
                f"({steps_completed}/{steps_total} steps)" if steps_total > 0 else ""
            )

            # Check if auto-spike
            auto_spike_info = ""
            if active_work.get("type") == "spike" and active_work.get("auto_generated"):
                spike_subtype = active_work.get("spike_subtype", "unknown")
                auto_spike_info = f" [AUTO-{spike_subtype.upper()}]"

            print(
                f"  {type_symbol} {active_work['id']}: {active_work['title']} {progress_str}{auto_spike_info}"
            )
        else:
            print("  ⚠️ No active work item")
            print("  Code changes will be blocked until you assign work.")
            print('  Create a feature: uv run htmlgraph feature create "Title"')

        # Active features
1177
|
-
active_features = [f for f in info["features"] if f["status"] == "in-progress"]
|
|
1178
|
-
if active_features:
|
|
1179
|
-
print(f"\nACTIVE FEATURES ({len(active_features)}):")
|
|
1180
|
-
for feat in active_features:
|
|
1181
|
-
progress = (
|
|
1182
|
-
f"{feat['steps_completed']}/{feat['steps_total']}"
|
|
1183
|
-
if feat["steps_total"] > 0
|
|
1184
|
-
else "no steps"
|
|
1185
|
-
)
|
|
1186
|
-
print(f" - {feat['id']}: {feat['title']} ({progress})")
|
|
1187
|
-
|
|
1188
|
-
# Recent sessions
|
|
1189
|
-
recent_sessions = info["sessions"][:5]
|
|
1190
|
-
if recent_sessions:
|
|
1191
|
-
print(f"\nRECENT SESSIONS ({len(recent_sessions)}):")
|
|
1192
|
-
for sess in recent_sessions:
|
|
1193
|
-
print(
|
|
1194
|
-
f" - {sess['id']}: {sess['agent']} ({sess['event_count']} events)"
|
|
1195
|
-
)
|
|
1196
|
-
|
|
1197
|
-
# Git log
|
|
1198
|
-
if info.get("git_log"):
|
|
1199
|
-
print("\nRECENT COMMITS:")
|
|
1200
|
-
for commit in info["git_log"]:
|
|
1201
|
-
print(f" {commit}")
|
|
1202
|
-
|
|
1203
|
-
# Analytics
|
|
1204
|
-
analytics = info["analytics"]
|
|
1205
|
-
|
|
1206
|
-
# Bottlenecks
|
|
1207
|
-
bottlenecks = analytics.get("bottlenecks", [])
|
|
1208
|
-
if bottlenecks:
|
|
1209
|
-
print(f"\nBOTTLENECKS ({len(bottlenecks)}):")
|
|
1210
|
-
for bn in bottlenecks:
|
|
1211
|
-
print(
|
|
1212
|
-
f" - {bn['title']} (blocks {bn['blocks_count']} tasks, impact: {bn['impact_score']:.1f})"
|
|
1213
|
-
)
|
|
1214
|
-
|
|
1215
|
-
# Recommendations
|
|
1216
|
-
recommendations = analytics.get("recommendations", [])
|
|
1217
|
-
if recommendations:
|
|
1218
|
-
print("\nRECOMMENDATIONS:")
|
|
1219
|
-
for rec in recommendations[:3]:
|
|
1220
|
-
reasons_str = ", ".join(rec["reasons"][:2])
|
|
1221
|
-
print(f" - {rec['title']} (score: {rec['score']:.1f})")
|
|
1222
|
-
print(f" Why: {reasons_str}")
|
|
1223
|
-
if rec.get("unlocks_count", 0) > 0:
|
|
1224
|
-
print(f" Unlocks: {rec['unlocks_count']} tasks")
|
|
1225
|
-
|
|
1226
|
-
# Parallel capacity
|
|
1227
|
-
parallel = analytics.get("parallel", {})
|
|
1228
|
-
if parallel:
|
|
1229
|
-
print("\nPARALLEL CAPACITY:")
|
|
1230
|
-
print(f" Max parallelism: {parallel.get('max_parallelism', 0)}")
|
|
1231
|
-
print(f" Ready now: {parallel.get('ready_now', 0)}")
|
|
1232
|
-
print(f" Total ready: {parallel.get('total_ready', 0)}")
|
|
1233
|
-
|
|
1234
|
-
print("\n" + "=" * 80)
|
|
1235
|
-
|
|
1236
|
-
|
|
1237
|
-
def cmd_session_status_report(args: argparse.Namespace) -> None:
|
|
1238
|
-
"""Print a comprehensive status report (Markdown)."""
|
|
1239
|
-
import subprocess
|
|
1240
|
-
|
|
1241
|
-
from htmlgraph.sdk import SDK
|
|
1242
|
-
|
|
1243
|
-
sdk = SDK(directory=args.graph_dir)
|
|
1244
|
-
status = sdk.get_status()
|
|
1245
|
-
|
|
1246
|
-
# Git log
|
|
1247
|
-
try:
|
|
1248
|
-
git_log = subprocess.check_output(
|
|
1249
|
-
["git", "log", "--oneline", "-n", "3"], text=True, stderr=subprocess.DEVNULL
|
|
1250
|
-
).strip()
|
|
1251
|
-
except Exception:
|
|
1252
|
-
git_log = "(Git log unavailable)"
|
|
1253
|
-
|
|
1254
|
-
# Active features detail
|
|
1255
|
-
active_features_text = ""
|
|
1256
|
-
if status["active_features"]:
|
|
1257
|
-
active_features_text = "\n### Current Feature(s)\n"
|
|
1258
|
-
for fid in status["active_features"]:
|
|
1259
|
-
# Use SDK to get nodes
|
|
1260
|
-
node = sdk.features.get(fid) or sdk.bugs.get(fid)
|
|
1261
|
-
if node:
|
|
1262
|
-
active_features_text += f"**Working On:** {node.title} ({node.id})\n"
|
|
1263
|
-
active_features_text += f"**Status:** {node.status}\n"
|
|
1264
|
-
if node.steps:
|
|
1265
|
-
active_features_text += "**Step Progress**\n"
|
|
1266
|
-
for step in node.steps:
|
|
1267
|
-
mark = "[x]" if step.completed else "[ ]"
|
|
1268
|
-
active_features_text += f"- {mark} {step.description}\n"
|
|
1269
|
-
active_features_text += "\n"
|
|
1270
|
-
else:
|
|
1271
|
-
active_features_text = "\n### Current Feature(s)\nNo active features. Start one with `htmlgraph feature start <id>`.\n"
|
|
1272
|
-
|
|
1273
|
-
# Project Name (from directory)
|
|
1274
|
-
project_name = Path(args.graph_dir).resolve().parent.name
|
|
1275
|
-
|
|
1276
|
-
completed = status["by_status"].get("done", 0)
|
|
1277
|
-
total = status["total_features"]
|
|
1278
|
-
pct = int(completed / max(1, total) * 100)
|
|
1279
|
-
|
|
1280
|
-
print(f"""## Session Status
|
|
1281
|
-
|
|
1282
|
-
**Project:** {project_name}
|
|
1283
|
-
**Progress:** {completed}/{total} features ({pct}%)
|
|
1284
|
-
**Active Features (WIP):** {status["wip_count"]}
|
|
1285
|
-
|
|
1286
|
-
---
|
|
1287
|
-
{active_features_text}---
|
|
1288
|
-
|
|
1289
|
-
### Recent Commits
|
|
1290
|
-
{git_log}
|
|
1291
|
-
|
|
1292
|
-
---
|
|
1293
|
-
|
|
1294
|
-
### What's Next
|
|
1295
|
-
Use `htmlgraph feature list --status todo` to see backlog.
|
|
1296
|
-
""")
|
|
1297
|
-
|
|
1298
|
-
|
|
1299
|
-
def cmd_session_dedupe(args: argparse.Namespace) -> None:
|
|
1300
|
-
"""Move low-signal session files out of the main sessions directory."""
|
|
1301
|
-
from htmlgraph import SDK
|
|
1302
|
-
|
|
1303
|
-
sdk = SDK(directory=args.graph_dir)
|
|
1304
|
-
result = sdk.dedupe_sessions(
|
|
1305
|
-
max_events=args.max_events,
|
|
1306
|
-
move_dir_name=args.move_dir,
|
|
1307
|
-
dry_run=args.dry_run,
|
|
1308
|
-
stale_extra_active=not args.no_stale_active,
|
|
1309
|
-
)
|
|
1310
|
-
|
|
1311
|
-
print(f"Scanned: {result['scanned']}")
|
|
1312
|
-
print(f"Moved: {result['moved']}")
|
|
1313
|
-
if result.get("missing"):
|
|
1314
|
-
print(f"Missing: {result['missing']}")
|
|
1315
|
-
if not args.dry_run:
|
|
1316
|
-
if result.get("staled_active"):
|
|
1317
|
-
print(f"Staled: {result['staled_active']} extra active sessions")
|
|
1318
|
-
if result.get("kept_active"):
|
|
1319
|
-
print(f"Kept: {result['kept_active']} canonical active sessions")
|
|
1320
|
-
|
|
1321
|
-
|
|
1322
|
-
def cmd_session_link(args: argparse.Namespace) -> None:
|
|
1323
|
-
"""Link a feature to a session retroactively."""
|
|
1324
|
-
import json
|
|
1325
|
-
|
|
1326
|
-
from htmlgraph.graph import HtmlGraph
|
|
1327
|
-
from htmlgraph.models import Edge
|
|
1328
|
-
|
|
1329
|
-
graph_dir = Path(args.graph_dir)
|
|
1330
|
-
sessions_dir = graph_dir / "sessions"
|
|
1331
|
-
feature_dir = graph_dir / args.collection
|
|
1332
|
-
|
|
1333
|
-
# Load session
|
|
1334
|
-
session_file = sessions_dir / f"{args.session_id}.html"
|
|
1335
|
-
if not session_file.exists():
|
|
1336
|
-
print(
|
|
1337
|
-
f"Error: Session '{args.session_id}' not found at {session_file}",
|
|
1338
|
-
file=sys.stderr,
|
|
1339
|
-
)
|
|
1340
|
-
sys.exit(1)
|
|
1341
|
-
|
|
1342
|
-
session_graph = HtmlGraph(sessions_dir)
|
|
1343
|
-
session = session_graph.get(args.session_id)
|
|
1344
|
-
if not session:
|
|
1345
|
-
print(f"Error: Failed to load session '{args.session_id}'", file=sys.stderr)
|
|
1346
|
-
sys.exit(1)
|
|
1347
|
-
|
|
1348
|
-
# Load feature
|
|
1349
|
-
feature_file = feature_dir / f"{args.feature_id}.html"
|
|
1350
|
-
if not feature_file.exists():
|
|
1351
|
-
print(
|
|
1352
|
-
f"Error: Feature '{args.feature_id}' not found at {feature_file}",
|
|
1353
|
-
file=sys.stderr,
|
|
1354
|
-
)
|
|
1355
|
-
sys.exit(1)
|
|
1356
|
-
|
|
1357
|
-
feature_graph = HtmlGraph(feature_dir)
|
|
1358
|
-
feature = feature_graph.get(args.feature_id)
|
|
1359
|
-
if not feature:
|
|
1360
|
-
print(f"Error: Failed to load feature '{args.feature_id}'", file=sys.stderr)
|
|
1361
|
-
sys.exit(1)
|
|
1362
|
-
|
|
1363
|
-
# Check if already linked
|
|
1364
|
-
worked_on = session.edges.get("worked-on", [])
|
|
1365
|
-
already_linked = any(e.target_id == args.feature_id for e in worked_on)
|
|
1366
|
-
|
|
1367
|
-
if already_linked:
|
|
1368
|
-
print(
|
|
1369
|
-
f"Feature '{args.feature_id}' is already linked to session '{args.session_id}'"
|
|
1370
|
-
)
|
|
1371
|
-
if not args.bidirectional:
|
|
1372
|
-
sys.exit(0)
|
|
1373
|
-
|
|
1374
|
-
# Add edge from session to feature
|
|
1375
|
-
if not already_linked:
|
|
1376
|
-
new_edge = Edge(
|
|
1377
|
-
target_id=args.feature_id, relationship="worked-on", title=feature.title
|
|
1378
|
-
)
|
|
1379
|
-
if "worked-on" not in session.edges:
|
|
1380
|
-
session.edges["worked-on"] = []
|
|
1381
|
-
session.edges["worked-on"].append(new_edge)
|
|
1382
|
-
session_graph.update(session)
|
|
1383
|
-
print(f"ā Linked feature '{args.feature_id}' to session '{args.session_id}'")
|
|
1384
|
-
|
|
1385
|
-
# Optionally add reciprocal edge from feature to session
|
|
1386
|
-
if args.bidirectional:
|
|
1387
|
-
implemented_in = feature.edges.get("implemented-in", [])
|
|
1388
|
-
feature_already_linked = any(
|
|
1389
|
-
e.target_id == args.session_id for e in implemented_in
|
|
1390
|
-
)
|
|
1391
|
-
|
|
1392
|
-
if not feature_already_linked:
|
|
1393
|
-
reciprocal_edge = Edge(
|
|
1394
|
-
target_id=args.session_id,
|
|
1395
|
-
relationship="implemented-in",
|
|
1396
|
-
title=f"Session {session.id}",
|
|
1397
|
-
)
|
|
1398
|
-
if "implemented-in" not in feature.edges:
|
|
1399
|
-
feature.edges["implemented-in"] = []
|
|
1400
|
-
feature.edges["implemented-in"].append(reciprocal_edge)
|
|
1401
|
-
feature_graph.update(feature)
|
|
1402
|
-
print(
|
|
1403
|
-
f"ā Added reciprocal link from feature '{args.feature_id}' to session '{args.session_id}'"
|
|
1404
|
-
)
|
|
1405
|
-
else:
|
|
1406
|
-
print(f"Feature '{args.feature_id}' already has reciprocal link to session")
|
|
1407
|
-
|
|
1408
|
-
if args.format == "json":
|
|
1409
|
-
result = {
|
|
1410
|
-
"session_id": args.session_id,
|
|
1411
|
-
"feature_id": args.feature_id,
|
|
1412
|
-
"bidirectional": args.bidirectional,
|
|
1413
|
-
"linked": not already_linked,
|
|
1414
|
-
}
|
|
1415
|
-
print(json.dumps(result, indent=2))
|
|
1416
|
-
|
|
1417
|
-
|
|
1418
|
-
def cmd_session_validate_attribution(args: argparse.Namespace) -> None:
|
|
1419
|
-
"""Validate feature attribution and tracking."""
|
|
1420
|
-
import json
|
|
1421
|
-
from datetime import datetime
|
|
1422
|
-
|
|
1423
|
-
from htmlgraph.graph import HtmlGraph
|
|
1424
|
-
|
|
1425
|
-
graph_dir = Path(args.graph_dir)
|
|
1426
|
-
feature_dir = graph_dir / args.collection
|
|
1427
|
-
sessions_dir = graph_dir / "sessions"
|
|
1428
|
-
events_dir = graph_dir / "events"
|
|
1429
|
-
|
|
1430
|
-
# Load feature
|
|
1431
|
-
feature_graph = HtmlGraph(feature_dir)
|
|
1432
|
-
feature = feature_graph.get(args.feature_id)
|
|
1433
|
-
if not feature:
|
|
1434
|
-
print(f"Error: Feature '{args.feature_id}' not found", file=sys.stderr)
|
|
1435
|
-
sys.exit(1)
|
|
1436
|
-
|
|
1437
|
-
# Find sessions that worked on this feature
|
|
1438
|
-
sessions_graph = HtmlGraph(sessions_dir)
|
|
1439
|
-
all_sessions = sessions_graph.query('[data-type="session"]')
|
|
1440
|
-
linked_sessions = []
|
|
1441
|
-
|
|
1442
|
-
for session in all_sessions:
|
|
1443
|
-
worked_on = session.edges.get("worked-on", [])
|
|
1444
|
-
if any(e.target_id == args.feature_id for e in worked_on):
|
|
1445
|
-
linked_sessions.append(session)
|
|
1446
|
-
|
|
1447
|
-
# Count events attributed to this feature
|
|
1448
|
-
event_count = 0
|
|
1449
|
-
last_activity = None
|
|
1450
|
-
high_drift_events = []
|
|
1451
|
-
|
|
1452
|
-
for session in linked_sessions:
|
|
1453
|
-
session_events_file = events_dir / f"{session.id}.jsonl"
|
|
1454
|
-
if session_events_file.exists():
|
|
1455
|
-
with open(session_events_file) as f:
|
|
1456
|
-
for line in f:
|
|
1457
|
-
try:
|
|
1458
|
-
event = json.loads(line.strip())
|
|
1459
|
-
if event.get("feature_id") == args.feature_id:
|
|
1460
|
-
event_count += 1
|
|
1461
|
-
timestamp = event.get("timestamp")
|
|
1462
|
-
if timestamp:
|
|
1463
|
-
event_time = datetime.fromisoformat(
|
|
1464
|
-
timestamp.replace("Z", "+00:00")
|
|
1465
|
-
)
|
|
1466
|
-
if not last_activity or event_time > last_activity:
|
|
1467
|
-
last_activity = event_time
|
|
1468
|
-
|
|
1469
|
-
# Check for high drift
|
|
1470
|
-
drift_score = event.get("drift_score")
|
|
1471
|
-
if drift_score and drift_score > 0.8:
|
|
1472
|
-
high_drift_events.append(
|
|
1473
|
-
{
|
|
1474
|
-
"timestamp": timestamp,
|
|
1475
|
-
"tool": event.get("tool"),
|
|
1476
|
-
"drift": drift_score,
|
|
1477
|
-
}
|
|
1478
|
-
)
|
|
1479
|
-
except json.JSONDecodeError:
|
|
1480
|
-
continue
|
|
1481
|
-
|
|
1482
|
-
# Calculate attribution health
|
|
1483
|
-
health = "UNKNOWN"
|
|
1484
|
-
issues = []
|
|
1485
|
-
|
|
1486
|
-
if len(linked_sessions) == 0:
|
|
1487
|
-
health = "CRITICAL"
|
|
1488
|
-
issues.append("Feature not linked to any session")
|
|
1489
|
-
elif event_count == 0:
|
|
1490
|
-
health = "CRITICAL"
|
|
1491
|
-
issues.append("No events attributed to feature")
|
|
1492
|
-
elif event_count < 5:
|
|
1493
|
-
health = "WARNING"
|
|
1494
|
-
issues.append(f"Only {event_count} events attributed (unusually low)")
|
|
1495
|
-
else:
|
|
1496
|
-
health = "GOOD"
|
|
1497
|
-
|
|
1498
|
-
if len(high_drift_events) > 3:
|
|
1499
|
-
if health == "GOOD":
|
|
1500
|
-
health = "WARNING"
|
|
1501
|
-
issues.append(
|
|
1502
|
-
f"{len(high_drift_events)} events with drift > 0.8 (may be misattributed)"
|
|
1503
|
-
)
|
|
1504
|
-
|
|
1505
|
-
# Output results
|
|
1506
|
-
if args.format == "json":
|
|
1507
|
-
result = {
|
|
1508
|
-
"feature_id": args.feature_id,
|
|
1509
|
-
"feature_title": feature.title,
|
|
1510
|
-
"health": health,
|
|
1511
|
-
"linked_sessions": len(linked_sessions),
|
|
1512
|
-
"event_count": event_count,
|
|
1513
|
-
"last_activity": last_activity.isoformat() if last_activity else None,
|
|
1514
|
-
"high_drift_count": len(high_drift_events),
|
|
1515
|
-
"issues": issues,
|
|
1516
|
-
}
|
|
1517
|
-
print(json.dumps(result, indent=2))
|
|
1518
|
-
else:
|
|
1519
|
-
status_symbol = "ā" if health == "GOOD" else "ā " if health == "WARNING" else "ā"
|
|
1520
|
-
print(f"{status_symbol} Feature '{args.feature_id}' validation:")
|
|
1521
|
-
print(f" Title: {feature.title}")
|
|
1522
|
-
print(f" Health: {health}")
|
|
1523
|
-
print(f" - Linked to {len(linked_sessions)} session(s)")
|
|
1524
|
-
print(f" - {event_count} events attributed")
|
|
1525
|
-
if last_activity:
|
|
1526
|
-
print(f" - Last activity: {last_activity.strftime('%Y-%m-%d %H:%M:%S')}")
|
|
1527
|
-
|
|
1528
|
-
if issues:
|
|
1529
|
-
print("\nā Issues detected:")
|
|
1530
|
-
for issue in issues:
|
|
1531
|
-
print(f" - {issue}")
|
|
1532
|
-
|
|
1533
|
-
if len(high_drift_events) > 0 and len(high_drift_events) <= 5:
|
|
1534
|
-
print("\nā High drift events:")
|
|
1535
|
-
for event in high_drift_events[:5]:
|
|
1536
|
-
print(
|
|
1537
|
-
f" - {event['timestamp']}: {event['tool']} (drift: {event['drift']:.2f})"
|
|
1538
|
-
)
|
|
1539
|
-
|
|
1540
|
-
|
|
1541
|
-
# =========================================================================
|
|
1542
|
-
# Transcript Commands
|
|
1543
|
-
# =========================================================================
|
|
1544
|
-
|
|
1545
|
-
|
|
1546
|
-
def cmd_transcript_list(args: argparse.Namespace) -> None:
|
|
1547
|
-
"""List available Claude Code transcripts."""
|
|
1548
|
-
import json
|
|
1549
|
-
|
|
1550
|
-
from htmlgraph.transcript import TranscriptReader
|
|
1551
|
-
|
|
1552
|
-
reader = TranscriptReader()
|
|
1553
|
-
|
|
1554
|
-
# Use project path filter if provided
|
|
1555
|
-
project_path = args.project if hasattr(args, "project") and args.project else None
|
|
1556
|
-
|
|
1557
|
-
sessions = reader.list_sessions(
|
|
1558
|
-
project_path=project_path,
|
|
1559
|
-
limit=args.limit if hasattr(args, "limit") else 20,
|
|
1560
|
-
)
|
|
1561
|
-
|
|
1562
|
-
if not sessions:
|
|
1563
|
-
if args.format == "json":
|
|
1564
|
-
print(json.dumps({"sessions": [], "count": 0}))
|
|
1565
|
-
else:
|
|
1566
|
-
print("No Claude Code transcripts found.")
|
|
1567
|
-
print(f"\nLooked in: {reader.claude_dir}")
|
|
1568
|
-
return
|
|
1569
|
-
|
|
1570
|
-
if args.format == "json":
|
|
1571
|
-
data = {
|
|
1572
|
-
"sessions": [
|
|
1573
|
-
{
|
|
1574
|
-
"session_id": s.session_id,
|
|
1575
|
-
"path": str(s.path),
|
|
1576
|
-
"cwd": s.cwd,
|
|
1577
|
-
"git_branch": s.git_branch,
|
|
1578
|
-
"started_at": s.started_at.isoformat() if s.started_at else None,
|
|
1579
|
-
"user_messages": s.user_message_count,
|
|
1580
|
-
"tool_calls": s.tool_call_count,
|
|
1581
|
-
"duration_seconds": s.duration_seconds,
|
|
1582
|
-
}
|
|
1583
|
-
for s in sessions
|
|
1584
|
-
],
|
|
1585
|
-
"count": len(sessions),
|
|
1586
|
-
}
|
|
1587
|
-
print(json.dumps(data, indent=2))
|
|
1588
|
-
else:
|
|
1589
|
-
print(f"Found {len(sessions)} Claude Code transcript(s):\n")
|
|
1590
|
-
for s in sessions:
|
|
1591
|
-
started = (
|
|
1592
|
-
s.started_at.strftime("%Y-%m-%d %H:%M") if s.started_at else "unknown"
|
|
1593
|
-
)
|
|
1594
|
-
duration = f"{int(s.duration_seconds / 60)}m" if s.duration_seconds else "?"
|
|
1595
|
-
branch = s.git_branch or "no branch"
|
|
1596
|
-
print(
|
|
1597
|
-
f" {s.session_id[:12]} {started} {duration:>6} {s.user_message_count:>3} msgs [{branch}]"
|
|
1598
|
-
)
|
|
1599
|
-
|
|
1600
|
-
|
|
1601
|
-
def cmd_transcript_import(args: argparse.Namespace) -> None:
|
|
1602
|
-
"""Import a Claude Code transcript into HtmlGraph."""
|
|
1603
|
-
import json
|
|
1604
|
-
|
|
1605
|
-
from htmlgraph.session_manager import SessionManager
|
|
1606
|
-
from htmlgraph.transcript import TranscriptReader
|
|
1607
|
-
|
|
1608
|
-
reader = TranscriptReader()
|
|
1609
|
-
manager = SessionManager(args.graph_dir)
|
|
1610
|
-
|
|
1611
|
-
# Find the transcript
|
|
1612
|
-
transcript = reader.read_session(args.session_id)
|
|
1613
|
-
if not transcript:
|
|
1614
|
-
print(f"Error: Transcript not found: {args.session_id}", file=sys.stderr)
|
|
1615
|
-
sys.exit(1)
|
|
1616
|
-
|
|
1617
|
-
# Find or create HtmlGraph session to import into
|
|
1618
|
-
htmlgraph_session_id = args.to_session
|
|
1619
|
-
if not htmlgraph_session_id:
|
|
1620
|
-
# Check if already linked
|
|
1621
|
-
existing = manager.find_session_by_transcript(args.session_id)
|
|
1622
|
-
if existing:
|
|
1623
|
-
htmlgraph_session_id = existing.id
|
|
1624
|
-
print(f"Found existing linked session: {htmlgraph_session_id}")
|
|
1625
|
-
else:
|
|
1626
|
-
# Create a new session
|
|
1627
|
-
agent = args.agent or "claude-code"
|
|
1628
|
-
new_session = manager.start_session(
|
|
1629
|
-
agent=agent,
|
|
1630
|
-
title=f"Imported: {transcript.session_id[:12]}",
|
|
1631
|
-
)
|
|
1632
|
-
htmlgraph_session_id = new_session.id
|
|
1633
|
-
print(f"Created new session: {htmlgraph_session_id}")
|
|
1634
|
-
|
|
1635
|
-
# Import events
|
|
1636
|
-
result = manager.import_transcript_events(
|
|
1637
|
-
session_id=htmlgraph_session_id,
|
|
1638
|
-
transcript_session=transcript,
|
|
1639
|
-
overwrite=args.overwrite if hasattr(args, "overwrite") else False,
|
|
1640
|
-
)
|
|
1641
|
-
|
|
1642
|
-
# Link to feature if specified
|
|
1643
|
-
if args.link_feature:
|
|
1644
|
-
session = manager.get_session(htmlgraph_session_id)
|
|
1645
|
-
if session and args.link_feature not in session.worked_on:
|
|
1646
|
-
session.worked_on.append(args.link_feature)
|
|
1647
|
-
manager.session_converter.save(session)
|
|
1648
|
-
result["linked_feature"] = args.link_feature
|
|
1649
|
-
|
|
1650
|
-
if args.format == "json":
|
|
1651
|
-
print(json.dumps(result, indent=2))
|
|
1652
|
-
else:
|
|
1653
|
-
print(f"ā
Imported transcript {args.session_id[:12]}:")
|
|
1654
|
-
print(f" ā HtmlGraph session: {htmlgraph_session_id}")
|
|
1655
|
-
print(f" ā Events imported: {result.get('imported', 0)}")
|
|
1656
|
-
print(f" ā Events skipped: {result.get('skipped', 0)}")
|
|
1657
|
-
if result.get("linked_feature"):
|
|
1658
|
-
print(f" ā Linked to feature: {result['linked_feature']}")
|
|
1659
|
-
|
|
1660
|
-
|
|
1661
|
-
def cmd_transcript_link(args: argparse.Namespace) -> None:
|
|
1662
|
-
"""Link a Claude Code transcript to an HtmlGraph session."""
|
|
1663
|
-
import json
|
|
1664
|
-
|
|
1665
|
-
from htmlgraph.session_manager import SessionManager
|
|
1666
|
-
from htmlgraph.transcript import TranscriptReader
|
|
1667
|
-
|
|
1668
|
-
reader = TranscriptReader()
|
|
1669
|
-
manager = SessionManager(args.graph_dir)
|
|
1670
|
-
|
|
1671
|
-
# Find the transcript to get git branch
|
|
1672
|
-
transcript = reader.read_session(args.session_id)
|
|
1673
|
-
if not transcript:
|
|
1674
|
-
print(f"Error: Transcript not found: {args.session_id}", file=sys.stderr)
|
|
1675
|
-
sys.exit(1)
|
|
1676
|
-
|
|
1677
|
-
# Link to HtmlGraph session
|
|
1678
|
-
session = manager.link_transcript(
|
|
1679
|
-
session_id=args.to_session,
|
|
1680
|
-
transcript_id=args.session_id,
|
|
1681
|
-
transcript_path=str(transcript.path),
|
|
1682
|
-
git_branch=transcript.git_branch,
|
|
1683
|
-
)
|
|
1684
|
-
|
|
1685
|
-
if not session:
|
|
1686
|
-
print(f"Error: HtmlGraph session not found: {args.to_session}", file=sys.stderr)
|
|
1687
|
-
sys.exit(1)
|
|
1688
|
-
|
|
1689
|
-
if args.format == "json":
|
|
1690
|
-
print(
|
|
1691
|
-
json.dumps(
|
|
1692
|
-
{
|
|
1693
|
-
"linked": True,
|
|
1694
|
-
"session_id": session.id,
|
|
1695
|
-
"transcript_id": args.session_id,
|
|
1696
|
-
"git_branch": transcript.git_branch,
|
|
1697
|
-
},
|
|
1698
|
-
indent=2,
|
|
1699
|
-
)
|
|
1700
|
-
)
|
|
1701
|
-
else:
|
|
1702
|
-
print(f"ā
Linked transcript {args.session_id[:12]} to session {session.id}")
|
|
1703
|
-
if transcript.git_branch:
|
|
1704
|
-
print(f" Git branch: {transcript.git_branch}")
|
|
1705
|
-
|
|
1706
|
-
|
|
1707
|
-
def cmd_transcript_stats(args: argparse.Namespace) -> None:
|
|
1708
|
-
"""Show transcript statistics for a session."""
|
|
1709
|
-
import json
|
|
1710
|
-
|
|
1711
|
-
from htmlgraph.session_manager import SessionManager
|
|
1712
|
-
|
|
1713
|
-
manager = SessionManager(args.graph_dir)
|
|
1714
|
-
stats = manager.get_transcript_stats(args.session_id)
|
|
1715
|
-
|
|
1716
|
-
if not stats:
|
|
1717
|
-
print(
|
|
1718
|
-
f"Error: No transcript linked to session: {args.session_id}",
|
|
1719
|
-
file=sys.stderr,
|
|
1720
|
-
)
|
|
1721
|
-
sys.exit(1)
|
|
1722
|
-
|
|
1723
|
-
if stats.get("error"):
|
|
1724
|
-
print(f"Error: {stats['error']}", file=sys.stderr)
|
|
1725
|
-
sys.exit(1)
|
|
1726
|
-
|
|
1727
|
-
if args.format == "json":
|
|
1728
|
-
print(json.dumps(stats, indent=2))
|
|
1729
|
-
else:
|
|
1730
|
-
print(f"Transcript Stats for {args.session_id}:")
|
|
1731
|
-
print(f" Transcript ID: {stats['transcript_id']}")
|
|
1732
|
-
print(f" Git Branch: {stats.get('git_branch', 'N/A')}")
|
|
1733
|
-
print(f" User Messages: {stats['user_messages']}")
|
|
1734
|
-
print(f" Tool Calls: {stats['tool_calls']}")
|
|
1735
|
-
if stats.get("duration_seconds"):
|
|
1736
|
-
mins = int(stats["duration_seconds"] / 60)
|
|
1737
|
-
print(f" Duration: {mins} minutes")
|
|
1738
|
-
print(f" Has Thinking Traces: {stats['has_thinking_traces']}")
|
|
1739
|
-
if stats.get("tool_breakdown"):
|
|
1740
|
-
print(" Tool Breakdown:")
|
|
1741
|
-
for tool, count in sorted(
|
|
1742
|
-
stats["tool_breakdown"].items(), key=lambda x: -x[1]
|
|
1743
|
-
):
|
|
1744
|
-
print(f" {tool}: {count}")
|
|
1745
|
-
|
|
1746
|
-
|
|
1747
|
-
def cmd_transcript_auto_link(args: argparse.Namespace) -> None:
|
|
1748
|
-
"""Auto-link transcripts to sessions by git branch."""
|
|
1749
|
-
import json
|
|
1750
|
-
|
|
1751
|
-
from htmlgraph.session_manager import SessionManager
|
|
1752
|
-
|
|
1753
|
-
manager = SessionManager(args.graph_dir)
|
|
1754
|
-
|
|
1755
|
-
# Get current git branch if not specified
|
|
1756
|
-
branch = args.branch
|
|
1757
|
-
if not branch:
|
|
1758
|
-
try:
|
|
1759
|
-
result = subprocess.run(
|
|
1760
|
-
["git", "rev-parse", "--abbrev-ref", "HEAD"],
|
|
1761
|
-
capture_output=True,
|
|
1762
|
-
text=True,
|
|
1763
|
-
check=True,
|
|
1764
|
-
)
|
|
1765
|
-
branch = result.stdout.strip()
|
|
1766
|
-
except Exception:
|
|
1767
|
-
print(
|
|
1768
|
-
"Error: Could not detect git branch. Specify with --branch",
|
|
1769
|
-
file=sys.stderr,
|
|
1770
|
-
)
|
|
1771
|
-
sys.exit(1)
|
|
1772
|
-
|
|
1773
|
-
linked = manager.auto_link_transcript_by_branch(
|
|
1774
|
-
git_branch=branch,
|
|
1775
|
-
agent=args.agent,
|
|
1776
|
-
)
|
|
1777
|
-
|
|
1778
|
-
if args.format == "json":
|
|
1779
|
-
print(
|
|
1780
|
-
json.dumps(
|
|
1781
|
-
{
|
|
1782
|
-
"branch": branch,
|
|
1783
|
-
"linked": [
|
|
1784
|
-
{"session_id": s, "transcript_id": t} for s, t in linked
|
|
1785
|
-
],
|
|
1786
|
-
"count": len(linked),
|
|
1787
|
-
},
|
|
1788
|
-
indent=2,
|
|
1789
|
-
)
|
|
1790
|
-
)
|
|
1791
|
-
else:
|
|
1792
|
-
if linked:
|
|
1793
|
-
print(f"ā
Auto-linked {len(linked)} session(s) for branch '{branch}':")
|
|
1794
|
-
for session_id, transcript_id in linked:
|
|
1795
|
-
print(f" {session_id} ā {transcript_id[:12]}")
|
|
1796
|
-
else:
|
|
1797
|
-
print(f"No sessions to link for branch '{branch}'")
|
|
1798
|
-
|
|
1799
|
-
|
|
1800
|
-
def cmd_transcript_health(args: argparse.Namespace) -> None:
|
|
1801
|
-
"""Show session health metrics from transcript."""
|
|
1802
|
-
import json
|
|
1803
|
-
|
|
1804
|
-
from htmlgraph.transcript_analytics import TranscriptAnalytics
|
|
1805
|
-
|
|
1806
|
-
analytics = TranscriptAnalytics(args.graph_dir)
|
|
1807
|
-
health = analytics.calculate_session_health(args.transcript_id)
|
|
1808
|
-
|
|
1809
|
-
if not health:
|
|
1810
|
-
print(
|
|
1811
|
-
f"Error: Could not analyze transcript {args.transcript_id}", file=sys.stderr
|
|
1812
|
-
)
|
|
1813
|
-
sys.exit(1)
|
|
1814
|
-
|
|
1815
|
-
if args.format == "json":
|
|
1816
|
-
print(
|
|
1817
|
-
json.dumps(
|
|
1818
|
-
{
|
|
1819
|
-
"session_id": health.session_id,
|
|
1820
|
-
"overall_score": round(health.overall_score(), 2),
|
|
1821
|
-
"efficiency_score": round(health.efficiency_score, 2),
|
|
1822
|
-
"retry_rate": round(health.retry_rate, 2),
|
|
1823
|
-
"context_rebuild_count": health.context_rebuild_count,
|
|
1824
|
-
"tool_diversity": round(health.tool_diversity, 2),
|
|
1825
|
-
"prompt_clarity_score": round(health.prompt_clarity_score, 2),
|
|
1826
|
-
"error_recovery_rate": round(health.error_recovery_rate, 2),
|
|
1827
|
-
"duration_seconds": round(health.duration_seconds, 1),
|
|
1828
|
-
"tools_per_minute": round(health.tools_per_minute, 1),
|
|
1829
|
-
},
|
|
1830
|
-
indent=2,
|
|
1831
|
-
)
|
|
1832
|
-
)
|
|
1833
|
-
else:
|
|
1834
|
-
score = health.overall_score()
|
|
1835
|
-
grade = (
|
|
1836
|
-
"š¢ Excellent"
|
|
1837
|
-
if score > 0.8
|
|
1838
|
-
else "š” Good"
|
|
1839
|
-
if score > 0.6
|
|
1840
|
-
else "š Fair"
|
|
1841
|
-
if score > 0.4
|
|
1842
|
-
else "š“ Needs Work"
|
|
1843
|
-
)
|
|
1844
|
-
|
|
1845
|
-
print(f"Session Health: {args.transcript_id[:12]}...")
|
|
1846
|
-
print(f"{'=' * 50}")
|
|
1847
|
-
print(f"Overall Score: {score:.0%} {grade}")
|
|
1848
|
-
print()
|
|
1849
|
-
print(f"š Efficiency: {health.efficiency_score:.0%}")
|
|
1850
|
-
print(
|
|
1851
|
-
f"š Retry Rate: {health.retry_rate:.0%} {'ā ļø' if health.retry_rate > 0.3 else 'ā'}"
|
|
1852
|
-
)
|
|
1853
|
-
print(
|
|
1854
|
-
f"š Context Rebuilds: {health.context_rebuild_count} {'ā ļø' if health.context_rebuild_count > 5 else 'ā'}"
|
|
1855
|
-
)
|
|
1856
|
-
print(f"š§ Tool Diversity: {health.tool_diversity:.0%}")
|
|
1857
|
-
print(f"š¬ Prompt Clarity: {health.prompt_clarity_score:.0%}")
|
|
1858
|
-
print(f"š§ Error Recovery: {health.error_recovery_rate:.0%}")
|
|
1859
|
-
print()
|
|
1860
|
-
dur_mins = int(health.duration_seconds // 60)
|
|
1861
|
-
dur_secs = int(health.duration_seconds % 60)
|
|
1862
|
-
print(
|
|
1863
|
-
f"ā±ļø Duration: {dur_mins}m {dur_secs}s | Tools/min: {health.tools_per_minute:.1f}"
|
|
1864
|
-
)
|
|
1865
|
-
|
|
1866
|
-
|
|
1867
|
-
def cmd_transcript_patterns(args: argparse.Namespace) -> None:
|
|
1868
|
-
"""Detect workflow patterns in transcripts."""
|
|
1869
|
-
import json
|
|
1870
|
-
|
|
1871
|
-
from htmlgraph.transcript_analytics import TranscriptAnalytics
|
|
1872
|
-
|
|
1873
|
-
analytics = TranscriptAnalytics(args.graph_dir)
|
|
1874
|
-
patterns = analytics.detect_patterns(
|
|
1875
|
-
transcript_id=args.transcript_id,
|
|
1876
|
-
min_length=args.min_length,
|
|
1877
|
-
max_length=args.max_length,
|
|
1878
|
-
)
|
|
1879
|
-
|
|
1880
|
-
if args.format == "json":
|
|
1881
|
-
print(
|
|
1882
|
-
json.dumps(
|
|
1883
|
-
[
|
|
1884
|
-
{
|
|
1885
|
-
"sequence": p.sequence,
|
|
1886
|
-
"count": p.count,
|
|
1887
|
-
"category": p.category,
|
|
1888
|
-
}
|
|
1889
|
-
for p in patterns
|
|
1890
|
-
],
|
|
1891
|
-
indent=2,
|
|
1892
|
-
)
|
|
1893
|
-
)
|
|
1894
|
-
else:
|
|
1895
|
-
print("Workflow Patterns Detected")
|
|
1896
|
-
print("=" * 50)
|
|
1897
|
-
|
|
1898
|
-
optimal = [p for p in patterns if p.category == "optimal"]
|
|
1899
|
-
anti = [p for p in patterns if p.category == "anti-pattern"]
|
|
1900
|
-
neutral = [p for p in patterns if p.category == "neutral"][:10]
|
|
1901
|
-
|
|
1902
|
-
if optimal:
|
|
1903
|
-
print("\nā
Optimal Patterns:")
|
|
1904
|
-
for p in optimal:
|
|
1905
|
-
print(f" {' ā '.join(p.sequence)} ({p.count}x)")
|
|
1906
|
-
|
|
1907
|
-
if anti:
|
|
1908
|
-
print("\nā ļø Anti-Patterns:")
|
|
1909
|
-
for p in anti:
|
|
1910
|
-
print(f" {' ā '.join(p.sequence)} ({p.count}x)")
|
|
1911
|
-
|
|
1912
|
-
if neutral:
|
|
1913
|
-
print("\nš Common Patterns:")
|
|
1914
|
-
for p in neutral:
|
|
1915
|
-
print(f" {' ā '.join(p.sequence)} ({p.count}x)")
|
|
1916
|
-
|
|
1917
|
-
|
|
1918
|
-
def cmd_transcript_transitions(args: argparse.Namespace) -> None:
|
|
1919
|
-
"""Show tool transition matrix."""
|
|
1920
|
-
import json
|
|
1921
|
-
|
|
1922
|
-
from htmlgraph.transcript_analytics import TranscriptAnalytics
|
|
1923
|
-
|
|
1924
|
-
analytics = TranscriptAnalytics(args.graph_dir)
|
|
1925
|
-
transitions = analytics.get_tool_transitions(transcript_id=args.transcript_id)
|
|
1926
|
-
|
|
1927
|
-
if args.format == "json":
|
|
1928
|
-
print(json.dumps(transitions, indent=2))
|
|
1929
|
-
else:
|
|
1930
|
-
print("Tool Transition Matrix")
|
|
1931
|
-
print("=" * 50)
|
|
1932
|
-
print("(from_tool ā to_tool: count)")
|
|
1933
|
-
print()
|
|
1934
|
-
|
|
1935
|
-
# Flatten and sort
|
|
1936
|
-
flat = []
|
|
1937
|
-
for from_tool, tos in transitions.items():
|
|
1938
|
-
for to_tool, count in tos.items():
|
|
1939
|
-
flat.append((from_tool, to_tool, count))
|
|
1940
|
-
|
|
1941
|
-
flat.sort(key=lambda x: -x[2])
|
|
1942
|
-
|
|
1943
|
-
for from_t, to_t, count in flat[:20]:
|
|
1944
|
-
bar = "ā" * min(count, 20)
|
|
1945
|
-
print(f" {from_t:12} ā {to_t:12} {count:4} {bar}")
|
|
1946
|
-
|
|
1947
|
-
|
|
1948
|
-
def cmd_transcript_recommendations(args: argparse.Namespace) -> None:
|
|
1949
|
-
"""Get workflow improvement recommendations."""
|
|
1950
|
-
import json
|
|
1951
|
-
|
|
1952
|
-
from htmlgraph.transcript_analytics import TranscriptAnalytics
|
|
1953
|
-
|
|
1954
|
-
analytics = TranscriptAnalytics(args.graph_dir)
|
|
1955
|
-
recommendations = analytics.generate_recommendations(
|
|
1956
|
-
transcript_id=args.transcript_id
|
|
1957
|
-
)
|
|
1958
|
-
|
|
1959
|
-
if args.format == "json":
|
|
1960
|
-
print(json.dumps({"recommendations": recommendations}, indent=2))
|
|
1961
|
-
else:
|
|
1962
|
-
print("Workflow Recommendations")
|
|
1963
|
-
print("=" * 50)
|
|
1964
|
-
for rec in recommendations:
|
|
1965
|
-
print(f" {rec}")
|
|
1966
|
-
|
|
1967
|
-
|
|
1968
|
-
def cmd_transcript_insights(args: argparse.Namespace) -> None:
|
|
1969
|
-
"""Get comprehensive transcript insights."""
|
|
1970
|
-
import json
|
|
1971
|
-
|
|
1972
|
-
from htmlgraph.transcript_analytics import TranscriptAnalytics
|
|
1973
|
-
|
|
1974
|
-
analytics = TranscriptAnalytics(args.graph_dir)
|
|
1975
|
-
insights = analytics.get_insights()
|
|
1976
|
-
|
|
1977
|
-
if args.format == "json":
|
|
1978
|
-
print(
|
|
1979
|
-
json.dumps(
|
|
1980
|
-
{
|
|
1981
|
-
"total_sessions": insights.total_sessions,
|
|
1982
|
-
"total_user_messages": insights.total_user_messages,
|
|
1983
|
-
"total_tool_calls": insights.total_tool_calls,
|
|
1984
|
-
"tool_frequency": insights.tool_frequency,
|
|
1985
|
-
"avg_session_health": round(insights.avg_session_health, 2),
|
|
1986
|
-
"recommendations": insights.recommendations,
|
|
1987
|
-
},
|
|
1988
|
-
indent=2,
|
|
1989
|
-
)
|
|
1990
|
-
)
|
|
1991
|
-
else:
|
|
1992
|
-
print("š Transcript Insights")
|
|
1993
|
-
print("=" * 50)
|
|
1994
|
-
print(f"Sessions Analyzed: {insights.total_sessions}")
|
|
1995
|
-
print(f"Total User Messages: {insights.total_user_messages}")
|
|
1996
|
-
print(f"Total Tool Calls: {insights.total_tool_calls}")
|
|
1997
|
-
print(f"Avg Session Health: {insights.avg_session_health:.0%}")
|
|
1998
|
-
print()
|
|
1999
|
-
|
|
2000
|
-
if insights.tool_frequency:
|
|
2001
|
-
print("š§ Top Tools:")
|
|
2002
|
-
for tool, count in list(insights.tool_frequency.items())[:8]:
|
|
2003
|
-
bar = "ā" * min(count // 5, 15)
|
|
2004
|
-
print(f" {tool:15} {count:4} {bar}")
|
|
2005
|
-
|
|
2006
|
-
print()
|
|
2007
|
-
print("š” Recommendations:")
|
|
2008
|
-
for rec in insights.recommendations[:5]:
|
|
2009
|
-
print(f" {rec}")
|
|
2010
|
-
|
|
2011
|
-
|
|
2012
|
-
def cmd_transcript_export(args: argparse.Namespace) -> None:
|
|
2013
|
-
"""Export transcript to HTML format."""
|
|
2014
|
-
from pathlib import Path
|
|
2015
|
-
|
|
2016
|
-
from htmlgraph.transcript import TranscriptReader
|
|
2017
|
-
|
|
2018
|
-
reader = TranscriptReader()
|
|
2019
|
-
transcript = reader.read_session(args.transcript_id)
|
|
2020
|
-
|
|
2021
|
-
if not transcript:
|
|
2022
|
-
print(f"Transcript '{args.transcript_id}' not found.", file=sys.stderr)
|
|
2023
|
-
sys.exit(1)
|
|
2024
|
-
|
|
2025
|
-
html = transcript.to_html(include_thinking=args.include_thinking)
|
|
2026
|
-
|
|
2027
|
-
if args.output:
|
|
2028
|
-
output_path = Path(args.output)
|
|
2029
|
-
output_path.parent.mkdir(parents=True, exist_ok=True)
|
|
2030
|
-
output_path.write_text(html)
|
|
2031
|
-
print(f"Exported to: {output_path}")
|
|
2032
|
-
else:
|
|
2033
|
-
print(html)
|
|
2034
|
-
|
|
2035
|
-
|
|
2036
|
-
def cmd_transcript_track_stats(args: argparse.Namespace) -> None:
|
|
2037
|
-
"""Get aggregated transcript stats for a track."""
|
|
2038
|
-
import json
|
|
2039
|
-
|
|
2040
|
-
from htmlgraph.transcript_analytics import TranscriptAnalytics
|
|
2041
|
-
|
|
2042
|
-
analytics = TranscriptAnalytics(args.graph_dir)
|
|
2043
|
-
stats = analytics.get_track_stats(args.track_id)
|
|
2044
|
-
|
|
2045
|
-
if not stats:
|
|
2046
|
-
print(
|
|
2047
|
-
f"Track '{args.track_id}' not found or has no transcript data.",
|
|
2048
|
-
file=sys.stderr,
|
|
2049
|
-
)
|
|
2050
|
-
sys.exit(1)
|
|
2051
|
-
|
|
2052
|
-
if args.format == "json":
|
|
2053
|
-
print(json.dumps(stats.to_dict(), indent=2))
|
|
2054
|
-
else:
|
|
2055
|
-
print(f"š Track Transcript Stats: {args.track_id}")
|
|
2056
|
-
print("=" * 50)
|
|
2057
|
-
print(f"Sessions: {stats.session_count}")
|
|
2058
|
-
print(f"User Messages: {stats.total_user_messages}")
|
|
2059
|
-
print(f"Tool Calls: {stats.total_tool_calls}")
|
|
2060
|
-
print(f"Total Duration: {stats._format_duration(stats.total_duration_seconds)}")
|
|
2061
|
-
print(f"Avg Health: {stats.avg_session_health:.0%}")
|
|
2062
|
-
print(f"Health Trend: {stats.health_trend}")
|
|
2063
|
-
print(f"Anti-Patterns: {stats.anti_patterns_detected}")
|
|
2064
|
-
|
|
2065
|
-
if stats.tool_frequency:
|
|
2066
|
-
print()
|
|
2067
|
-
print("š§ Top Tools:")
|
|
2068
|
-
for tool, count in list(stats.tool_frequency.items())[:8]:
|
|
2069
|
-
bar = "ā" * min(count // 5, 15)
|
|
2070
|
-
print(f" {tool:15} {count:4} {bar}")
|
|
2071
|
-
|
|
2072
|
-
if stats.session_ids:
|
|
2073
|
-
print()
|
|
2074
|
-
print("š Sessions:")
|
|
2075
|
-
for i, (sid, health) in enumerate(
|
|
2076
|
-
zip(stats.session_ids, stats.session_healths)
|
|
2077
|
-
):
|
|
2078
|
-
print(f" {sid[:20]:20} health: {health:.0%}")
|
|
2079
|
-
if i >= 9:
|
|
2080
|
-
remaining = len(stats.session_ids) - 10
|
|
2081
|
-
if remaining > 0:
|
|
2082
|
-
print(f" ... and {remaining} more sessions")
|
|
2083
|
-
break
|
|
2084
|
-
|
|
2085
|
-
|
|
2086
|
-
def cmd_transcript_link_feature(args: argparse.Namespace) -> None:
|
|
2087
|
-
"""Link a Claude Code transcript to a feature for parallel agent tracking."""
|
|
2088
|
-
import json
|
|
2089
|
-
|
|
2090
|
-
from htmlgraph.session_manager import SessionManager
|
|
2091
|
-
|
|
2092
|
-
manager = SessionManager(args.graph_dir)
|
|
2093
|
-
graph = manager.features_graph
|
|
2094
|
-
|
|
2095
|
-
# Get the feature
|
|
2096
|
-
feature = graph.get(args.to_feature)
|
|
2097
|
-
if not feature:
|
|
2098
|
-
print(f"Feature '{args.to_feature}' not found.", file=sys.stderr)
|
|
2099
|
-
sys.exit(1)
|
|
2100
|
-
|
|
2101
|
-
# Link the transcript
|
|
2102
|
-
manager._link_transcript_to_feature(feature, args.transcript_id, graph)
|
|
2103
|
-
graph.update(feature)
|
|
2104
|
-
|
|
2105
|
-
if args.format == "json":
|
|
2106
|
-
result = {
|
|
2107
|
-
"success": True,
|
|
2108
|
-
"feature_id": args.to_feature,
|
|
2109
|
-
"transcript_id": args.transcript_id,
|
|
2110
|
-
"tool_count": feature.properties.get("transcript_tool_count", 0),
|
|
2111
|
-
"duration_seconds": feature.properties.get(
|
|
2112
|
-
"transcript_duration_seconds", 0
|
|
2113
|
-
),
|
|
2114
|
-
}
|
|
2115
|
-
print(json.dumps(result, indent=2))
|
|
2116
|
-
else:
|
|
2117
|
-
print(
|
|
2118
|
-
f"ā
Linked transcript '{args.transcript_id}' to feature '{args.to_feature}'"
|
|
2119
|
-
)
|
|
2120
|
-
tool_count = feature.properties.get("transcript_tool_count", 0)
|
|
2121
|
-
duration = feature.properties.get("transcript_duration_seconds", 0)
|
|
2122
|
-
if tool_count > 0:
|
|
2123
|
-
print(f" Tools: {tool_count}")
|
|
2124
|
-
print(f" Duration: {duration}s")
|
|
2125
|
-
|
|
2126
|
-
|
|
2127
|
-
def cmd_track(args: argparse.Namespace) -> None:
|
|
2128
|
-
"""Track an activity in the current session."""
|
|
2129
|
-
import json
|
|
2130
|
-
|
|
2131
|
-
from htmlgraph import SDK
|
|
2132
|
-
|
|
2133
|
-
agent = os.environ.get("HTMLGRAPH_AGENT")
|
|
2134
|
-
sdk = SDK(directory=args.graph_dir, agent=agent)
|
|
2135
|
-
|
|
2136
|
-
try:
|
|
2137
|
-
entry = sdk.track_activity(
|
|
2138
|
-
tool=args.tool,
|
|
2139
|
-
summary=args.summary,
|
|
2140
|
-
file_paths=args.files,
|
|
2141
|
-
success=not args.failed,
|
|
2142
|
-
session_id=args.session, # None if not specified, SDK will find active session
|
|
2143
|
-
)
|
|
2144
|
-
except ValueError as e:
|
|
2145
|
-
print(f"Error: {e}", file=sys.stderr)
|
|
2146
|
-
sys.exit(1)
|
|
2147
|
-
|
|
2148
|
-
if args.format == "json":
|
|
2149
|
-
data = {
|
|
2150
|
-
"id": entry.id,
|
|
2151
|
-
"timestamp": entry.timestamp.isoformat(),
|
|
2152
|
-
"tool": entry.tool,
|
|
2153
|
-
"summary": entry.summary,
|
|
2154
|
-
"success": entry.success,
|
|
2155
|
-
"feature_id": entry.feature_id,
|
|
2156
|
-
"drift_score": entry.drift_score,
|
|
2157
|
-
}
|
|
2158
|
-
print(json.dumps(data, indent=2))
|
|
2159
|
-
else:
|
|
2160
|
-
print(f"Tracked: [{entry.tool}] {entry.summary}")
|
|
2161
|
-
if entry.feature_id:
|
|
2162
|
-
print(f" Attributed to: {entry.feature_id}")
|
|
2163
|
-
if entry.drift_score and entry.drift_score > 0.3:
|
|
2164
|
-
print(f" Drift warning: {entry.drift_score:.2f}")
|
|
2165
|
-
|
|
2166
|
-
|
|
2167
|
-
# =============================================================================
|
|
2168
|
-
# Events & Index Commands
|
|
2169
|
-
# =============================================================================
|
|
2170
|
-
|
|
2171
|
-
|
|
2172
|
-
def cmd_events_export(args: argparse.Namespace) -> None:
|
|
2173
|
-
"""Export legacy session HTML activity logs to JSONL event logs."""
|
|
2174
|
-
from htmlgraph.event_migration import export_sessions_to_jsonl
|
|
2175
|
-
|
|
2176
|
-
graph_dir = Path(args.graph_dir)
|
|
2177
|
-
sessions_dir = graph_dir / "sessions"
|
|
2178
|
-
events_dir = graph_dir / "events"
|
|
2179
|
-
|
|
2180
|
-
result = export_sessions_to_jsonl(
|
|
2181
|
-
sessions_dir=sessions_dir,
|
|
2182
|
-
events_dir=events_dir,
|
|
2183
|
-
overwrite=args.overwrite,
|
|
2184
|
-
include_subdirs=args.include_subdirs,
|
|
2185
|
-
)
|
|
2186
|
-
|
|
2187
|
-
print(f"Written: {result['written']}")
|
|
2188
|
-
print(f"Skipped: {result['skipped']}")
|
|
2189
|
-
print(f"Failed: {result['failed']}")
|
|
2190
|
-
|
|
2191
|
-
|
|
2192
|
-
def cmd_index_rebuild(args: argparse.Namespace) -> None:
|
|
2193
|
-
"""Rebuild the SQLite analytics index from JSONL event logs."""
|
|
2194
|
-
from htmlgraph.analytics_index import AnalyticsIndex
|
|
2195
|
-
from htmlgraph.event_log import JsonlEventLog
|
|
2196
|
-
|
|
2197
|
-
graph_dir = Path(args.graph_dir)
|
|
2198
|
-
events_dir = graph_dir / "events"
|
|
2199
|
-
db_path = graph_dir / "index.sqlite"
|
|
2200
|
-
|
|
2201
|
-
log = JsonlEventLog(events_dir)
|
|
2202
|
-
index = AnalyticsIndex(db_path)
|
|
2203
|
-
|
|
2204
|
-
events = (event for _, event in log.iter_events())
|
|
2205
|
-
result = index.rebuild_from_events(events)
|
|
2206
|
-
|
|
2207
|
-
print(f"DB: {db_path}")
|
|
2208
|
-
print(f"Inserted: {result['inserted']}")
|
|
2209
|
-
print(f"Skipped: {result['skipped']}")
|
|
2210
|
-
|
|
2211
|
-
|
|
2212
|
-
def cmd_watch(args: argparse.Namespace) -> None:
|
|
2213
|
-
"""Watch filesystem changes and record them as activity events."""
|
|
2214
|
-
from htmlgraph.watch import watch_and_track
|
|
2215
|
-
|
|
2216
|
-
root = Path(args.root).resolve()
|
|
2217
|
-
graph_dir = Path(args.graph_dir)
|
|
2218
|
-
|
|
2219
|
-
watch_and_track(
|
|
2220
|
-
root=root,
|
|
2221
|
-
graph_dir=graph_dir,
|
|
2222
|
-
session_id=args.session_id,
|
|
2223
|
-
agent=args.agent,
|
|
2224
|
-
interval_seconds=args.interval,
|
|
2225
|
-
batch_seconds=args.batch_seconds,
|
|
2226
|
-
)
|
|
2227
|
-
|
|
2228
|
-
|
|
2229
|
-
def cmd_git_event(args: argparse.Namespace) -> None:
|
|
2230
|
-
"""Log a Git event (commit, checkout, merge, push)."""
|
|
2231
|
-
import sys
|
|
2232
|
-
|
|
2233
|
-
from htmlgraph.git_events import (
|
|
2234
|
-
log_git_checkout,
|
|
2235
|
-
log_git_commit,
|
|
2236
|
-
log_git_merge,
|
|
2237
|
-
log_git_push,
|
|
2238
|
-
)
|
|
2239
|
-
|
|
2240
|
-
if args.event_type == "commit":
|
|
2241
|
-
success = log_git_commit()
|
|
2242
|
-
if not success:
|
|
2243
|
-
sys.exit(1)
|
|
2244
|
-
return
|
|
2245
|
-
|
|
2246
|
-
if args.event_type == "checkout":
|
|
2247
|
-
if len(args.args) < 3:
|
|
2248
|
-
print(
|
|
2249
|
-
"Error: checkout requires args: <old_head> <new_head> <flag>",
|
|
2250
|
-
file=sys.stderr,
|
|
2251
|
-
)
|
|
2252
|
-
sys.exit(1)
|
|
2253
|
-
old_head, new_head, flag = args.args[0], args.args[1], args.args[2]
|
|
2254
|
-
if not log_git_checkout(old_head, new_head, flag):
|
|
2255
|
-
sys.exit(1)
|
|
2256
|
-
return
|
|
2257
|
-
|
|
2258
|
-
if args.event_type == "merge":
|
|
2259
|
-
squash_flag = args.args[0] if args.args else "0"
|
|
2260
|
-
if not log_git_merge(squash_flag):
|
|
2261
|
-
sys.exit(1)
|
|
2262
|
-
return
|
|
2263
|
-
|
|
2264
|
-
if args.event_type == "push":
|
|
2265
|
-
if len(args.args) < 2:
|
|
2266
|
-
print(
|
|
2267
|
-
"Error: push requires args: <remote_name> <remote_url>", file=sys.stderr
|
|
2268
|
-
)
|
|
2269
|
-
sys.exit(1)
|
|
2270
|
-
remote_name, remote_url = args.args[0], args.args[1]
|
|
2271
|
-
updates_text = sys.stdin.read()
|
|
2272
|
-
if not log_git_push(remote_name, remote_url, updates_text):
|
|
2273
|
-
sys.exit(1)
|
|
2274
|
-
return
|
|
2275
|
-
else:
|
|
2276
|
-
print(f"Error: Unknown event type '{args.event_type}'", file=sys.stderr)
|
|
2277
|
-
sys.exit(1)
|
|
2278
|
-
|
|
2279
|
-
|
|
2280
|
-
def cmd_mcp_serve(args: argparse.Namespace) -> None:
|
|
2281
|
-
"""Run the minimal MCP server over stdio."""
|
|
2282
|
-
from htmlgraph.mcp_server import serve_stdio
|
|
2283
|
-
|
|
2284
|
-
serve_stdio(graph_dir=Path(args.graph_dir), default_agent=args.agent)
|
|
2285
|
-
|
|
2286
|
-
|
|
2287
|
-
# =============================================================================
|
|
2288
|
-
# Work Management Commands (Smart Routing)
|
|
2289
|
-
# =============================================================================
|
|
2290
|
-
|
|
2291
|
-
|
|
2292
|
-
def cmd_work_next(args: argparse.Namespace) -> None:
|
|
2293
|
-
"""Get next best task using smart routing."""
|
|
2294
|
-
import json
|
|
2295
|
-
|
|
2296
|
-
from htmlgraph.converter import node_to_dict
|
|
2297
|
-
from htmlgraph.sdk import SDK
|
|
2298
|
-
|
|
2299
|
-
sdk = SDK(directory=args.graph_dir, agent=args.agent)
|
|
2300
|
-
|
|
2301
|
-
try:
|
|
2302
|
-
task = sdk.work_next(
|
|
2303
|
-
agent_id=args.agent, auto_claim=args.auto_claim, min_score=args.min_score
|
|
2304
|
-
)
|
|
2305
|
-
except ValueError as e:
|
|
2306
|
-
print(f"Error: {e}", file=sys.stderr)
|
|
2307
|
-
sys.exit(1)
|
|
2308
|
-
|
|
2309
|
-
if args.format == "json":
|
|
2310
|
-
if task:
|
|
2311
|
-
print(json.dumps(node_to_dict(task), indent=2, default=str))
|
|
2312
|
-
else:
|
|
2313
|
-
print(
|
|
2314
|
-
json.dumps(
|
|
2315
|
-
{"task": None, "message": "No suitable tasks found"}, indent=2
|
|
2316
|
-
)
|
|
2317
|
-
)
|
|
2318
|
-
else:
|
|
2319
|
-
if task:
|
|
2320
|
-
print(f"Next task: {task.id}")
|
|
2321
|
-
print(f" Title: {task.title}")
|
|
2322
|
-
print(f" Priority: {task.priority}")
|
|
2323
|
-
print(f" Status: {task.status}")
|
|
2324
|
-
if getattr(task, "required_capabilities", None):
|
|
2325
|
-
print(
|
|
2326
|
-
f" Required capabilities: {', '.join(task.required_capabilities)}"
|
|
2327
|
-
)
|
|
2328
|
-
complexity = getattr(task, "complexity", None)
|
|
2329
|
-
if complexity:
|
|
2330
|
-
print(f" Complexity: {complexity}")
|
|
2331
|
-
effort = getattr(task, "estimated_effort", None)
|
|
2332
|
-
if effort:
|
|
2333
|
-
print(f" Estimated effort: {effort}h")
|
|
2334
|
-
if args.auto_claim:
|
|
2335
|
-
print(f" ā Task claimed by {args.agent}")
|
|
2336
|
-
else:
|
|
2337
|
-
print("No suitable tasks found.")
|
|
2338
|
-
print(
|
|
2339
|
-
"Try lowering --min-score or check available tasks with 'htmlgraph feature list --status todo'"
|
|
2340
|
-
)
|
|
2341
|
-
|
|
2342
|
-
|
|
2343
|
-
def cmd_work_queue(args: argparse.Namespace) -> None:
|
|
2344
|
-
"""Get prioritized work queue for an agent."""
|
|
2345
|
-
import json
|
|
2346
|
-
|
|
2347
|
-
from htmlgraph.sdk import SDK
|
|
2348
|
-
|
|
2349
|
-
sdk = SDK(directory=args.graph_dir, agent=args.agent)
|
|
2350
|
-
|
|
2351
|
-
try:
|
|
2352
|
-
queue = sdk.get_work_queue(
|
|
2353
|
-
agent_id=args.agent, limit=args.limit, min_score=args.min_score
|
|
2354
|
-
)
|
|
2355
|
-
except ValueError as e:
|
|
2356
|
-
print(f"Error: {e}", file=sys.stderr)
|
|
2357
|
-
sys.exit(1)
|
|
2358
|
-
|
|
2359
|
-
if args.format == "json":
|
|
2360
|
-
print(json.dumps({"queue": queue, "count": len(queue)}, indent=2))
|
|
2361
|
-
else:
|
|
2362
|
-
if not queue:
|
|
2363
|
-
print(f"No tasks found for agent '{args.agent}'.")
|
|
2364
|
-
print(
|
|
2365
|
-
"Try lowering --min-score or check available tasks with 'htmlgraph feature list --status todo'"
|
|
2366
|
-
)
|
|
2367
|
-
return
|
|
2368
|
-
|
|
2369
|
-
print(f"Work queue for {args.agent} ({len(queue)} tasks):")
|
|
2370
|
-
print("=" * 90)
|
|
2371
|
-
print(f"{'Score':<8} {'Priority':<10} {'Complexity':<12} {'ID':<25} {'Title'}")
|
|
2372
|
-
print("=" * 90)
|
|
2373
|
-
|
|
2374
|
-
for item in queue:
|
|
2375
|
-
complexity = item.get("complexity", "N/A") or "N/A"
|
|
2376
|
-
title = (
|
|
2377
|
-
item["title"][:30] + "..." if len(item["title"]) > 33 else item["title"]
|
|
2378
|
-
)
|
|
2379
|
-
print(
|
|
2380
|
-
f"{item['score']:<8.1f} {item['priority']:<10} {complexity:<12} {item['task_id']:<25} {title}"
|
|
2381
|
-
)
|
|
2382
|
-
|
|
2383
|
-
|
|
2384
|
-
def cmd_agent_list(args: argparse.Namespace) -> None:
|
|
2385
|
-
"""List all registered agents."""
|
|
2386
|
-
import json
|
|
2387
|
-
|
|
2388
|
-
from htmlgraph.agent_registry import AgentRegistry
|
|
2389
|
-
|
|
2390
|
-
# Use AgentRegistry to get AgentProfile objects (not Node objects)
|
|
2391
|
-
registry = AgentRegistry(args.graph_dir)
|
|
2392
|
-
agents = list(registry.list_agents())
|
|
2393
|
-
|
|
2394
|
-
if args.format == "json":
|
|
2395
|
-
print(
|
|
2396
|
-
json.dumps(
|
|
2397
|
-
{"agents": [agent.to_dict() for agent in agents], "count": len(agents)},
|
|
2398
|
-
indent=2,
|
|
2399
|
-
)
|
|
2400
|
-
)
|
|
2401
|
-
else:
|
|
2402
|
-
if not agents:
|
|
2403
|
-
print("No agents registered.")
|
|
2404
|
-
print("Agents are automatically registered in .htmlgraph/agents.json")
|
|
2405
|
-
return
|
|
2406
|
-
|
|
2407
|
-
print(f"Registered agents ({len(agents)}):")
|
|
2408
|
-
print("=" * 90)
|
|
2409
|
-
|
|
2410
|
-
for agent in agents:
|
|
2411
|
-
status = "ā active" if agent.active else "ā inactive"
|
|
2412
|
-
print(f"\n{agent.id} ({agent.name}) - {status}")
|
|
2413
|
-
print(f" Capabilities: {', '.join(agent.capabilities)}")
|
|
2414
|
-
print(f" Max parallel tasks: {agent.max_parallel_tasks}")
|
|
2415
|
-
print(f" Preferred complexity: {', '.join(agent.preferred_complexity)}")
|
|
2416
|
-
|
|
2417
|
-
|
|
2418
|
-
# =============================================================================
|
|
2419
|
-
# Feature Management Commands
|
|
2420
|
-
# =============================================================================
|
|
2421
|
-
|
|
2422
|
-
|
|
2423
|
-
def cmd_feature_create(args: argparse.Namespace) -> None:
|
|
2424
|
-
"""Create a new feature."""
|
|
2425
|
-
import json
|
|
2426
|
-
|
|
2427
|
-
from htmlgraph.sdk import SDK
|
|
2428
|
-
|
|
2429
|
-
# Use SDK for feature creation (which now handles logging)
|
|
2430
|
-
sdk = SDK(directory=args.graph_dir, agent=args.agent)
|
|
2431
|
-
|
|
2432
|
-
try:
|
|
2433
|
-
# Determine collection (features -> create builder, others -> manual create?)
|
|
2434
|
-
# For now, only 'features' has a builder in SDK.features.create()
|
|
2435
|
-
# But BaseCollection doesn't have create().
|
|
2436
|
-
|
|
2437
|
-
# If collection is 'features', use builder
|
|
2438
|
-
if args.collection == "features":
|
|
2439
|
-
builder = sdk.features.create(
|
|
2440
|
-
title=args.title,
|
|
2441
|
-
description=args.description or "",
|
|
2442
|
-
priority=args.priority,
|
|
2443
|
-
)
|
|
2444
|
-
if args.steps:
|
|
2445
|
-
builder.add_steps(args.steps)
|
|
2446
|
-
node = builder.save()
|
|
2447
|
-
else:
|
|
2448
|
-
# Fallback to SessionManager directly for non-feature collections
|
|
2449
|
-
# (or extend SDK to support create on all collections)
|
|
2450
|
-
# For consistency with old CLI, we use SessionManager here if not features.
|
|
2451
|
-
# But wait, SDK initializes SessionManager.
|
|
2452
|
-
|
|
2453
|
-
# Creating bugs/chores via SDK isn't fully fluent yet.
|
|
2454
|
-
# Let's use the low-level SessionManager.create_feature logic for now via SDK's session_manager
|
|
2455
|
-
# IF we want to strictly use SDK. But SDK.session_manager IS exposed now.
|
|
2456
|
-
node = sdk.session_manager.create_feature(
|
|
2457
|
-
title=args.title,
|
|
2458
|
-
collection=args.collection,
|
|
2459
|
-
description=args.description or "",
|
|
2460
|
-
priority=args.priority,
|
|
2461
|
-
steps=args.steps,
|
|
2462
|
-
agent=args.agent,
|
|
2463
|
-
)
|
|
2464
|
-
|
|
2465
|
-
except ValueError as e:
|
|
2466
|
-
print(f"Error: {e}", file=sys.stderr)
|
|
2467
|
-
sys.exit(1)
|
|
2468
|
-
|
|
2469
|
-
if args.format == "json":
|
|
2470
|
-
from htmlgraph.converter import node_to_dict
|
|
2471
|
-
|
|
2472
|
-
print(json.dumps(node_to_dict(node), indent=2))
|
|
2473
|
-
else:
|
|
2474
|
-
print(f"Created: {node.id}")
|
|
2475
|
-
print(f" Title: {node.title}")
|
|
2476
|
-
print(f" Status: {node.status}")
|
|
2477
|
-
print(f" Path: {args.graph_dir}/{args.collection}/{node.id}.html")
|
|
2478
|
-
|
|
2479
|
-
|
|
2480
|
-
-def cmd_feature_start(args: argparse.Namespace) -> None:
-    """Start working on a feature."""
-    import json
-
-    from htmlgraph.sdk import SDK
-
-    sdk = SDK(directory=args.graph_dir, agent=args.agent)
-    collection = getattr(sdk, args.collection, None)
-
-    if not collection:
-        print(
-            f"Error: Collection '{args.collection}' not found in SDK.", file=sys.stderr
-        )
-        sys.exit(1)
-
-    try:
-        node = collection.start(args.id)
-    except ValueError as e:
-        print(f"Error: {e}", file=sys.stderr)
-        sys.exit(1)
-
-    if node is None:
-        print(
-            f"Error: Feature '{args.id}' not found in {args.collection}.",
-            file=sys.stderr,
-        )
-        sys.exit(1)
-
-    if args.format == "json":
-        from htmlgraph.converter import node_to_dict
-
-        print(json.dumps(node_to_dict(node), indent=2))
-    else:
-        print(f"Started: {node.id}")
-        print(f" Title: {node.title}")
-        print(f" Status: {node.status}")
-
-        # Show WIP status
-        status = sdk.session_manager.get_status()
-        print(f" WIP: {status['wip_count']}/{status['wip_limit']}")
-
-
-def cmd_feature_complete(args: argparse.Namespace) -> None:
-    """Mark a feature as complete."""
-    import json
-
-    from htmlgraph.sdk import SDK
-
-    sdk = SDK(directory=args.graph_dir, agent=args.agent)
-    collection = getattr(sdk, args.collection, None)
-
-    if not collection:
-        print(
-            f"Error: Collection '{args.collection}' not found in SDK.", file=sys.stderr
-        )
-        sys.exit(1)
-
-    try:
-        node = collection.complete(args.id)
-    except ValueError as e:
-        print(f"Error: {e}", file=sys.stderr)
-        sys.exit(1)
-
-    if node is None:
-        print(
-            f"Error: Feature '{args.id}' not found in {args.collection}.",
-            file=sys.stderr,
-        )
-        sys.exit(1)
-
-    if args.format == "json":
-        from htmlgraph.converter import node_to_dict
-
-        print(json.dumps(node_to_dict(node), indent=2))
-    else:
-        print(f"Completed: {node.id}")
-        print(f" Title: {node.title}")
-
-
-def cmd_feature_primary(args: argparse.Namespace) -> None:
-    """Set the primary feature for attribution."""
-    import json
-
-    from htmlgraph.sdk import SDK
-
-    sdk = SDK(directory=args.graph_dir, agent=args.agent)
-
-    # Only FeatureCollection has set_primary currently
-    if args.collection == "features":
-        node = sdk.features.set_primary(args.id)
-    else:
-        # Fallback to direct session manager for other collections
-        node = sdk.session_manager.set_primary_feature(
-            args.id, collection=args.collection, agent=args.agent
-        )
-
-    if node is None:
-        print(
-            f"Error: Feature '{args.id}' not found in {args.collection}.",
-            file=sys.stderr,
-        )
-        sys.exit(1)
-
-    if args.format == "json":
-        from htmlgraph.converter import node_to_dict
-
-        print(json.dumps(node_to_dict(node), indent=2))
-    else:
-        print(f"Primary feature set: {node.id}")
-        print(f" Title: {node.title}")
-
-
-def cmd_feature_claim(args: argparse.Namespace) -> None:
-    """Claim a feature."""
-    import json
-
-    from htmlgraph.sdk import SDK
-
-    sdk = SDK(directory=args.graph_dir, agent=args.agent)
-    collection = getattr(sdk, args.collection, None)
-
-    if not collection:
-        print(
-            f"Error: Collection '{args.collection}' not found in SDK.", file=sys.stderr
-        )
-        sys.exit(1)
-
-    try:
-        node = collection.claim(args.id)
-    except ValueError as e:
-        print(f"Error: {e}", file=sys.stderr)
-        sys.exit(1)
-
-    if node is None:
-        print(
-            f"Error: Feature '{args.id}' not found in {args.collection}.",
-            file=sys.stderr,
-        )
-        sys.exit(1)
-
-    if args.format == "json":
-        from htmlgraph.converter import node_to_dict
-
-        print(json.dumps(node_to_dict(node), indent=2))
-    else:
-        print(f"Claimed: {node.id}")
-        print(f" Agent: {node.agent_assigned}")
-        print(f" Session: {node.claimed_by_session}")
-
-
-def cmd_feature_release(args: argparse.Namespace) -> None:
-    """Release a feature."""
-    import json
-
-    from htmlgraph.sdk import SDK
-
-    sdk = SDK(directory=args.graph_dir, agent=args.agent)
-    collection = getattr(sdk, args.collection, None)
-
-    if not collection:
-        print(
-            f"Error: Collection '{args.collection}' not found in SDK.", file=sys.stderr
-        )
-        sys.exit(1)
-
-    try:
-        node = collection.release(args.id)
-    except ValueError as e:
-        print(f"Error: {e}", file=sys.stderr)
-        sys.exit(1)
-
-    if node is None:
-        print(
-            f"Error: Feature '{args.id}' not found in {args.collection}.",
-            file=sys.stderr,
-        )
-        sys.exit(1)
-
-    if args.format == "json":
-        from htmlgraph.converter import node_to_dict
-
-        print(json.dumps(node_to_dict(node), indent=2))
-    else:
-        print(f"Released: {node.id}")
-
-
-def cmd_feature_auto_release(args: argparse.Namespace) -> None:
-    """Release all features claimed by an agent."""
-    import json
-
-    from htmlgraph.sdk import SDK
-
-    sdk = SDK(directory=args.graph_dir, agent=args.agent)
-    # auto_release_features is on SessionManager, exposed via SDK
-    released = sdk.session_manager.auto_release_features(agent=args.agent)
-
-    if args.format == "json":
-        print(json.dumps({"released": released}, indent=2))
-    else:
-        if not released:
-            print(f"No features claimed by agent '{args.agent}'.")
-        else:
-            print(f"Released {len(released)} feature(s):")
-            for node_id in released:
-                print(f" - {node_id}")
-
-
-def cmd_orchestrator_enable(args: argparse.Namespace) -> None:
-    """Enable orchestrator mode."""
-    from typing import Literal
-
-    from htmlgraph.orchestrator_mode import OrchestratorModeManager
-
-    manager = OrchestratorModeManager(args.graph_dir)
-    level: Literal["strict", "guidance"] = (
-        args.level if hasattr(args, "level") and args.level else "strict"
-    )
-    manager.enable(level=level)
-
-    level_text = "strict enforcement" if level == "strict" else "guidance mode"
-    print(f"✓ Orchestrator mode enabled ({level_text})")
-
-
-def cmd_orchestrator_disable(args: argparse.Namespace) -> None:
-    """Disable orchestrator mode."""
-    from htmlgraph.orchestrator_mode import OrchestratorModeManager
-
-    manager = OrchestratorModeManager(args.graph_dir)
-    manager.disable(by_user=True)
-    print("✓ Orchestrator mode disabled")
-
-
-def cmd_orchestrator_status(args: argparse.Namespace) -> None:
-    """Show orchestrator mode status."""
-    from htmlgraph.orchestrator_mode import OrchestratorModeManager
-
-    manager = OrchestratorModeManager(args.graph_dir)
-    status = manager.status()
-
-    if status["enabled"]:
-        level = status["enforcement_level"]
-        level_text = "strict enforcement" if level == "strict" else "guidance mode"
-        print(f"Orchestrator mode: enabled ({level_text})")
-        if status["activated_at"]:
-            print(f"Activated at: {status['activated_at']}")
-        if status["auto_activated"]:
-            print("Auto-activated: yes")
-    else:
-        print("Orchestrator mode: disabled")
-        if status["disabled_by_user"]:
-            print("Disabled by user (auto-activation prevented)")
-
-
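The status handler above only relies on a handful of keys; a minimal sketch of the mapping it expects back from OrchestratorModeManager.status(), with illustrative values:

    status = {
        "enabled": True,
        "enforcement_level": "strict",            # or "guidance"
        "activated_at": "2024-01-01T00:00:00Z",   # placeholder timestamp
        "auto_activated": False,
        "disabled_by_user": False,
    }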
-def cmd_publish(args: argparse.Namespace) -> None:
-    """Build and publish the package to PyPI (Interoperable)."""
-    import shutil
-    import subprocess
-
-    # Ensure we are in project root
-    if not Path("pyproject.toml").exists():
-        print(
-            "Error: pyproject.toml not found. Run this from the project root.",
-            file=sys.stderr,
-        )
-        sys.exit(1)
-
-    # 1. Clean dist/
-    dist_dir = Path("dist")
-    if dist_dir.exists():
-        print("Cleaning dist/...")
-        shutil.rmtree(dist_dir)
-
-    # 2. Build
-    print("Building package with uv...")
-    try:
-        subprocess.run(["uv", "build"], check=True)
-    except subprocess.CalledProcessError:
-        print("Error: Build failed.", file=sys.stderr)
-        sys.exit(1)
-    except FileNotFoundError:
-        print("Error: 'uv' command not found.", file=sys.stderr)
-        sys.exit(1)
-
-    # 3. Publish
-    if args.dry_run:
-        print("Dry run: Skipping publish.")
-        return
-
-    print("Publishing to PyPI...")
-    env = os.environ.copy()
-
-    # Smart credential loading from .env
-    # Maps PyPI_API_TOKEN (common in .env) to UV_PUBLISH_TOKEN (needed by uv)
-    if "UV_PUBLISH_TOKEN" not in env:
-        dotenv = Path(".env")
-        if dotenv.exists():
-            try:
-                content = dotenv.read_text()
-                for line in content.splitlines():
-                    if line.strip() and not line.startswith("#") and "=" in line:
-                        key, val = line.split("=", 1)
-                        key = key.strip()
-                        val = val.strip().strip("'").strip('"')
-                        if key == "PyPI_API_TOKEN":
-                            env["UV_PUBLISH_TOKEN"] = val
-                            print("Loaded credentials from .env")
-            except Exception:
-                pass
-
-    try:
-        subprocess.run(["uv", "publish"], env=env, check=True)
-        print("\n✓ Successfully published!")
-    except subprocess.CalledProcessError:
-        print("\n✗ Publish failed.", file=sys.stderr)
-        sys.exit(1)
-
-
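The credential shim above only recognizes one key and forwards it to `uv publish` via the environment; a minimal sketch of the .env line it expects (the token value is a placeholder, never a real credential):

    # .env -- the loader maps this to UV_PUBLISH_TOKEN before running `uv publish`
    PyPI_API_TOKEN=pypi-XXXXXXXXXXXX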
-def cmd_feature_list(args: argparse.Namespace) -> None:
-    """List features by status."""
-    import json
-
-    from htmlgraph.converter import node_to_dict
-    from htmlgraph.sdk import SDK
-
-    # Use SDK for feature queries
-    sdk = SDK(directory=args.graph_dir)
-
-    # Query features with SDK
-    if args.status:
-        nodes = sdk.features.where(status=args.status)
-    else:
-        nodes = sdk.features.all()
-
-    # Sort by priority then updated
-    from datetime import timezone
-
-    priority_order = {"critical": 0, "high": 1, "medium": 2, "low": 3}
-
-    def sort_key(n: Any) -> Any:
-        # Ensure timezone-aware datetime for comparison
-        updated = n.updated
-        if updated.tzinfo is None:
-            updated = updated.replace(tzinfo=timezone.utc)
-        return (priority_order.get(n.priority, 99), updated)
-
-    nodes.sort(key=sort_key, reverse=True)
-
-    if args.format == "json":
-        response = create_json_response(
-            command="feature list",
-            data=[node_to_dict(n) for n in nodes],
-            metadata={
-                "graph_dir": args.graph_dir,
-                "status_filter": args.status,
-                "total_count": len(nodes),
-            },
-        )
-        print(json.dumps(response, indent=2, default=str))
-    else:
-        if not nodes:
-            if not args.quiet:
-                print(
-                    f"No features found with status '{args.status}'."
-                    if args.status
-                    else "No features found."
-                )
-            return
-
-        # Header (skip if quiet)
-        if not args.quiet:
-            print(f"{'ID':<25} {'Status':<12} {'Priority':<10} {'Title'}")
-            print("=" * 80)
-
-        # List features
-        for node in nodes:
-            title = node.title[:35] + "..." if len(node.title) > 38 else node.title
-            print(f"{node.id:<25} {node.status:<12} {node.priority:<10} {title}")
-
-        # Verbose output
-        if args.verbose >= 1:
-            print("\n--- Verbose Details ---")
-            print(f"Total features: {len(nodes)}")
-            print(f"Graph directory: {args.graph_dir}")
-            if args.status:
-                print(f"Filtered by status: {args.status}")
-
-        if args.verbose >= 2:
-            print("\nFeature breakdown by status:")
-            from collections import Counter
-
-            status_counts = Counter(n.status for n in sdk.features.all())
-            for status, count in sorted(status_counts.items()):
-                marker = "→" if status == args.status else " "
-                print(f" {marker} {status}: {count}")
-
-
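The sort key above guards against mixing naive and timezone-aware timestamps, which Python refuses to compare; a minimal illustration of the coercion it applies (dates are made up):

    from datetime import datetime, timezone

    naive = datetime(2024, 1, 1, 12, 0)                       # no tzinfo
    aware = datetime(2024, 1, 2, 12, 0, tzinfo=timezone.utc)
    # `naive < aware` would raise TypeError; coercing first makes the pair orderable
    assert naive.replace(tzinfo=timezone.utc) < aware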
-# =============================================================================
-# Track Management Commands (Conductor-Style Planning)
-# =============================================================================
-
-
-def cmd_feature_step_complete(args: argparse.Namespace) -> None:
-    """Mark one or more feature steps as complete via API."""
-    import http.client
-    import json
-
-    # Parse step indices (support both space-separated and comma-separated)
-    step_indices: list[int] = []
-    for step_arg in args.steps:
-        if "," in step_arg:
-            # Comma-separated: "0,1,2"
-            step_indices.extend(
-                int(s.strip()) for s in step_arg.split(",") if s.strip()
-            )
-        else:
-            # Space-separated: "0" "1" "2"
-            step_indices.append(int(step_arg))
-
-    # Remove duplicates and sort
-    step_indices = sorted(set(step_indices))
-
-    if not step_indices:
-        print("Error: No step indices provided", file=sys.stderr)
-        sys.exit(1)
-
-    # Make API requests for each step
-    success_count = 0
-    error_count = 0
-    results = []
-
-    for step_index in step_indices:
-        try:
-            conn = http.client.HTTPConnection(args.host, args.port, timeout=5)
-            body = json.dumps({"complete_step": step_index})
-            headers = {"Content-Type": "application/json"}
-
-            conn.request("PATCH", f"/api/{args.collection}/{args.id}", body, headers)
-            response = conn.getresponse()
-            response_data = response.read().decode()
-
-            if response.status == 200:
-                success_count += 1
-                results.append({"step": step_index, "status": "success"})
-                if args.format != "json":
-                    print(f"✓ Marked step {step_index} complete")
-            else:
-                error_count += 1
-                results.append(
-                    {"step": step_index, "status": "error", "message": response_data}
-                )
-                if args.format != "json":
-                    print(
-                        f"✗ Failed to mark step {step_index} complete: {response_data}",
-                        file=sys.stderr,
-                    )
-
-            conn.close()
-        except Exception as e:
-            error_count += 1
-            results.append({"step": step_index, "status": "error", "message": str(e)})
-            if args.format != "json":
-                print(
-                    f"✗ Error marking step {step_index} complete: {e}", file=sys.stderr
-                )
-
-    # Output results
-    if args.format == "json":
-        output = {
-            "feature_id": args.id,
-            "total_steps": len(step_indices),
-            "success": success_count,
-            "errors": error_count,
-            "results": results,
-        }
-        print(json.dumps(output, indent=2))
-    else:
-        print(
-            f"\nCompleted {success_count}/{len(step_indices)} steps for feature '{args.id}'"
-        )
-    if error_count > 0:
-        sys.exit(1)
-
-
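Each step completion above is a single PATCH against the running server; a minimal sketch of the request it issues (host, port, and IDs are illustrative):

    import http.client
    import json

    conn = http.client.HTTPConnection("localhost", 8080, timeout=5)
    conn.request(
        "PATCH",
        "/api/features/feat-001",                 # /api/{collection}/{id}
        json.dumps({"complete_step": 0}),         # zero-based step index
        {"Content-Type": "application/json"},
    )
    print(conn.getresponse().status)              # 200 on success
    conn.close()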
-def cmd_feature_delete(args: argparse.Namespace) -> None:
-    """Delete a feature."""
-    import json
-    import sys
-
-    from htmlgraph import SDK
-
-    sdk = SDK(agent=getattr(args, "agent", "cli"), directory=args.graph_dir)
-
-    # Get the feature first to show confirmation
-    collection = getattr(sdk, args.collection, None)
-    if not collection:
-        print(f"Error: Collection '{args.collection}' not found", file=sys.stderr)
-        sys.exit(1)
-
-    feature = collection.get(args.id)
-    if not feature:
-        print(
-            f"Error: {args.collection.rstrip('s').capitalize()} '{args.id}' not found",
-            file=sys.stderr,
-        )
-        sys.exit(1)
-
-    # Confirmation prompt (unless --yes flag)
-    if not args.yes:
-        print(f"Delete {args.collection.rstrip('s')} '{args.id}'?")
-        print(f" Title: {feature.title}")
-        print(f" Status: {feature.status}")
-        print("\nThis cannot be undone. Continue? [y/N] ", end="")
-
-        response = input().strip().lower()
-        if response not in ("y", "yes"):
-            print("Cancelled")
-            sys.exit(0)
-
-    # Delete
-    try:
-        success = collection.delete(args.id)
-        if success:
-            if args.format == "json":
-                data = {"id": args.id, "title": feature.title, "deleted": True}
-                print(json.dumps(data, indent=2))
-            else:
-                print(f"Deleted {args.collection.rstrip('s')}: {args.id}")
-                print(f" Title: {feature.title}")
-        else:
-            print(
-                f"Error: Failed to delete {args.collection.rstrip('s')} '{args.id}'",
-                file=sys.stderr,
-            )
-            sys.exit(1)
-    except Exception as e:
-        print(f"Error: {e}", file=sys.stderr)
-        sys.exit(1)
-
-
-def cmd_track_new(args: argparse.Namespace) -> None:
-    """Create a new track."""
-    import json
-
-    from htmlgraph.track_manager import TrackManager
-
-    manager = TrackManager(args.graph_dir)
-
-    try:
-        track = manager.create_track(
-            title=args.title,
-            description=args.description or "",
-            priority=args.priority,
-        )
-    except ValueError as e:
-        print(f"Error: {e}", file=sys.stderr)
-        sys.exit(1)
-
-    if args.format == "json":
-        data = {
-            "id": track.id,
-            "title": track.title,
-            "status": track.status,
-            "priority": track.priority,
-            "path": f"{args.graph_dir}/tracks/{track.id}/",
-        }
-        print(json.dumps(data, indent=2))
-    else:
-        print(f"Created track: {track.id}")
-        print(f" Title: {track.title}")
-        print(f" Status: {track.status}")
-        print(f" Priority: {track.priority}")
-        print(f" Path: {args.graph_dir}/tracks/{track.id}/")
-        print("\nNext steps:")
-        print(f" - Create spec: htmlgraph track spec {track.id} 'Spec Title'")
-        print(f" - Create plan: htmlgraph track plan {track.id} 'Plan Title'")
-
-
-def cmd_track_list(args: argparse.Namespace) -> None:
-    """List all tracks."""
-    import json
-
-    from htmlgraph.track_manager import TrackManager
-
-    manager = TrackManager(args.graph_dir)
-    track_ids = manager.list_tracks()
-
-    if args.format == "json":
-        print(json.dumps({"tracks": track_ids}, indent=2))
-    else:
-        if not track_ids:
-            print("No tracks found.")
-            print("\nCreate a track with: htmlgraph track new 'Track Title'")
-            return
-
-        print(f"Tracks in {args.graph_dir}/tracks/:")
-        print("=" * 60)
-        for track_id in track_ids:
-            # Check for both consolidated (single file) and directory-based formats
-            track_file = Path(args.graph_dir) / "tracks" / f"{track_id}.html"
-            track_dir = Path(args.graph_dir) / "tracks" / track_id
-
-            if track_file.exists():
-                # Consolidated format - spec and plan are in the same file
-                content = track_file.read_text(encoding="utf-8")
-                has_spec = (
-                    'data-section="overview"' in content
-                    or 'data-section="requirements"' in content
-                )
-                has_plan = 'data-section="plan"' in content
-                format_indicator = " (consolidated)"
-            else:
-                # Directory format
-                has_spec = (track_dir / "spec.html").exists()
-                has_plan = (track_dir / "plan.html").exists()
-                format_indicator = ""
-
-            components = []
-            if has_spec:
-                components.append("spec")
-            if has_plan:
-                components.append("plan")
-
-            components_str = f" [{', '.join(components)}]" if components else " [empty]"
-            print(f" {track_id}{components_str}{format_indicator}")
-
-
-def cmd_track_spec(args: argparse.Namespace) -> None:
-    """Create a spec for a track."""
-    import json
-
-    from htmlgraph.track_manager import TrackManager
-
-    manager = TrackManager(args.graph_dir)
-
-    # Check if track uses consolidated format
-    if manager.is_consolidated(args.track_id):
-        track_file = manager.tracks_dir / f"{args.track_id}.html"
-        print(f"Track '{args.track_id}' uses consolidated single-file format.")
-        print(f"Spec is embedded in: {track_file}")
-        print("\nTo create a track with separate spec/plan files, use:")
-        print(" sdk.tracks.builder().separate_files().title('...').create()")
-        return
-
-    try:
-        spec = manager.create_spec(
-            track_id=args.track_id,
-            title=args.title,
-            overview=args.overview or "",
-            context=args.context or "",
-            author=args.author,
-        )
-    except ValueError as e:
-        print(f"Error: {e}", file=sys.stderr)
-        sys.exit(1)
-    except FileNotFoundError as e:
-        print(f"Error: {e}", file=sys.stderr)
-        sys.exit(1)
-
-    if args.format == "json":
-        data = {
-            "id": spec.id,
-            "title": spec.title,
-            "track_id": spec.track_id,
-            "status": spec.status,
-            "path": f"{args.graph_dir}/tracks/{args.track_id}/spec.html",
-        }
-        print(json.dumps(data, indent=2))
-    else:
-        print(f"Created spec: {spec.id}")
-        print(f" Title: {spec.title}")
-        print(f" Track: {spec.track_id}")
-        print(f" Status: {spec.status}")
-        print(f" Path: {args.graph_dir}/tracks/{args.track_id}/spec.html")
-        print(f"\nView spec: open {args.graph_dir}/tracks/{args.track_id}/spec.html")
-
-
-def cmd_track_plan(args: argparse.Namespace) -> None:
-    """Create a plan for a track."""
-    import json
-
-    from htmlgraph.track_manager import TrackManager
-
-    manager = TrackManager(args.graph_dir)
-
-    # Check if track uses consolidated format
-    if manager.is_consolidated(args.track_id):
-        track_file = manager.tracks_dir / f"{args.track_id}.html"
-        print(f"Track '{args.track_id}' uses consolidated single-file format.")
-        print(f"Plan is embedded in: {track_file}")
-        print("\nTo create a track with separate spec/plan files, use:")
-        print(" sdk.tracks.builder().separate_files().title('...').create()")
-        return
-
-    try:
-        plan = manager.create_plan(
-            track_id=args.track_id,
-            title=args.title,
-        )
-    except ValueError as e:
-        print(f"Error: {e}", file=sys.stderr)
-        sys.exit(1)
-    except FileNotFoundError as e:
-        print(f"Error: {e}", file=sys.stderr)
-        sys.exit(1)
-
-    if args.format == "json":
-        data = {
-            "id": plan.id,
-            "title": plan.title,
-            "track_id": plan.track_id,
-            "status": plan.status,
-            "path": f"{args.graph_dir}/tracks/{args.track_id}/plan.html",
-        }
-        print(json.dumps(data, indent=2))
-    else:
-        print(f"Created plan: {plan.id}")
-        print(f" Title: {plan.title}")
-        print(f" Track: {plan.track_id}")
-        print(f" Status: {plan.status}")
-        print(f" Path: {args.graph_dir}/tracks/{args.track_id}/plan.html")
-        print(f"\nView plan: open {args.graph_dir}/tracks/{args.track_id}/plan.html")
-
-
-def cmd_track_delete(args: argparse.Namespace) -> None:
-    """Delete a track."""
-    import json
-
-    from htmlgraph.track_manager import TrackManager
-
-    manager = TrackManager(args.graph_dir)
-
-    try:
-        manager.delete_track(args.track_id)
-    except ValueError as e:
-        print(f"Error: {e}", file=sys.stderr)
-        sys.exit(1)
-
-    if args.format == "json":
-        data = {"deleted": True, "track_id": args.track_id}
-        print(json.dumps(data, indent=2))
-    else:
-        print(f"✓ Deleted track: {args.track_id}")
-        print(f" Removed: {args.graph_dir}/tracks/{args.track_id}/")
-
-
-def create_default_index(path: Path) -> None:
-    """
-    Create a default index.html for new projects.
-
-    The dashboard UI evolves quickly; to keep new projects consistent with the
-    current dashboard, prefer a packaged HTML template over a hardcoded string.
-    """
-    template = Path(__file__).parent / "dashboard.html"
-    try:
-        if template.exists():
-            path.write_text(template.read_text(encoding="utf-8"), encoding="utf-8")
-            return
-    except Exception:
-        pass
-
-    # Fallback (rare): minimal landing page.
-    path.write_text(
-        "<!doctype html><html><head><meta charset='utf-8'><title>HtmlGraph</title></head>"
-        "<body><h1>HtmlGraph</h1><p>Run <code>htmlgraph serve</code> and open "
-        "<code>http://localhost:8080</code>.</p></body></html>",
-        encoding="utf-8",
-    )
-
-
-def main() -> None:
-    parser = argparse.ArgumentParser(
-        description="HtmlGraph - HTML is All You Need",
-        formatter_class=argparse.RawDescriptionHelpFormatter,
-        epilog="""
-Examples:
-  htmlgraph init                          # Initialize .htmlgraph in current dir
-  htmlgraph serve                         # Start server on port 8080
-  htmlgraph status                        # Show graph status
-  htmlgraph query "[data-status='todo']"  # Query nodes
-
-Session Management:
-  htmlgraph session start                 # Start a new session (auto-ID)
-  htmlgraph session start --id my-session --title "Bug fixes"
-  htmlgraph session end my-session        # End a session
-  htmlgraph session list                  # List all sessions
-  htmlgraph activity Edit "Edit: src/app.py:45-60" --files src/app.py
-
-Feature Management:
-  htmlgraph feature list                  # List all features
-  htmlgraph feature start feat-001        # Start working on a feature
-  htmlgraph feature primary feat-001      # Set primary feature
-  htmlgraph feature claim feat-001        # Claim feature for current agent
-  htmlgraph feature release feat-001      # Release claim
-  htmlgraph feature auto-release          # Release all claims for agent
-  htmlgraph feature step-complete feat-001 0 1 2  # Mark steps complete
-  htmlgraph feature complete feat-001     # Mark feature as done
-
-Track Management (Conductor-Style Planning):
-  htmlgraph track new "User Authentication"                       # Create a new track
-  htmlgraph track list                                             # List all tracks
-  htmlgraph track spec track-001-auth "Auth Specification"        # Create spec
-  htmlgraph track plan track-001-auth "Auth Implementation Plan"  # Create plan
-
-Analytics:
-  htmlgraph analytics                           # Project-wide work type analytics
-  htmlgraph analytics --recent 10               # Analyze last 10 sessions
-  htmlgraph analytics --session-id session-123  # Detailed session metrics
-
-curl Examples:
-  curl localhost:8080/api/status
-  curl localhost:8080/api/features
-  curl -X POST localhost:8080/api/features -d '{"title": "New feature"}'
-  curl -X PATCH localhost:8080/api/features/feat-001 -d '{"status": "done"}'
-
-Debugging & Quality:
-  See DEBUGGING.md for comprehensive debugging guide
-
-Debugging agents:
-  researcher.md  - Research documentation before implementing
-  debugger.md    - Systematic error analysis
-  test-runner.md - Quality gates and validation
-
-Quick diagnostics:
-  htmlgraph status        - Check current state
-  htmlgraph feature list  - List all features
-  htmlgraph debug         - Show debugging resources
-
-For more help: https://github.com/Shakes-tzd/htmlgraph
-""",
-    )
-
-    # Global output control flags (work across all commands)
-    parser.add_argument(
-        "--format",
-        choices=["text", "json"],
-        default="text",
-        help="Output format: text (default) or json",
-    )
-    parser.add_argument(
-        "--quiet",
-        "-q",
-        action="store_true",
-        help="Suppress progress messages and non-essential output",
-    )
-    parser.add_argument(
-        "--verbose",
-        "-v",
-        action="count",
-        default=0,
-        help="Increase verbosity (can be used multiple times: -v, -vv, -vvv)",
-    )
-
-    subparsers = parser.add_subparsers(dest="command", help="Command to run")
-
-    # serve
-    serve_parser = subparsers.add_parser("serve", help="Start the HtmlGraph server")
-    serve_parser.add_argument(
-        "--port", "-p", type=int, default=8080, help="Port (default: 8080)"
-    )
-    serve_parser.add_argument(
-        "--host", default="0.0.0.0", help="Host to bind to (default: 0.0.0.0)"
-    )
-    serve_parser.add_argument(
-        "--graph-dir", "-g", default=".htmlgraph", help="Graph directory"
-    )
-    serve_parser.add_argument(
-        "--static-dir", "-s", default=".", help="Static files directory"
-    )
-    serve_parser.add_argument(
-        "--no-watch",
-        action="store_true",
-        help="Disable file watching (auto-reload disabled)",
-    )
-    serve_parser.add_argument(
-        "--auto-port",
-        action="store_true",
-        help="Automatically find an available port if default is occupied",
-    )
-
-    # init
-    init_parser = subparsers.add_parser("init", help="Initialize .htmlgraph directory")
-    init_parser.add_argument(
-        "dir", nargs="?", default=".", help="Directory to initialize"
-    )
-    init_parser.add_argument(
-        "--install-hooks",
-        action="store_true",
-        help="Install Git hooks for event logging",
-    )
-    init_parser.add_argument(
-        "--interactive", "-i", action="store_true", help="Interactive setup wizard"
-    )
-    init_parser.add_argument(
-        "--no-index",
-        action="store_true",
-        help="Do not create the analytics cache (index.sqlite)",
-    )
-    init_parser.add_argument(
-        "--no-update-gitignore",
-        action="store_true",
-        help="Do not update/create .gitignore for HtmlGraph cache files",
-    )
-    init_parser.add_argument(
-        "--no-events-keep",
-        action="store_true",
-        help="Do not create .htmlgraph/events/.gitkeep",
-    )
-
-    # install-hooks
-    hooks_parser = subparsers.add_parser(
-        "install-hooks", help="Install Git hooks for automatic tracking"
-    )
-    hooks_parser.add_argument(
-        "--project-dir", "-d", default=".", help="Project directory (default: current)"
-    )
-    hooks_parser.add_argument(
-        "--force",
-        "-f",
-        action="store_true",
-        help="Force installation even if hooks exist",
-    )
-    hooks_parser.add_argument(
-        "--dry-run",
-        action="store_true",
-        help="Show what would be done without doing it",
-    )
-    hooks_parser.add_argument(
-        "--list", "-l", action="store_true", help="List hook installation status"
-    )
-    hooks_parser.add_argument(
-        "--uninstall", "-u", metavar="HOOK", help="Uninstall a specific hook"
-    )
-    hooks_parser.add_argument(
-        "--enable", metavar="HOOK", help="Enable a specific hook in configuration"
-    )
-    hooks_parser.add_argument(
-        "--disable", metavar="HOOK", help="Disable a specific hook in configuration"
-    )
-    hooks_parser.add_argument(
-        "--use-copy", action="store_true", help="Use file copy instead of symlinks"
-    )
-
-    # status
-    status_parser = subparsers.add_parser("status", help="Show graph status")
-    status_parser.add_argument(
-        "--graph-dir", "-g", default=".htmlgraph", help="Graph directory"
-    )
-
-    # debug
-    debug_parser = subparsers.add_parser(
-        "debug", help="Show debugging resources and system diagnostics"
-    )
-    debug_parser.add_argument(
-        "--graph-dir", "-g", default=".htmlgraph", help="Graph directory"
-    )
-
-    # query
-    query_parser = subparsers.add_parser("query", help="Query nodes with CSS selector")
-    query_parser.add_argument(
-        "selector", help="CSS selector (e.g. [data-status='todo'])"
-    )
-    query_parser.add_argument(
-        "--graph-dir", "-g", default=".htmlgraph", help="Graph directory"
-    )
-    query_parser.add_argument(
-        "--format", "-f", choices=["text", "json"], default="text", help="Output format"
-    )
-
-    # =========================================================================
-    # Session Management
-    # =========================================================================
-
-    # session (with subcommands)
-    session_parser = subparsers.add_parser("session", help="Session management")
-    session_subparsers = session_parser.add_subparsers(
-        dest="session_command", help="Session command"
-    )
-
-    # session start
-    session_start = session_subparsers.add_parser("start", help="Start a new session")
-    session_start.add_argument(
-        "--id", help="Session ID (auto-generated if not provided)"
-    )
-    session_start.add_argument("--agent", default="claude-code", help="Agent name")
-    session_start.add_argument("--title", help="Session title")
-    session_start.add_argument(
-        "--graph-dir", "-g", default=".htmlgraph", help="Graph directory"
-    )
-    session_start.add_argument(
-        "--format", "-f", choices=["text", "json"], default="text", help="Output format"
-    )
-
-    # session end
-    session_end = session_subparsers.add_parser("end", help="End a session")
-    session_end.add_argument("id", help="Session ID to end")
-    session_end.add_argument("--notes", help="Handoff notes for the next session")
-    session_end.add_argument("--recommend", help="Recommended next steps")
-    session_end.add_argument(
-        "--blocker", action="append", default=[], help="Blocker to record (repeatable)"
-    )
-    session_end.add_argument(
-        "--graph-dir", "-g", default=".htmlgraph", help="Graph directory"
-    )
-    session_end.add_argument(
-        "--format", "-f", choices=["text", "json"], default="text", help="Output format"
-    )
-
-    # session handoff
-    session_handoff = session_subparsers.add_parser(
-        "handoff", help="Set or show session handoff context"
-    )
-    session_handoff.add_argument(
-        "--session-id", help="Session ID (defaults to active session)"
-    )
-    session_handoff.add_argument(
-        "--agent", help="Agent filter (used for --show when no session provided)"
-    )
-    session_handoff.add_argument("--notes", help="Handoff notes for the next session")
-    session_handoff.add_argument("--recommend", help="Recommended next steps")
-    session_handoff.add_argument(
-        "--blocker", action="append", default=[], help="Blocker to record (repeatable)"
-    )
-    session_handoff.add_argument(
-        "--show", action="store_true", help="Show handoff context instead of setting it"
-    )
-    session_handoff.add_argument(
-        "--graph-dir", "-g", default=".htmlgraph", help="Graph directory"
-    )
-    session_handoff.add_argument(
-        "--format", "-f", choices=["text", "json"], default="text", help="Output format"
-    )
-
-    # session list
-    session_list = session_subparsers.add_parser("list", help="List all sessions")
-    session_list.add_argument(
-        "--graph-dir", "-g", default=".htmlgraph", help="Graph directory"
-    )
-    session_list.add_argument(
-        "--format", "-f", choices=["text", "json"], default="text", help="Output format"
-    )
-
-    # session start-info (optimized for AI agents)
-    session_start_info = session_subparsers.add_parser(
-        "start-info", help="Get comprehensive session start information (optimized)"
-    )
-    session_start_info.add_argument("--agent", default="claude", help="Agent name")
-    session_start_info.add_argument(
-        "--graph-dir", "-g", default=".htmlgraph", help="Graph directory"
-    )
-    session_start_info.add_argument(
-        "--format", "-f", choices=["text", "json"], default="text", help="Output format"
-    )
-    session_start_info.add_argument(
-        "--no-git", action="store_true", help="Skip git log"
-    )
-    session_start_info.add_argument(
-        "--git-count", type=int, default=5, help="Number of git commits to include"
-    )
-    session_start_info.add_argument(
-        "--top-n", type=int, default=3, help="Number of bottlenecks/recommendations"
-    )
-    session_start_info.add_argument(
-        "--max-agents",
-        type=int,
-        default=3,
-        help="Max agents for parallel work analysis",
-    )
-
-    # session status-report (and resume alias)
-    session_report = session_subparsers.add_parser(
-        "status-report", help="Print comprehensive session status report"
-    )
-    session_report.add_argument(
-        "--graph-dir", "-g", default=".htmlgraph", help="Graph directory"
-    )
-
-    session_resume = session_subparsers.add_parser(
-        "resume", help="Alias for status-report (Resume session context)"
-    )
-    session_resume.add_argument(
-        "--graph-dir", "-g", default=".htmlgraph", help="Graph directory"
-    )
-
-    # session dedupe
-    session_dedupe = session_subparsers.add_parser(
-        "dedupe",
-        help="Move SessionStart-only sessions into a subfolder",
-    )
-    session_dedupe.add_argument(
-        "--graph-dir", "-g", default=".htmlgraph", help="Graph directory"
-    )
-    session_dedupe.add_argument(
-        "--max-events", type=int, default=1, help="Max events to consider orphaned"
-    )
-    session_dedupe.add_argument(
-        "--move-dir", default="_orphans", help="Subfolder name under sessions/"
-    )
-    session_dedupe.add_argument(
-        "--dry-run",
-        action="store_true",
-        help="Show what would happen without moving files",
-    )
-    session_dedupe.add_argument(
-        "--no-stale-active",
-        action="store_true",
-        help="Do not mark extra active sessions as stale",
-    )
-
-    # session link
-    session_link = session_subparsers.add_parser(
-        "link", help="Link a feature to a session retroactively"
-    )
-    session_link.add_argument("session_id", help="Session ID")
-    session_link.add_argument("feature_id", help="Feature ID to link")
-    session_link.add_argument(
-        "--collection", "-c", default="features", help="Feature collection"
-    )
-    session_link.add_argument(
-        "--graph-dir", "-g", default=".htmlgraph", help="Graph directory"
-    )
-    session_link.add_argument(
-        "--bidirectional",
-        "-b",
-        action="store_true",
-        help="Also add session to feature's implemented-in edges",
-    )
-    session_link.add_argument(
-        "--format", "-f", choices=["text", "json"], default="text", help="Output format"
-    )
-
-    # session validate-attribution
-    session_validate = session_subparsers.add_parser(
-        "validate-attribution", help="Validate feature attribution and tracking"
-    )
-    session_validate.add_argument("feature_id", help="Feature ID to validate")
-    session_validate.add_argument(
-        "--collection", "-c", default="features", help="Feature collection"
-    )
-    session_validate.add_argument(
-        "--graph-dir", "-g", default=".htmlgraph", help="Graph directory"
-    )
-    session_validate.add_argument(
-        "--format", "-f", choices=["text", "json"], default="text", help="Output format"
-    )
-
-    # activity (legacy: was "track")
-    activity_parser = subparsers.add_parser(
-        "activity",
-        help="Track an activity (legacy: use 'htmlgraph track' for new features)",
-    )
-    activity_parser.add_argument("tool", help="Tool name (Edit, Bash, Read, etc.)")
-    activity_parser.add_argument("summary", help="Activity summary")
-    activity_parser.add_argument(
-        "--session", help="Session ID (uses active session if not provided)"
-    )
-    activity_parser.add_argument("--files", nargs="*", help="Files involved")
-    activity_parser.add_argument("--failed", action="store_true", help="Mark as failed")
-    activity_parser.add_argument(
-        "--graph-dir", "-g", default=".htmlgraph", help="Graph directory"
-    )
-    activity_parser.add_argument(
-        "--format", "-f", choices=["text", "json"], default="text", help="Output format"
-    )
-
# =========================================================================
|
|
3645
|
-
# Transcript Management (Claude Code Integration)
|
|
3646
|
-
# =========================================================================
|
|
3647
|
-
|
|
3648
|
-
transcript_parser = subparsers.add_parser(
|
|
3649
|
-
"transcript", help="Claude Code transcript integration"
|
|
3650
|
-
)
|
|
3651
|
-
transcript_subparsers = transcript_parser.add_subparsers(
|
|
3652
|
-
dest="transcript_command", help="Transcript command"
|
|
3653
|
-
)
|
|
3654
|
-
|
|
3655
|
-
# transcript list
|
|
3656
|
-
transcript_list = transcript_subparsers.add_parser(
|
|
3657
|
-
"list", help="List available Claude Code transcripts"
|
|
3658
|
-
)
|
|
3659
|
-
transcript_list.add_argument("--project", "-p", help="Project path to filter by")
|
|
3660
|
-
transcript_list.add_argument(
|
|
3661
|
-
"--limit", "-n", type=int, default=20, help="Maximum transcripts to show"
|
|
3662
|
-
)
|
|
3663
|
-
transcript_list.add_argument(
|
|
3664
|
-
"--format", "-f", choices=["text", "json"], default="text", help="Output format"
|
|
3665
|
-
)
|
|
3666
|
-
|
|
3667
|
-
# transcript import
|
|
3668
|
-
transcript_import = transcript_subparsers.add_parser(
|
|
3669
|
-
"import", help="Import a Claude Code transcript"
|
|
3670
|
-
)
|
|
3671
|
-
transcript_import.add_argument(
|
|
3672
|
-
"session_id", help="Claude Code session ID to import"
|
|
3673
|
-
)
|
|
3674
|
-
transcript_import.add_argument(
|
|
3675
|
-
"--to-session",
|
|
3676
|
-
help="HtmlGraph session ID to import into (creates new if not specified)",
|
|
3677
|
-
)
|
|
3678
|
-
transcript_import.add_argument("--link-feature", help="Feature ID to link to")
|
|
3679
|
-
transcript_import.add_argument(
|
|
3680
|
-
"--agent", default="claude-code", help="Agent name for new session"
|
|
3681
|
-
)
|
|
3682
|
-
transcript_import.add_argument(
|
|
3683
|
-
"--overwrite", action="store_true", help="Overwrite existing activities"
|
|
3684
|
-
)
|
|
3685
|
-
transcript_import.add_argument(
|
|
3686
|
-
"--graph-dir", "-g", default=".htmlgraph", help="Graph directory"
|
|
3687
|
-
)
|
|
3688
|
-
transcript_import.add_argument(
|
|
3689
|
-
"--format", "-f", choices=["text", "json"], default="text", help="Output format"
|
|
3690
|
-
)
|
|
3691
|
-
|
|
3692
|
-
# transcript link
|
|
3693
|
-
transcript_link = transcript_subparsers.add_parser(
|
|
3694
|
-
"link", help="Link a transcript to an HtmlGraph session"
|
|
3695
|
-
)
|
|
3696
|
-
transcript_link.add_argument("session_id", help="Claude Code session ID")
|
|
3697
|
-
transcript_link.add_argument(
|
|
3698
|
-
"--to-session", required=True, help="HtmlGraph session ID to link to"
|
|
3699
|
-
)
|
|
3700
|
-
transcript_link.add_argument(
|
|
3701
|
-
"--graph-dir", "-g", default=".htmlgraph", help="Graph directory"
|
|
3702
|
-
)
|
|
3703
|
-
transcript_link.add_argument(
|
|
3704
|
-
"--format", "-f", choices=["text", "json"], default="text", help="Output format"
|
|
3705
|
-
)
|
|
3706
|
-
|
|
3707
|
-
# transcript stats
|
|
3708
|
-
transcript_stats = transcript_subparsers.add_parser(
|
|
3709
|
-
"stats", help="Show transcript statistics for a session"
|
|
3710
|
-
)
|
|
3711
|
-
transcript_stats.add_argument("session_id", help="HtmlGraph session ID")
|
|
3712
|
-
transcript_stats.add_argument(
|
|
3713
|
-
"--graph-dir", "-g", default=".htmlgraph", help="Graph directory"
|
|
3714
|
-
)
|
|
3715
|
-
transcript_stats.add_argument(
|
|
3716
|
-
"--format", "-f", choices=["text", "json"], default="text", help="Output format"
|
|
3717
|
-
)
|
|
3718
|
-
|
|
3719
|
-
# transcript auto-link
|
|
3720
|
-
transcript_auto_link = transcript_subparsers.add_parser(
|
|
3721
|
-
"auto-link", help="Auto-link transcripts by git branch"
|
|
3722
|
-
)
|
|
3723
|
-
transcript_auto_link.add_argument(
|
|
3724
|
-
"--branch", "-b", help="Git branch (uses current if not specified)"
|
|
3725
|
-
)
|
|
3726
|
-
transcript_auto_link.add_argument("--agent", help="Filter by agent")
|
|
3727
|
-
transcript_auto_link.add_argument(
|
|
3728
|
-
"--graph-dir", "-g", default=".htmlgraph", help="Graph directory"
|
|
3729
|
-
)
|
|
3730
|
-
transcript_auto_link.add_argument(
|
|
3731
|
-
"--format", "-f", choices=["text", "json"], default="text", help="Output format"
|
|
3732
|
-
)
|
|
3733
|
-
|
|
3734
|
-
# transcript health (analytics)
|
|
3735
|
-
transcript_health = transcript_subparsers.add_parser(
|
|
3736
|
-
"health", help="Show session health metrics from transcript"
|
|
3737
|
-
)
|
|
3738
|
-
transcript_health.add_argument(
|
|
3739
|
-
"transcript_id", help="Transcript/session ID to analyze"
|
|
3740
|
-
)
|
|
3741
|
-
transcript_health.add_argument(
|
|
3742
|
-
"--graph-dir", "-g", default=".htmlgraph", help="Graph directory"
|
|
3743
|
-
)
|
|
3744
|
-
transcript_health.add_argument(
|
|
3745
|
-
"--format", "-f", choices=["text", "json"], default="text", help="Output format"
|
|
3746
|
-
)
|
|
3747
|
-
|
|
3748
|
-
# transcript patterns (analytics)
|
|
3749
|
-
transcript_patterns = transcript_subparsers.add_parser(
|
|
3750
|
-
"patterns", help="Detect workflow patterns in transcripts"
|
|
3751
|
-
)
|
|
3752
|
-
transcript_patterns.add_argument(
|
|
3753
|
-
"--transcript-id", "-t", help="Specific transcript to analyze (default: all)"
|
|
3754
|
-
)
|
|
3755
|
-
transcript_patterns.add_argument(
|
|
3756
|
-
"--min-length", type=int, default=3, help="Minimum pattern length (default: 3)"
|
|
3757
|
-
)
|
|
3758
|
-
transcript_patterns.add_argument(
|
|
3759
|
-
"--max-length", type=int, default=5, help="Maximum pattern length (default: 5)"
|
|
3760
|
-
)
|
|
3761
|
-
transcript_patterns.add_argument(
|
|
3762
|
-
"--graph-dir", "-g", default=".htmlgraph", help="Graph directory"
|
|
3763
|
-
)
|
|
3764
|
-
transcript_patterns.add_argument(
|
|
3765
|
-
"--format", "-f", choices=["text", "json"], default="text", help="Output format"
|
|
3766
|
-
)
|
|
3767
|
-
|
|
3768
|
-
# transcript transitions (analytics)
|
|
3769
|
-
transcript_transitions = transcript_subparsers.add_parser(
|
|
3770
|
-
"transitions", help="Show tool transition matrix"
|
|
3771
|
-
)
|
|
3772
|
-
transcript_transitions.add_argument(
|
|
3773
|
-
"--transcript-id", "-t", help="Specific transcript to analyze (default: all)"
|
|
3774
|
-
)
|
|
3775
|
-
transcript_transitions.add_argument(
|
|
3776
|
-
"--graph-dir", "-g", default=".htmlgraph", help="Graph directory"
|
|
3777
|
-
)
|
|
3778
|
-
transcript_transitions.add_argument(
|
|
3779
|
-
"--format", "-f", choices=["text", "json"], default="text", help="Output format"
|
|
3780
|
-
)
|
|
3781
|
-
|
|
3782
|
-
# transcript recommendations (analytics)
|
|
3783
|
-
transcript_recs = transcript_subparsers.add_parser(
|
|
3784
|
-
"recommendations", help="Get workflow improvement recommendations"
|
|
3785
|
-
)
|
|
3786
|
-
transcript_recs.add_argument(
|
|
3787
|
-
"--transcript-id", "-t", help="Specific transcript to analyze (default: all)"
|
|
3788
|
-
)
|
|
3789
|
-
transcript_recs.add_argument(
|
|
3790
|
-
"--graph-dir", "-g", default=".htmlgraph", help="Graph directory"
|
|
3791
|
-
)
|
|
3792
|
-
transcript_recs.add_argument(
|
|
3793
|
-
"--format", "-f", choices=["text", "json"], default="text", help="Output format"
|
|
3794
|
-
)
|
|
3795
|
-
|
|
3796
|
-
# transcript insights (analytics)
|
|
3797
|
-
transcript_insights = transcript_subparsers.add_parser(
|
|
3798
|
-
"insights", help="Get comprehensive transcript insights"
|
|
3799
|
-
)
|
|
3800
|
-
transcript_insights.add_argument(
|
|
3801
|
-
"--graph-dir", "-g", default=".htmlgraph", help="Graph directory"
|
|
3802
|
-
)
|
|
3803
|
-
transcript_insights.add_argument(
|
|
3804
|
-
"--format", "-f", choices=["text", "json"], default="text", help="Output format"
|
|
3805
|
-
)
|
|
3806
|
-
|
|
3807
|
-
# transcript export (HTML export)
|
|
3808
|
-
transcript_export = transcript_subparsers.add_parser(
|
|
3809
|
-
"export", help="Export transcript to HTML format"
|
|
3810
|
-
)
|
|
3811
|
-
transcript_export.add_argument(
|
|
3812
|
-
"transcript_id", help="Transcript/session ID to export"
|
|
3813
|
-
)
|
|
3814
|
-
transcript_export.add_argument(
|
|
3815
|
-
"-o", "--output", help="Output file path (prints to stdout if not specified)"
|
|
3816
|
-
)
|
|
3817
|
-
transcript_export.add_argument(
|
|
3818
|
-
"--include-thinking",
|
|
3819
|
-
action="store_true",
|
|
3820
|
-
help="Include thinking traces in output",
|
|
3821
|
-
)
|
|
3822
|
-
|
|
3823
|
-
# transcript track-stats (track-level aggregation)
|
|
3824
|
-
transcript_track = transcript_subparsers.add_parser(
|
|
3825
|
-
"track-stats", help="Get aggregated transcript stats for a track"
|
|
3826
|
-
)
|
|
3827
|
-
transcript_track.add_argument("track_id", help="Track ID to aggregate")
|
|
3828
|
-
transcript_track.add_argument(
|
|
3829
|
-
"--graph-dir", "-g", default=".htmlgraph", help="Graph directory"
|
|
3830
|
-
)
|
|
3831
|
-
transcript_track.add_argument(
|
|
3832
|
-
"--format", "-f", choices=["text", "json"], default="text", help="Output format"
|
|
3833
|
-
)
|
|
3834
|
-
|
|
3835
|
-
# transcript link-feature (link transcript to feature for parallel agent tracking)
|
|
3836
|
-
transcript_link_feature = transcript_subparsers.add_parser(
|
|
3837
|
-
"link-feature",
|
|
3838
|
-
help="Link a Claude Code transcript to a feature (for parallel agent tracking)",
|
|
3839
|
-
)
|
|
3840
|
-
transcript_link_feature.add_argument(
|
|
3841
|
-
"transcript_id", help="Claude Code transcript/agent session ID"
|
|
3842
|
-
)
|
|
3843
|
-
transcript_link_feature.add_argument(
|
|
3844
|
-
"--to-feature", "-f", required=True, help="Feature ID to link to"
|
|
3845
|
-
)
|
|
3846
|
-
transcript_link_feature.add_argument(
|
|
3847
|
-
"--graph-dir", "-g", default=".htmlgraph", help="Graph directory"
|
|
3848
|
-
)
|
|
3849
|
-
transcript_link_feature.add_argument(
|
|
3850
|
-
"--format", choices=["text", "json"], default="text", help="Output format"
|
|
3851
|
-
)
|
|
3852
|
-
-    # =========================================================================
-    # Work Management (Smart Routing)
-    # =========================================================================
-
-    # work (with subcommands)
-    work_parser = subparsers.add_parser(
-        "work", help="Work management with smart routing"
-    )
-    work_subparsers = work_parser.add_subparsers(
-        dest="work_command", help="Work command"
-    )
-
-    # work next
-    work_next = work_subparsers.add_parser(
-        "next", help="Get next best task using smart routing"
-    )
-    work_next.add_argument(
-        "--agent",
-        default=os.environ.get("HTMLGRAPH_AGENT") or "claude",
-        help="Agent ID (default: $HTMLGRAPH_AGENT or 'claude')",
-    )
-    work_next.add_argument(
-        "--graph-dir", "-g", default=".htmlgraph", help="Graph directory"
-    )
-    work_next.add_argument(
-        "--auto-claim", action="store_true", help="Automatically claim the task"
-    )
-    work_next.add_argument(
-        "--min-score",
-        type=float,
-        default=20.0,
-        help="Minimum routing score (default: 20.0)",
-    )
-    work_next.add_argument(
-        "--format", "-f", choices=["text", "json"], default="text", help="Output format"
-    )
-
-    # work queue
-    work_queue = work_subparsers.add_parser("queue", help="Get prioritized work queue")
-    work_queue.add_argument(
-        "--agent",
-        default=os.environ.get("HTMLGRAPH_AGENT") or "claude",
-        help="Agent ID (default: $HTMLGRAPH_AGENT or 'claude')",
-    )
-    work_queue.add_argument(
-        "--graph-dir", "-g", default=".htmlgraph", help="Graph directory"
-    )
-    work_queue.add_argument(
-        "--limit",
-        "-l",
-        type=int,
-        default=10,
-        help="Maximum tasks to show (default: 10)",
-    )
-    work_queue.add_argument(
-        "--min-score",
-        type=float,
-        default=20.0,
-        help="Minimum routing score (default: 20.0)",
-    )
-    work_queue.add_argument(
-        "--format", "-f", choices=["text", "json"], default="text", help="Output format"
-    )
-
-    # agent (with subcommands)
-    agent_parser = subparsers.add_parser("agent", help="Agent management")
-    agent_subparsers = agent_parser.add_subparsers(
-        dest="agent_command", help="Agent command"
-    )
-
-    # agent list
-    agent_list = agent_subparsers.add_parser("list", help="List all registered agents")
-    agent_list.add_argument(
-        "--graph-dir", "-g", default=".htmlgraph", help="Graph directory"
-    )
-    agent_list.add_argument(
-        "--active-only", action="store_true", help="Only show active agents"
-    )
-    agent_list.add_argument(
-        "--format", "-f", choices=["text", "json"], default="text", help="Output format"
-    )
-
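The removed `work next` and `work queue` commands share two defaults worth noting: the acting agent comes from `$HTMLGRAPH_AGENT` with a fallback to `"claude"`, and candidate tasks are filtered by a minimum routing score of 20.0. A small standalone reproduction of just those defaults (not the htmlgraph routing logic):

```python
# Sketch of the agent/score defaults used by the removed "work next" wiring.
# Standalone argparse reproduction; not the htmlgraph package itself.
import argparse
import os

parser = argparse.ArgumentParser(prog="htmlgraph")
sub = parser.add_subparsers(dest="command")
work = sub.add_parser("work")
work_sub = work.add_subparsers(dest="work_command")

work_next = work_sub.add_parser("next")
work_next.add_argument("--agent", default=os.environ.get("HTMLGRAPH_AGENT") or "claude")
work_next.add_argument("--auto-claim", action="store_true")
work_next.add_argument("--min-score", type=float, default=20.0)

# With HTMLGRAPH_AGENT unset, the agent falls back to "claude" and the
# routing threshold to 20.0, exactly as the defaults above declare.
args = parser.parse_args(["work", "next", "--auto-claim"])
print(args.agent, args.min_score, args.auto_claim)
```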
-    # =========================================================================
-    # Feature Management
-    # =========================================================================
-
-    # feature (with subcommands)
-    feature_parser = subparsers.add_parser("feature", help="Feature management")
-    feature_subparsers = feature_parser.add_subparsers(
-        dest="feature_command", help="Feature command"
-    )
-
-    # feature create
-    feature_create = feature_subparsers.add_parser(
-        "create", help="Create a new feature"
-    )
-    feature_create.add_argument("title", help="Feature title")
-    feature_create.add_argument(
-        "--collection", "-c", default="features", help="Collection (features, bugs)"
-    )
-    feature_create.add_argument("--description", "-d", help="Description")
-    feature_create.add_argument(
-        "--priority",
-        "-p",
-        default="medium",
-        choices=["low", "medium", "high", "critical"],
-        help="Priority",
-    )
-    feature_create.add_argument("--steps", nargs="*", help="Implementation steps")
-    feature_create.add_argument(
-        "--agent",
-        default=os.environ.get("HTMLGRAPH_AGENT") or "cli",
-        help="Agent name for attribution (default: $HTMLGRAPH_AGENT or 'cli')",
-    )
-    feature_create.add_argument(
-        "--graph-dir", "-g", default=".htmlgraph", help="Graph directory"
-    )
-    feature_create.add_argument(
-        "--format", "-f", choices=["text", "json"], default="text", help="Output format"
-    )
-
-    # feature start
-    feature_start = feature_subparsers.add_parser(
-        "start", help="Start working on a feature"
-    )
-    feature_start.add_argument("id", help="Feature ID")
-    feature_start.add_argument(
-        "--collection", "-c", default="features", help="Collection (features, bugs)"
-    )
-    feature_start.add_argument(
-        "--agent",
-        default=os.environ.get("HTMLGRAPH_AGENT") or "cli",
-        help="Agent name for attribution (default: $HTMLGRAPH_AGENT or 'cli')",
-    )
-    feature_start.add_argument(
-        "--graph-dir", "-g", default=".htmlgraph", help="Graph directory"
-    )
-    feature_start.add_argument(
-        "--format", "-f", choices=["text", "json"], default="text", help="Output format"
-    )
-
-    # feature complete
-    feature_complete = feature_subparsers.add_parser(
-        "complete", help="Mark feature as complete"
-    )
-    feature_complete.add_argument("id", help="Feature ID")
-    feature_complete.add_argument(
-        "--collection", "-c", default="features", help="Collection (features, bugs)"
-    )
-    feature_complete.add_argument(
-        "--agent",
-        default=os.environ.get("HTMLGRAPH_AGENT") or "cli",
-        help="Agent name for attribution (default: $HTMLGRAPH_AGENT or 'cli')",
-    )
-    feature_complete.add_argument(
-        "--graph-dir", "-g", default=".htmlgraph", help="Graph directory"
-    )
-    feature_complete.add_argument(
-        "--format", "-f", choices=["text", "json"], default="text", help="Output format"
-    )
-
-    # feature primary
-    feature_primary = feature_subparsers.add_parser(
-        "primary", help="Set primary feature"
-    )
-    feature_primary.add_argument("id", help="Feature ID")
-    feature_primary.add_argument(
-        "--collection", "-c", default="features", help="Collection (features, bugs)"
-    )
-    feature_primary.add_argument(
-        "--agent",
-        default=os.environ.get("HTMLGRAPH_AGENT") or "cli",
-        help="Agent name for attribution (default: $HTMLGRAPH_AGENT or 'cli')",
-    )
-    feature_primary.add_argument(
-        "--graph-dir", "-g", default=".htmlgraph", help="Graph directory"
-    )
-    feature_primary.add_argument(
-        "--format", "-f", choices=["text", "json"], default="text", help="Output format"
-    )
-
-    # feature claim
-    feature_claim = feature_subparsers.add_parser("claim", help="Claim a feature")
-    feature_claim.add_argument("id", help="Feature ID")
-    feature_claim.add_argument(
-        "--collection", "-c", default="features", help="Collection (features, bugs)"
-    )
-    feature_claim.add_argument(
-        "--agent",
-        default=os.environ.get("HTMLGRAPH_AGENT") or "cli",
-        help="Agent name for attribution (default: $HTMLGRAPH_AGENT or 'cli')",
-    )
-    feature_claim.add_argument(
-        "--graph-dir", "-g", default=".htmlgraph", help="Graph directory"
-    )
-    feature_claim.add_argument(
-        "--format", "-f", choices=["text", "json"], default="text", help="Output format"
-    )
-
-    # feature release
-    feature_release = feature_subparsers.add_parser(
-        "release", help="Release a feature claim"
-    )
-    feature_release.add_argument("id", help="Feature ID")
-    feature_release.add_argument(
-        "--collection", "-c", default="features", help="Collection (features, bugs)"
-    )
-    feature_release.add_argument(
-        "--agent",
-        default=os.environ.get("HTMLGRAPH_AGENT") or "cli",
-        help="Agent name for attribution (default: $HTMLGRAPH_AGENT or 'cli')",
-    )
-    feature_release.add_argument(
-        "--graph-dir", "-g", default=".htmlgraph", help="Graph directory"
-    )
-    feature_release.add_argument(
-        "--format", "-f", choices=["text", "json"], default="text", help="Output format"
-    )
-
-    # feature auto-release
-    feature_auto_release = feature_subparsers.add_parser(
-        "auto-release", help="Release all features claimed by agent"
-    )
-    feature_auto_release.add_argument(
-        "--agent",
-        default=os.environ.get("HTMLGRAPH_AGENT") or "cli",
-        help="Agent name for attribution (default: $HTMLGRAPH_AGENT or 'cli')",
-    )
-    feature_auto_release.add_argument(
-        "--graph-dir", "-g", default=".htmlgraph", help="Graph directory"
-    )
-    feature_auto_release.add_argument(
-        "--format", "-f", choices=["text", "json"], default="text", help="Output format"
-    )
-
-    # feature list
-    feature_list = feature_subparsers.add_parser("list", help="List features")
-    feature_list.add_argument("--status", "-s", help="Filter by status")
-    feature_list.add_argument(
-        "--collection", "-c", default="features", help="Collection (features, bugs)"
-    )
-    feature_list.add_argument(
-        "--graph-dir", "-g", default=".htmlgraph", help="Graph directory"
-    )
-    # Note: --format flag is inherited from global parser (line 3312)
-
-    # feature step-complete
-    feature_step_complete = feature_subparsers.add_parser(
-        "step-complete", help="Mark feature step(s) as complete"
-    )
-    feature_step_complete.add_argument("id", help="Feature ID")
-    feature_step_complete.add_argument(
-        "steps",
-        nargs="+",
-        help="Step index(es) to mark complete (0-based, supports: 0 1 2 or 0,1,2)",
-    )
-    feature_step_complete.add_argument(
-        "--collection", "-c", default="features", help="Collection (features, bugs)"
-    )
-    feature_step_complete.add_argument(
-        "--agent",
-        default=os.environ.get("HTMLGRAPH_AGENT") or "cli",
-        help="Agent name for attribution (default: $HTMLGRAPH_AGENT or 'cli')",
-    )
-    feature_step_complete.add_argument(
-        "--graph-dir", "-g", default=".htmlgraph", help="Graph directory"
-    )
-    feature_step_complete.add_argument(
-        "--format", "-f", choices=["text", "json"], default="text", help="Output format"
-    )
-    feature_step_complete.add_argument(
-        "--host", default="localhost", help="API host (default: localhost)"
-    )
-    feature_step_complete.add_argument(
-        "--port", type=int, default=8080, help="API port (default: 8080)"
-    )
-
-    # feature delete
-    feature_delete = feature_subparsers.add_parser("delete", help="Delete a feature")
-    feature_delete.add_argument("id", help="Feature ID to delete")
-    feature_delete.add_argument(
-        "--collection", "-c", default="features", help="Collection (features, bugs)"
-    )
-    feature_delete.add_argument(
-        "--yes", "-y", action="store_true", help="Skip confirmation prompt"
-    )
-    feature_delete.add_argument(
-        "--graph-dir", "-g", default=".htmlgraph", help="Graph directory"
-    )
-    feature_delete.add_argument(
-        "--format", "-f", choices=["text", "json"], default="text", help="Output format"
-    )
-
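The removed `feature create` parser constrains `--priority` to four values and accepts free-form implementation steps through `nargs="*"`. A standalone sketch of how such an invocation parses (the feature title and step names below are invented):

```python
# Standalone sketch of the removed "feature create" flags; illustrative only,
# not the htmlgraph package itself.
import argparse
import os

parser = argparse.ArgumentParser(prog="htmlgraph")
sub = parser.add_subparsers(dest="command")
feature = sub.add_parser("feature")
feature_sub = feature.add_subparsers(dest="feature_command")

create = feature_sub.add_parser("create")
create.add_argument("title")
create.add_argument("--priority", "-p", default="medium",
                    choices=["low", "medium", "high", "critical"])
create.add_argument("--steps", nargs="*")
create.add_argument("--agent", default=os.environ.get("HTMLGRAPH_AGENT") or "cli")

args = parser.parse_args(
    ["feature", "create", "Add cost alerts", "-p", "high",
     "--steps", "design", "implement", "test"]
)
print(args.title, args.priority, args.steps)  # steps parses to a list of strings
```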
-    # =========================================================================
-    # Track Management (Conductor-Style Planning)
-    # =========================================================================
-
-    # track (with subcommands)
-    track_parser = subparsers.add_parser(
-        "track", help="Track management (Conductor-style planning)"
-    )
-    track_subparsers = track_parser.add_subparsers(
-        dest="track_command", help="Track command"
-    )
-
-    # track new
-    track_new = track_subparsers.add_parser("new", help="Create a new track")
-    track_new.add_argument("title", help="Track title")
-    track_new.add_argument("--description", "-d", help="Track description")
-    track_new.add_argument(
-        "--priority",
-        "-p",
-        default="medium",
-        choices=["low", "medium", "high", "critical"],
-        help="Priority",
-    )
-    track_new.add_argument(
-        "--graph-dir", "-g", default=".htmlgraph", help="Graph directory"
-    )
-    track_new.add_argument(
-        "--format", "-f", choices=["text", "json"], default="text", help="Output format"
-    )
-
-    # track list
-    track_list = track_subparsers.add_parser("list", help="List all tracks")
-    track_list.add_argument(
-        "--graph-dir", "-g", default=".htmlgraph", help="Graph directory"
-    )
-    track_list.add_argument(
-        "--format", "-f", choices=["text", "json"], default="text", help="Output format"
-    )
-
-    # track spec
-    track_spec = track_subparsers.add_parser("spec", help="Create a spec for a track")
-    track_spec.add_argument("track_id", help="Track ID")
-    track_spec.add_argument("title", help="Spec title")
-    track_spec.add_argument("--overview", "-o", help="Spec overview")
-    track_spec.add_argument("--context", "-c", help="Context/rationale")
-    track_spec.add_argument("--author", "-a", default="claude-code", help="Spec author")
-    track_spec.add_argument(
-        "--graph-dir", "-g", default=".htmlgraph", help="Graph directory"
-    )
-    track_spec.add_argument(
-        "--format", "-f", choices=["text", "json"], default="text", help="Output format"
-    )
-
-    # track plan
-    track_plan = track_subparsers.add_parser("plan", help="Create a plan for a track")
-    track_plan.add_argument("track_id", help="Track ID")
-    track_plan.add_argument("title", help="Plan title")
-    track_plan.add_argument(
-        "--graph-dir", "-g", default=".htmlgraph", help="Graph directory"
-    )
-    track_plan.add_argument(
-        "--format", "-f", choices=["text", "json"], default="text", help="Output format"
-    )
-
-    # track delete
-    track_delete = track_subparsers.add_parser("delete", help="Delete a track")
-    track_delete.add_argument("track_id", help="Track ID to delete")
-    track_delete.add_argument(
-        "--graph-dir", "-g", default=".htmlgraph", help="Graph directory"
-    )
-    track_delete.add_argument(
-        "--format", "-f", choices=["text", "json"], default="text", help="Output format"
-    )
-
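The removed `track spec` subcommand takes two positionals, `track_id` then `title`, and defaults the spec author to `claude-code`. A standalone sketch of that layout (the track ID and titles are placeholders, not real htmlgraph data):

```python
# Standalone sketch of the removed "track spec" positional/option layout;
# values are placeholders and this is not the htmlgraph package itself.
import argparse

parser = argparse.ArgumentParser(prog="htmlgraph")
sub = parser.add_subparsers(dest="command")
track = sub.add_parser("track")
track_sub = track.add_subparsers(dest="track_command")

spec = track_sub.add_parser("spec")
spec.add_argument("track_id")
spec.add_argument("title")
spec.add_argument("--overview", "-o")
spec.add_argument("--context", "-c")
spec.add_argument("--author", "-a", default="claude-code")

args = parser.parse_args(
    ["track", "spec", "TRACK-1", "Auth overhaul", "-o", "Rework login flow"]
)
print(args.track_id, args.title, args.author)  # author defaults to "claude-code"
```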
-    # =========================================================================
-    # Analytics
-    # =========================================================================
-
-    # analytics
-    analytics_parser = subparsers.add_parser(
-        "analytics", help="Work type analytics and project health metrics"
-    )
-    analytics_parser.add_argument(
-        "--session-id", "-s", help="Analyze specific session ID"
-    )
-    analytics_parser.add_argument(
-        "--recent", "-r", type=int, help="Analyze N recent sessions"
-    )
-    analytics_parser.add_argument(
-        "--agent", default="cli", help="Agent name for SDK initialization"
-    )
-    analytics_parser.add_argument(
-        "--graph-dir", "-g", default=".htmlgraph", help="Graph directory"
-    )
-
-    # =========================================================================
-    # Events & Analytics Index
-    # =========================================================================
-
-    events_parser = subparsers.add_parser("events", help="Event log utilities")
-    events_subparsers = events_parser.add_subparsers(
-        dest="events_command", help="Events command"
-    )
-
-    events_export = events_subparsers.add_parser(
-        "export-sessions",
-        help="Export session HTML activity logs to JSONL under .htmlgraph/events/",
-    )
-    events_export.add_argument(
-        "--graph-dir", "-g", default=".htmlgraph", help="Graph directory"
-    )
-    events_export.add_argument(
-        "--overwrite", action="store_true", help="Overwrite existing JSONL files"
-    )
-    events_export.add_argument(
-        "--include-subdirs",
-        action="store_true",
-        help="Include subdirectories like sessions/_orphans/",
-    )
-
-    index_parser = subparsers.add_parser("index", help="Analytics index commands")
-    index_subparsers = index_parser.add_subparsers(
-        dest="index_command", help="Index command"
-    )
-
-    index_rebuild = index_subparsers.add_parser(
-        "rebuild",
-        help="Rebuild .htmlgraph/index.sqlite from .htmlgraph/events/*.jsonl",
-    )
-    index_rebuild.add_argument(
-        "--graph-dir", "-g", default=".htmlgraph", help="Graph directory"
-    )
-
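Taken together, the removed `events export-sessions` and `index rebuild` commands describe a JSONL-to-SQLite pipeline: per-session activity is exported as JSONL under `.htmlgraph/events/`, and `.htmlgraph/index.sqlite` is rebuilt from those files. The actual rebuild code is not part of this hunk; the following is only a generic sketch of that kind of step, with an invented table name and schema:

```python
# Generic JSONL -> SQLite rebuild sketch. Table name and columns are invented
# for illustration; this is not htmlgraph's actual index schema or code.
import json
import sqlite3
from pathlib import Path

def rebuild_index(graph_dir: str = ".htmlgraph") -> None:
    events_dir = Path(graph_dir) / "events"
    db_path = Path(graph_dir) / "index.sqlite"
    conn = sqlite3.connect(db_path)
    conn.execute("DROP TABLE IF EXISTS events")
    conn.execute("CREATE TABLE events (session_id TEXT, payload TEXT)")
    for jsonl_file in sorted(events_dir.glob("*.jsonl")):
        with jsonl_file.open() as fh:
            for line in fh:
                record = json.loads(line)
                conn.execute(
                    "INSERT INTO events VALUES (?, ?)",
                    (jsonl_file.stem, json.dumps(record)),
                )
    conn.commit()
    conn.close()
```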
-    # watch
-    watch_parser = subparsers.add_parser(
-        "watch", help="Watch file changes and log events"
-    )
-    watch_parser.add_argument(
-        "--root", "-r", default=".", help="Root directory to watch"
-    )
-    watch_parser.add_argument(
-        "--graph-dir", "-g", default=".htmlgraph", help="Graph directory"
-    )
-    watch_parser.add_argument(
-        "--session-id", help="Session ID (defaults to deduped active session)"
-    )
-    watch_parser.add_argument(
-        "--agent", default="codex", help="Agent name for the watcher"
-    )
-    watch_parser.add_argument(
-        "--interval", type=float, default=2.0, help="Polling interval seconds"
-    )
-    watch_parser.add_argument(
-        "--batch-seconds", type=float, default=5.0, help="Batch window seconds"
-    )
-
-    # git-event
-    git_event_parser = subparsers.add_parser(
-        "git-event", help="Log Git events (commit, checkout, merge, push)"
-    )
-    git_event_parser.add_argument(
-        "event_type",
-        choices=["commit", "checkout", "merge", "push"],
-        help="Type of Git event",
-    )
-    git_event_parser.add_argument(
-        "args",
-        nargs="*",
-        help="Event-specific args (checkout: old new flag; merge: squash_flag; push: remote_name remote_url)",
-    )
-
-    # mcp
-    mcp_parser = subparsers.add_parser("mcp", help="Minimal MCP server (stdio)")
-    mcp_subparsers = mcp_parser.add_subparsers(dest="mcp_command", help="MCP command")
-    mcp_serve = mcp_subparsers.add_parser("serve", help="Serve MCP over stdio")
-    mcp_serve.add_argument(
-        "--graph-dir", "-g", default=".htmlgraph", help="Graph directory"
-    )
-    mcp_serve.add_argument(
-        "--agent", default="mcp", help="Agent name for session attribution"
-    )
-
-    # setup
-    setup_parser = subparsers.add_parser(
-        "setup", help="Set up HtmlGraph for AI CLI platforms"
-    )
-    setup_subparsers = setup_parser.add_subparsers(
-        dest="setup_command", help="Platform to set up"
-    )
-
-    setup_claude = setup_subparsers.add_parser("claude", help="Set up for Claude Code")
-    setup_claude.add_argument(
-        "--auto-install",
-        action="store_true",
-        help="Automatically install when possible",
-    )
-
-    setup_codex = setup_subparsers.add_parser("codex", help="Set up for Codex CLI")
-    setup_codex.add_argument(
-        "--auto-install",
-        action="store_true",
-        help="Automatically install when possible",
-    )
-
-    setup_gemini = setup_subparsers.add_parser("gemini", help="Set up for Gemini CLI")
-    setup_gemini.add_argument(
-        "--auto-install",
-        action="store_true",
-        help="Automatically install when possible",
-    )
-
-    setup_all_parser = setup_subparsers.add_parser(
-        "all", help="Set up for all supported platforms"
-    )
-    setup_all_parser.add_argument(
-        "--auto-install",
-        action="store_true",
-        help="Automatically install when possible",
-    )
-
-    # publish
-    publish_parser = subparsers.add_parser(
-        "publish", help="Build and publish package to PyPI"
-    )
-    publish_parser.add_argument(
-        "--dry-run", action="store_true", help="Build only, do not publish"
-    )
-
-    # sync-docs
-    sync_docs_parser = subparsers.add_parser(
-        "sync-docs", help="Synchronize AI agent memory files across platforms"
-    )
-    sync_docs_parser.add_argument(
-        "--check",
-        action="store_true",
-        help="Check if files are synchronized (no changes)",
-    )
-    sync_docs_parser.add_argument(
-        "--generate",
-        metavar="PLATFORM",
-        help="Generate a platform-specific file (gemini, claude, codex)",
-    )
-    sync_docs_parser.add_argument(
-        "--project-root",
-        type=str,
-        help="Project root directory (default: current directory)",
-    )
-    sync_docs_parser.add_argument(
-        "--force", action="store_true", help="Overwrite existing files when generating"
-    )
-
-    # deploy
-    deploy_parser = subparsers.add_parser(
-        "deploy", help="Flexible deployment system for packaging and publishing"
-    )
-    deploy_subparsers = deploy_parser.add_subparsers(
-        dest="deploy_command", help="Deploy command"
-    )
-
-    # deploy init
-    deploy_init = deploy_subparsers.add_parser(
-        "init", help="Initialize deployment configuration"
-    )
-    deploy_init.add_argument(
-        "--output", "-o", help="Output file path (default: htmlgraph-deploy.toml)"
-    )
-    deploy_init.add_argument(
-        "--force", action="store_true", help="Overwrite existing configuration"
-    )
-
-    # deploy run
-    deploy_run = deploy_subparsers.add_parser("run", help="Run deployment process")
-    deploy_run.add_argument(
-        "--config", "-c", help="Configuration file (default: htmlgraph-deploy.toml)"
-    )
-    deploy_run.add_argument(
-        "--dry-run",
-        action="store_true",
-        help="Show what would happen without executing",
-    )
-    deploy_run.add_argument(
-        "--docs-only", action="store_true", help="Only commit and push to git"
-    )
-    deploy_run.add_argument(
-        "--build-only", action="store_true", help="Only build package"
-    )
-    deploy_run.add_argument(
-        "--skip-pypi", action="store_true", help="Skip PyPI publishing"
-    )
-    deploy_run.add_argument(
-        "--skip-plugins", action="store_true", help="Skip plugin updates"
-    )
-
-    # orchestrator (with subcommands)
-    orchestrator_parser = subparsers.add_parser(
-        "orchestrator", help="Orchestrator mode management"
-    )
-    orchestrator_subparsers = orchestrator_parser.add_subparsers(
-        dest="orchestrator_command", help="Orchestrator command"
-    )
-
-    # orchestrator enable
-    orchestrator_enable = orchestrator_subparsers.add_parser(
-        "enable", help="Enable orchestrator mode"
-    )
-    orchestrator_enable.add_argument(
-        "--level",
-        "-l",
-        default="strict",
-        choices=["strict", "guidance"],
-        help="Enforcement level (default: strict)",
-    )
-    orchestrator_enable.add_argument(
-        "--graph-dir", "-g", default=".htmlgraph", help="Graph directory"
-    )
-
-    # orchestrator disable
-    orchestrator_disable = orchestrator_subparsers.add_parser(
-        "disable", help="Disable orchestrator mode"
-    )
-    orchestrator_disable.add_argument(
-        "--graph-dir", "-g", default=".htmlgraph", help="Graph directory"
-    )
-
-    # orchestrator status
-    orchestrator_status = orchestrator_subparsers.add_parser(
-        "status", help="Show orchestrator mode status"
-    )
-    orchestrator_status.add_argument(
-        "--graph-dir", "-g", default=".htmlgraph", help="Graph directory"
-    )
-
-    # install-gemini-extension
-    subparsers.add_parser(
-        "install-gemini-extension",
-        help="Install the Gemini CLI extension from the bundled package",
-    )
-
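The removed `git-event` command takes one `event_type` from a fixed choice list plus free-form positional arguments whose meaning depends on the event, as the help string spells out. A standalone sketch of how a checkout event would parse (the SHAs are placeholders):

```python
# Standalone sketch of the removed "git-event" positional wiring; values are
# placeholders and this is not the htmlgraph package itself.
import argparse

parser = argparse.ArgumentParser(prog="htmlgraph")
sub = parser.add_subparsers(dest="command")
git_event = sub.add_parser("git-event")
git_event.add_argument("event_type", choices=["commit", "checkout", "merge", "push"])
git_event.add_argument("args", nargs="*")

# For a checkout, the help text documents the trailing args as: old new flag.
parsed = parser.parse_args(["git-event", "checkout", "<old-sha>", "<new-sha>", "1"])
print(parsed.event_type, parsed.args)  # -> checkout ['<old-sha>', '<new-sha>', '1']
```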
-    args = parser.parse_args()
-
-    if args.command == "serve":
-        cmd_serve(args)
-    elif args.command == "init":
-        cmd_init(args)
-    elif args.command == "install-hooks":
-        cmd_install_hooks(args)
-    elif args.command == "status":
-        cmd_status(args)
-    elif args.command == "debug":
-        cmd_debug(args)
-    elif args.command == "query":
-        cmd_query(args)
-    elif args.command == "session":
-        if args.session_command == "start":
-            cmd_session_start(args)
-        elif args.session_command == "end":
-            cmd_session_end(args)
-        elif args.session_command == "list":
-            cmd_session_list(args)
-        elif args.session_command == "start-info":
-            cmd_session_start_info(args)
-        elif (
-            args.session_command == "status-report" or args.session_command == "resume"
-        ):
-            cmd_session_status_report(args)
-        elif args.session_command == "dedupe":
-            cmd_session_dedupe(args)
-        elif args.session_command == "link":
-            cmd_session_link(args)
-        elif args.session_command == "validate-attribution":
-            cmd_session_validate_attribution(args)
-        elif args.session_command == "handoff":
-            cmd_session_handoff(args)
-        else:
-            session_parser.print_help()
-            sys.exit(1)
-    elif args.command == "activity":
-        # Legacy activity tracking command
-        cmd_track(args)
-    elif args.command == "transcript":
-        # Claude Code transcript integration
-        if args.transcript_command == "list":
-            cmd_transcript_list(args)
-        elif args.transcript_command == "import":
-            cmd_transcript_import(args)
-        elif args.transcript_command == "link":
-            cmd_transcript_link(args)
-        elif args.transcript_command == "stats":
-            cmd_transcript_stats(args)
-        elif args.transcript_command == "auto-link":
-            cmd_transcript_auto_link(args)
-        elif args.transcript_command == "health":
-            cmd_transcript_health(args)
-        elif args.transcript_command == "patterns":
-            cmd_transcript_patterns(args)
-        elif args.transcript_command == "transitions":
-            cmd_transcript_transitions(args)
-        elif args.transcript_command == "recommendations":
-            cmd_transcript_recommendations(args)
-        elif args.transcript_command == "insights":
-            cmd_transcript_insights(args)
-        elif args.transcript_command == "export":
-            cmd_transcript_export(args)
-        elif args.transcript_command == "track-stats":
-            cmd_transcript_track_stats(args)
-        elif args.transcript_command == "link-feature":
-            cmd_transcript_link_feature(args)
-        else:
-            transcript_parser.print_help()
-            sys.exit(1)
-    elif args.command == "track":
-        # New track management commands
-        if args.track_command == "new":
-            cmd_track_new(args)
-        elif args.track_command == "list":
-            cmd_track_list(args)
-        elif args.track_command == "spec":
-            cmd_track_spec(args)
-        elif args.track_command == "plan":
-            cmd_track_plan(args)
-        elif args.track_command == "delete":
-            cmd_track_delete(args)
-        else:
-            track_parser.print_help()
-            sys.exit(1)
-    elif args.command == "work":
-        # Work management with smart routing
-        if args.work_command == "next":
-            cmd_work_next(args)
-        elif args.work_command == "queue":
-            cmd_work_queue(args)
-        else:
-            work_parser.print_help()
-            sys.exit(1)
-    elif args.command == "agent":
-        # Agent management
-        if args.agent_command == "list":
-            cmd_agent_list(args)
-        else:
-            agent_parser.print_help()
-            sys.exit(1)
-    elif args.command == "feature":
-        if args.feature_command == "create":
-            cmd_feature_create(args)
-        elif args.feature_command == "start":
-            cmd_feature_start(args)
-        elif args.feature_command == "complete":
-            cmd_feature_complete(args)
-        elif args.feature_command == "primary":
-            cmd_feature_primary(args)
-        elif args.feature_command == "claim":
-            cmd_feature_claim(args)
-        elif args.feature_command == "release":
-            cmd_feature_release(args)
-        elif args.feature_command == "auto-release":
-            cmd_feature_auto_release(args)
-        elif args.feature_command == "list":
-            cmd_feature_list(args)
-        elif args.feature_command == "step-complete":
-            cmd_feature_step_complete(args)
-        elif args.feature_command == "delete":
-            cmd_feature_delete(args)
-        else:
-            feature_parser.print_help()
-            sys.exit(1)
-    elif args.command == "analytics":
-        from htmlgraph.cli_analytics import cmd_analytics
-
-        cmd_analytics(args)
-    elif args.command == "events":
-        if args.events_command == "export-sessions":
-            cmd_events_export(args)
-        else:
-            events_parser.print_help()
-            sys.exit(1)
-    elif args.command == "index":
-        if args.index_command == "rebuild":
-            cmd_index_rebuild(args)
-        else:
-            index_parser.print_help()
-            sys.exit(1)
-    elif args.command == "watch":
-        cmd_watch(args)
-    elif args.command == "git-event":
-        cmd_git_event(args)
-    elif args.command == "mcp":
-        if args.mcp_command == "serve":
-            cmd_mcp_serve(args)
-        else:
-            mcp_parser.print_help()
-            sys.exit(1)
-    elif args.command == "setup":
-        from htmlgraph.setup import (
-            setup_all as setup_all_fn,
-        )
-        from htmlgraph.setup import (
-            setup_claude as setup_claude_fn,
-        )
-        from htmlgraph.setup import (
-            setup_codex as setup_codex_fn,
-        )
-        from htmlgraph.setup import (
-            setup_gemini as setup_gemini_fn,
-        )
-
-        if args.setup_command == "claude":
-            setup_claude_fn(args)
-        elif args.setup_command == "codex":
-            setup_codex_fn(args)
-        elif args.setup_command == "gemini":
-            setup_gemini_fn(args)
-        elif args.setup_command == "all":
-            setup_all_fn(args)
-        else:
-            setup_parser.print_help()
-            sys.exit(1)
-    elif args.command == "publish":
-        cmd_publish(args)
-    elif args.command == "sync-docs":
-        cmd_sync_docs(args)
-    elif args.command == "deploy":
-        if args.deploy_command == "init":
-            cmd_deploy_init(args)
-        elif args.deploy_command == "run":
-            cmd_deploy_run(args)
-        else:
-            deploy_parser.print_help()
-            sys.exit(1)
-    elif args.command == "orchestrator":
-        if args.orchestrator_command == "enable":
-            cmd_orchestrator_enable(args)
-        elif args.orchestrator_command == "disable":
-            cmd_orchestrator_disable(args)
-        elif args.orchestrator_command == "status":
-            cmd_orchestrator_status(args)
-        else:
-            orchestrator_parser.print_help()
-            sys.exit(1)
-    elif args.command == "install-gemini-extension":
-        cmd_install_gemini_extension(args)
-    else:
-        parser.print_help()
-        sys.exit(1)
-
-
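Every branch in the removed dispatch above hands the parsed `argparse.Namespace` to a `cmd_*` handler, and each subcommand group falls back to `print_help()` followed by `sys.exit(1)` when nothing matches. A minimal sketch of that convention (the handler body here is invented and is not htmlgraph's actual `cmd_status`):

```python
# Minimal sketch of the dispatch convention used above: handlers take the
# parsed Namespace and the caller exits non-zero when nothing matches.
# The handler body is invented; it is not htmlgraph's cmd_status().
import argparse
import sys

def cmd_status(args: argparse.Namespace) -> None:
    print(f"graph dir: {getattr(args, 'graph_dir', '.htmlgraph')}")

parser = argparse.ArgumentParser(prog="htmlgraph")
subparsers = parser.add_subparsers(dest="command")
subparsers.add_parser("status").add_argument("--graph-dir", default=".htmlgraph")

args = parser.parse_args(["status"])
if args.command == "status":
    cmd_status(args)
else:
    parser.print_help()
    sys.exit(1)
```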
-# =============================================================================
-# Deployment Commands
-# =============================================================================
-
-
-def cmd_deploy_init(args: argparse.Namespace) -> None:
-    """Initialize deployment configuration."""
-    from htmlgraph.deploy import create_deployment_config_template
-
-    output_path = Path(args.output or "htmlgraph-deploy.toml")
-
-    if output_path.exists() and not args.force:
-        print(
-            f"Error: {output_path} already exists. Use --force to overwrite.",
-            file=sys.stderr,
-        )
-        sys.exit(1)
-
-    create_deployment_config_template(output_path)
-
-
-def cmd_deploy_run(args: argparse.Namespace) -> None:
-    """Run deployment process."""
-    from htmlgraph.deploy import Deployer, DeploymentConfig
-
-    # Load configuration
-    config_path = Path(args.config or "htmlgraph-deploy.toml")
-
-    if not config_path.exists():
-        print(f"Error: Configuration file not found: {config_path}", file=sys.stderr)
-        print(
-            "Run 'htmlgraph deploy init' to create a template configuration.",
-            file=sys.stderr,
-        )
-        sys.exit(1)
-
-    try:
-        config = DeploymentConfig.from_toml(config_path)
-    except Exception as e:
-        print(f"Error loading configuration: {e}", file=sys.stderr)
-        sys.exit(1)
-
-    # Handle shortcut flags
-    skip_steps = []
-    only_steps = None
-
-    if args.docs_only:
-        only_steps = ["git-push"]
-    elif args.build_only:
-        only_steps = ["build"]
-    elif args.skip_pypi:
-        skip_steps.append("pypi-publish")
-    elif args.skip_plugins:
-        skip_steps.append("update-plugins")
-
-    # Create deployer
-    deployer = Deployer(
-        config=config,
-        dry_run=args.dry_run,
-        skip_steps=skip_steps,
-        only_steps=only_steps,
-    )
-
-    # Run deployment
-    deployer.deploy()
-
-
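One detail of the removed `cmd_deploy_run`: the shortcut flags are resolved in an `if/elif` chain, so `--docs-only` takes precedence over `--build-only`, and `--skip-pypi` and `--skip-plugins` cannot both take effect in a single run. A tiny standalone reproduction of that precedence (plain keyword arguments, not the htmlgraph code):

```python
# Standalone reproduction of the removed shortcut-flag precedence; not the
# htmlgraph implementation itself.
def resolve_steps(docs_only=False, build_only=False, skip_pypi=False, skip_plugins=False):
    skip_steps, only_steps = [], None
    if docs_only:
        only_steps = ["git-push"]
    elif build_only:
        only_steps = ["build"]
    elif skip_pypi:
        skip_steps.append("pypi-publish")
    elif skip_plugins:
        skip_steps.append("update-plugins")
    return skip_steps, only_steps

# --skip-pypi and --skip-plugins together: only the first branch fires.
print(resolve_steps(skip_pypi=True, skip_plugins=True))  # -> (['pypi-publish'], None)
```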
-# =============================================================================
-# Documentation Sync Command
-# =============================================================================
-
-
-def cmd_sync_docs(args: argparse.Namespace) -> int:
-    """Synchronize AI agent memory files across platforms."""
-    from htmlgraph.sync_docs import (
-        check_all_files,
-        generate_platform_file,
-        sync_all_files,
-    )
-
-    project_root = Path(args.project_root or os.getcwd()).resolve()
-
-    if args.check:
-        # Check mode
-        print("🔍 Checking memory files...")
-        results = check_all_files(project_root)
-
-        print("\nStatus:")
-        all_good = True
-        for filename, status in results.items():
-            if filename == "AGENTS.md":
-                if status:
-                    print(f" ✅ {filename} exists")
-                else:
-                    print(f" ❌ {filename} MISSING (required)")
-                    all_good = False
-            else:
-                if status:
-                    print(f" ✅ {filename} references AGENTS.md")
-                else:
-                    print(f" ⚠️ {filename} missing reference")
-                    all_good = False
-
-        if all_good:
-            print("\n✅ All files are properly synchronized!")
-            return 0
-        else:
-            print("\n⚠️ Some files need attention")
-            return 1
-
-    elif args.generate:
-        # Generate mode
-        platform = args.generate.lower()
-        print(f"📝 Generating {platform.upper()} memory file...")
-
-        try:
-            content = generate_platform_file(platform, project_root)
-            from htmlgraph.sync_docs import PLATFORM_TEMPLATES
-
-            template = PLATFORM_TEMPLATES[platform]
-            filepath = project_root / template["filename"]
-
-            if filepath.exists() and not args.force:
-                print(f"⚠️ {filepath.name} already exists. Use --force to overwrite.")
-                return 1
-
-            filepath.write_text(content)
-            print(f"✅ Created: {filepath}")
-            print("\nThe file references AGENTS.md for core documentation.")
-            return 0
-
-        except ValueError as e:
-            print(f"❌ Error: {e}")
-            return 1
-
-    else:
-        # Sync mode (default)
-        print("🔄 Synchronizing memory files...")
-        changes = sync_all_files(project_root)
-
-        print("\nResults:")
-        for change in changes:
-            print(f" {change}")
-
-        return 1 if any("⚠️" in c or "❌" in c for c in changes) else 0
-
-
-if __name__ == "__main__":
-    main()