htmlgraph 0.9.3__py3-none-any.whl → 0.27.5__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- htmlgraph/.htmlgraph/.session-warning-state.json +6 -0
- htmlgraph/.htmlgraph/agents.json +72 -0
- htmlgraph/.htmlgraph/htmlgraph.db +0 -0
- htmlgraph/__init__.py +173 -17
- htmlgraph/__init__.pyi +123 -0
- htmlgraph/agent_detection.py +127 -0
- htmlgraph/agent_registry.py +45 -30
- htmlgraph/agents.py +160 -107
- htmlgraph/analytics/__init__.py +9 -2
- htmlgraph/analytics/cli.py +190 -51
- htmlgraph/analytics/cost_analyzer.py +391 -0
- htmlgraph/analytics/cost_monitor.py +664 -0
- htmlgraph/analytics/cost_reporter.py +675 -0
- htmlgraph/analytics/cross_session.py +617 -0
- htmlgraph/analytics/dependency.py +192 -100
- htmlgraph/analytics/pattern_learning.py +771 -0
- htmlgraph/analytics/session_graph.py +707 -0
- htmlgraph/analytics/strategic/__init__.py +80 -0
- htmlgraph/analytics/strategic/cost_optimizer.py +611 -0
- htmlgraph/analytics/strategic/pattern_detector.py +876 -0
- htmlgraph/analytics/strategic/preference_manager.py +709 -0
- htmlgraph/analytics/strategic/suggestion_engine.py +747 -0
- htmlgraph/analytics/work_type.py +190 -14
- htmlgraph/analytics_index.py +135 -51
- htmlgraph/api/__init__.py +3 -0
- htmlgraph/api/cost_alerts_websocket.py +416 -0
- htmlgraph/api/main.py +2498 -0
- htmlgraph/api/static/htmx.min.js +1 -0
- htmlgraph/api/static/style-redesign.css +1344 -0
- htmlgraph/api/static/style.css +1079 -0
- htmlgraph/api/templates/dashboard-redesign.html +1366 -0
- htmlgraph/api/templates/dashboard.html +794 -0
- htmlgraph/api/templates/partials/activity-feed-hierarchical.html +326 -0
- htmlgraph/api/templates/partials/activity-feed.html +1100 -0
- htmlgraph/api/templates/partials/agents-redesign.html +317 -0
- htmlgraph/api/templates/partials/agents.html +317 -0
- htmlgraph/api/templates/partials/event-traces.html +373 -0
- htmlgraph/api/templates/partials/features-kanban-redesign.html +509 -0
- htmlgraph/api/templates/partials/features.html +578 -0
- htmlgraph/api/templates/partials/metrics-redesign.html +346 -0
- htmlgraph/api/templates/partials/metrics.html +346 -0
- htmlgraph/api/templates/partials/orchestration-redesign.html +443 -0
- htmlgraph/api/templates/partials/orchestration.html +198 -0
- htmlgraph/api/templates/partials/spawners.html +375 -0
- htmlgraph/api/templates/partials/work-items.html +613 -0
- htmlgraph/api/websocket.py +538 -0
- htmlgraph/archive/__init__.py +24 -0
- htmlgraph/archive/bloom.py +234 -0
- htmlgraph/archive/fts.py +297 -0
- htmlgraph/archive/manager.py +583 -0
- htmlgraph/archive/search.py +244 -0
- htmlgraph/atomic_ops.py +560 -0
- htmlgraph/attribute_index.py +208 -0
- htmlgraph/bounded_paths.py +539 -0
- htmlgraph/builders/__init__.py +14 -0
- htmlgraph/builders/base.py +118 -29
- htmlgraph/builders/bug.py +150 -0
- htmlgraph/builders/chore.py +119 -0
- htmlgraph/builders/epic.py +150 -0
- htmlgraph/builders/feature.py +31 -6
- htmlgraph/builders/insight.py +195 -0
- htmlgraph/builders/metric.py +217 -0
- htmlgraph/builders/pattern.py +202 -0
- htmlgraph/builders/phase.py +162 -0
- htmlgraph/builders/spike.py +52 -19
- htmlgraph/builders/track.py +148 -72
- htmlgraph/cigs/__init__.py +81 -0
- htmlgraph/cigs/autonomy.py +385 -0
- htmlgraph/cigs/cost.py +475 -0
- htmlgraph/cigs/messages_basic.py +472 -0
- htmlgraph/cigs/messaging.py +365 -0
- htmlgraph/cigs/models.py +771 -0
- htmlgraph/cigs/pattern_storage.py +427 -0
- htmlgraph/cigs/patterns.py +503 -0
- htmlgraph/cigs/posttool_analyzer.py +234 -0
- htmlgraph/cigs/reporter.py +818 -0
- htmlgraph/cigs/tracker.py +317 -0
- htmlgraph/cli/.htmlgraph/.session-warning-state.json +6 -0
- htmlgraph/cli/.htmlgraph/agents.json +72 -0
- htmlgraph/cli/.htmlgraph/htmlgraph.db +0 -0
- htmlgraph/cli/__init__.py +42 -0
- htmlgraph/cli/__main__.py +6 -0
- htmlgraph/cli/analytics.py +1424 -0
- htmlgraph/cli/base.py +685 -0
- htmlgraph/cli/constants.py +206 -0
- htmlgraph/cli/core.py +954 -0
- htmlgraph/cli/main.py +147 -0
- htmlgraph/cli/models.py +475 -0
- htmlgraph/cli/templates/__init__.py +1 -0
- htmlgraph/cli/templates/cost_dashboard.py +399 -0
- htmlgraph/cli/work/__init__.py +239 -0
- htmlgraph/cli/work/browse.py +115 -0
- htmlgraph/cli/work/features.py +568 -0
- htmlgraph/cli/work/orchestration.py +676 -0
- htmlgraph/cli/work/report.py +728 -0
- htmlgraph/cli/work/sessions.py +466 -0
- htmlgraph/cli/work/snapshot.py +559 -0
- htmlgraph/cli/work/tracks.py +486 -0
- htmlgraph/cli_commands/__init__.py +1 -0
- htmlgraph/cli_commands/feature.py +195 -0
- htmlgraph/cli_framework.py +115 -0
- htmlgraph/collections/__init__.py +18 -0
- htmlgraph/collections/base.py +415 -98
- htmlgraph/collections/bug.py +53 -0
- htmlgraph/collections/chore.py +53 -0
- htmlgraph/collections/epic.py +53 -0
- htmlgraph/collections/feature.py +12 -26
- htmlgraph/collections/insight.py +100 -0
- htmlgraph/collections/metric.py +92 -0
- htmlgraph/collections/pattern.py +97 -0
- htmlgraph/collections/phase.py +53 -0
- htmlgraph/collections/session.py +194 -0
- htmlgraph/collections/spike.py +56 -16
- htmlgraph/collections/task_delegation.py +241 -0
- htmlgraph/collections/todo.py +511 -0
- htmlgraph/collections/traces.py +487 -0
- htmlgraph/config/cost_models.json +56 -0
- htmlgraph/config.py +190 -0
- htmlgraph/context_analytics.py +344 -0
- htmlgraph/converter.py +216 -28
- htmlgraph/cost_analysis/__init__.py +5 -0
- htmlgraph/cost_analysis/analyzer.py +438 -0
- htmlgraph/dashboard.html +2406 -307
- htmlgraph/dashboard.html.backup +6592 -0
- htmlgraph/dashboard.html.bak +7181 -0
- htmlgraph/dashboard.html.bak2 +7231 -0
- htmlgraph/dashboard.html.bak3 +7232 -0
- htmlgraph/db/__init__.py +38 -0
- htmlgraph/db/queries.py +790 -0
- htmlgraph/db/schema.py +1788 -0
- htmlgraph/decorators.py +317 -0
- htmlgraph/dependency_models.py +19 -2
- htmlgraph/deploy.py +142 -125
- htmlgraph/deployment_models.py +474 -0
- htmlgraph/docs/API_REFERENCE.md +841 -0
- htmlgraph/docs/HTTP_API.md +750 -0
- htmlgraph/docs/INTEGRATION_GUIDE.md +752 -0
- htmlgraph/docs/ORCHESTRATION_PATTERNS.md +717 -0
- htmlgraph/docs/README.md +532 -0
- htmlgraph/docs/__init__.py +77 -0
- htmlgraph/docs/docs_version.py +55 -0
- htmlgraph/docs/metadata.py +93 -0
- htmlgraph/docs/migrations.py +232 -0
- htmlgraph/docs/template_engine.py +143 -0
- htmlgraph/docs/templates/_sections/cli_reference.md.j2 +52 -0
- htmlgraph/docs/templates/_sections/core_concepts.md.j2 +29 -0
- htmlgraph/docs/templates/_sections/sdk_basics.md.j2 +69 -0
- htmlgraph/docs/templates/base_agents.md.j2 +78 -0
- htmlgraph/docs/templates/example_user_override.md.j2 +47 -0
- htmlgraph/docs/version_check.py +163 -0
- htmlgraph/edge_index.py +182 -27
- htmlgraph/error_handler.py +544 -0
- htmlgraph/event_log.py +100 -52
- htmlgraph/event_migration.py +13 -4
- htmlgraph/exceptions.py +49 -0
- htmlgraph/file_watcher.py +101 -28
- htmlgraph/find_api.py +75 -63
- htmlgraph/git_events.py +145 -63
- htmlgraph/graph.py +1122 -106
- htmlgraph/hooks/.htmlgraph/.session-warning-state.json +6 -0
- htmlgraph/hooks/.htmlgraph/agents.json +72 -0
- htmlgraph/hooks/.htmlgraph/index.sqlite +0 -0
- htmlgraph/hooks/__init__.py +45 -0
- htmlgraph/hooks/bootstrap.py +169 -0
- htmlgraph/hooks/cigs_pretool_enforcer.py +354 -0
- htmlgraph/hooks/concurrent_sessions.py +208 -0
- htmlgraph/hooks/context.py +350 -0
- htmlgraph/hooks/drift_handler.py +525 -0
- htmlgraph/hooks/event_tracker.py +1314 -0
- htmlgraph/hooks/git_commands.py +175 -0
- htmlgraph/hooks/hooks-config.example.json +12 -0
- htmlgraph/hooks/installer.py +343 -0
- htmlgraph/hooks/orchestrator.py +674 -0
- htmlgraph/hooks/orchestrator_reflector.py +223 -0
- htmlgraph/hooks/post-checkout.sh +28 -0
- htmlgraph/hooks/post-commit.sh +24 -0
- htmlgraph/hooks/post-merge.sh +26 -0
- htmlgraph/hooks/post_tool_use_failure.py +273 -0
- htmlgraph/hooks/post_tool_use_handler.py +257 -0
- htmlgraph/hooks/posttooluse.py +408 -0
- htmlgraph/hooks/pre-commit.sh +94 -0
- htmlgraph/hooks/pre-push.sh +28 -0
- htmlgraph/hooks/pretooluse.py +819 -0
- htmlgraph/hooks/prompt_analyzer.py +637 -0
- htmlgraph/hooks/session_handler.py +668 -0
- htmlgraph/hooks/session_summary.py +395 -0
- htmlgraph/hooks/state_manager.py +504 -0
- htmlgraph/hooks/subagent_detection.py +202 -0
- htmlgraph/hooks/subagent_stop.py +369 -0
- htmlgraph/hooks/task_enforcer.py +255 -0
- htmlgraph/hooks/task_validator.py +177 -0
- htmlgraph/hooks/validator.py +628 -0
- htmlgraph/ids.py +41 -27
- htmlgraph/index.d.ts +286 -0
- htmlgraph/learning.py +767 -0
- htmlgraph/mcp_server.py +69 -23
- htmlgraph/models.py +1586 -87
- htmlgraph/operations/README.md +62 -0
- htmlgraph/operations/__init__.py +79 -0
- htmlgraph/operations/analytics.py +339 -0
- htmlgraph/operations/bootstrap.py +289 -0
- htmlgraph/operations/events.py +244 -0
- htmlgraph/operations/fastapi_server.py +231 -0
- htmlgraph/operations/hooks.py +350 -0
- htmlgraph/operations/initialization.py +597 -0
- htmlgraph/operations/initialization.py.backup +228 -0
- htmlgraph/operations/server.py +303 -0
- htmlgraph/orchestration/__init__.py +58 -0
- htmlgraph/orchestration/claude_launcher.py +179 -0
- htmlgraph/orchestration/command_builder.py +72 -0
- htmlgraph/orchestration/headless_spawner.py +281 -0
- htmlgraph/orchestration/live_events.py +377 -0
- htmlgraph/orchestration/model_selection.py +327 -0
- htmlgraph/orchestration/plugin_manager.py +140 -0
- htmlgraph/orchestration/prompts.py +137 -0
- htmlgraph/orchestration/spawner_event_tracker.py +383 -0
- htmlgraph/orchestration/spawners/__init__.py +16 -0
- htmlgraph/orchestration/spawners/base.py +194 -0
- htmlgraph/orchestration/spawners/claude.py +173 -0
- htmlgraph/orchestration/spawners/codex.py +435 -0
- htmlgraph/orchestration/spawners/copilot.py +294 -0
- htmlgraph/orchestration/spawners/gemini.py +471 -0
- htmlgraph/orchestration/subprocess_runner.py +36 -0
- htmlgraph/orchestration/task_coordination.py +343 -0
- htmlgraph/orchestration.md +563 -0
- htmlgraph/orchestrator-system-prompt-optimized.txt +863 -0
- htmlgraph/orchestrator.py +669 -0
- htmlgraph/orchestrator_config.py +357 -0
- htmlgraph/orchestrator_mode.py +328 -0
- htmlgraph/orchestrator_validator.py +133 -0
- htmlgraph/parallel.py +646 -0
- htmlgraph/parser.py +160 -35
- htmlgraph/path_query.py +608 -0
- htmlgraph/pattern_matcher.py +636 -0
- htmlgraph/planning.py +147 -52
- htmlgraph/pydantic_models.py +476 -0
- htmlgraph/quality_gates.py +350 -0
- htmlgraph/query_builder.py +109 -72
- htmlgraph/query_composer.py +509 -0
- htmlgraph/reflection.py +443 -0
- htmlgraph/refs.py +344 -0
- htmlgraph/repo_hash.py +512 -0
- htmlgraph/repositories/__init__.py +292 -0
- htmlgraph/repositories/analytics_repository.py +455 -0
- htmlgraph/repositories/analytics_repository_standard.py +628 -0
- htmlgraph/repositories/feature_repository.py +581 -0
- htmlgraph/repositories/feature_repository_htmlfile.py +668 -0
- htmlgraph/repositories/feature_repository_memory.py +607 -0
- htmlgraph/repositories/feature_repository_sqlite.py +858 -0
- htmlgraph/repositories/filter_service.py +620 -0
- htmlgraph/repositories/filter_service_standard.py +445 -0
- htmlgraph/repositories/shared_cache.py +621 -0
- htmlgraph/repositories/shared_cache_memory.py +395 -0
- htmlgraph/repositories/track_repository.py +552 -0
- htmlgraph/repositories/track_repository_htmlfile.py +619 -0
- htmlgraph/repositories/track_repository_memory.py +508 -0
- htmlgraph/repositories/track_repository_sqlite.py +711 -0
- htmlgraph/routing.py +8 -19
- htmlgraph/scripts/deploy.py +1 -2
- htmlgraph/sdk/__init__.py +398 -0
- htmlgraph/sdk/__init__.pyi +14 -0
- htmlgraph/sdk/analytics/__init__.py +19 -0
- htmlgraph/sdk/analytics/engine.py +155 -0
- htmlgraph/sdk/analytics/helpers.py +178 -0
- htmlgraph/sdk/analytics/registry.py +109 -0
- htmlgraph/sdk/base.py +484 -0
- htmlgraph/sdk/constants.py +216 -0
- htmlgraph/sdk/core.pyi +308 -0
- htmlgraph/sdk/discovery.py +120 -0
- htmlgraph/sdk/help/__init__.py +12 -0
- htmlgraph/sdk/help/mixin.py +699 -0
- htmlgraph/sdk/mixins/__init__.py +15 -0
- htmlgraph/sdk/mixins/attribution.py +113 -0
- htmlgraph/sdk/mixins/mixin.py +410 -0
- htmlgraph/sdk/operations/__init__.py +12 -0
- htmlgraph/sdk/operations/mixin.py +427 -0
- htmlgraph/sdk/orchestration/__init__.py +17 -0
- htmlgraph/sdk/orchestration/coordinator.py +203 -0
- htmlgraph/sdk/orchestration/spawner.py +204 -0
- htmlgraph/sdk/planning/__init__.py +19 -0
- htmlgraph/sdk/planning/bottlenecks.py +93 -0
- htmlgraph/sdk/planning/mixin.py +211 -0
- htmlgraph/sdk/planning/parallel.py +186 -0
- htmlgraph/sdk/planning/queue.py +210 -0
- htmlgraph/sdk/planning/recommendations.py +87 -0
- htmlgraph/sdk/planning/smart_planning.py +319 -0
- htmlgraph/sdk/session/__init__.py +19 -0
- htmlgraph/sdk/session/continuity.py +57 -0
- htmlgraph/sdk/session/handoff.py +110 -0
- htmlgraph/sdk/session/info.py +309 -0
- htmlgraph/sdk/session/manager.py +103 -0
- htmlgraph/sdk/strategic/__init__.py +26 -0
- htmlgraph/sdk/strategic/mixin.py +563 -0
- htmlgraph/server.py +685 -180
- htmlgraph/services/__init__.py +10 -0
- htmlgraph/services/claiming.py +199 -0
- htmlgraph/session_hooks.py +300 -0
- htmlgraph/session_manager.py +1392 -175
- htmlgraph/session_registry.py +587 -0
- htmlgraph/session_state.py +436 -0
- htmlgraph/session_warning.py +201 -0
- htmlgraph/sessions/__init__.py +23 -0
- htmlgraph/sessions/handoff.py +756 -0
- htmlgraph/setup.py +34 -17
- htmlgraph/spike_index.py +143 -0
- htmlgraph/sync_docs.py +12 -15
- htmlgraph/system_prompts.py +450 -0
- htmlgraph/templates/AGENTS.md.template +366 -0
- htmlgraph/templates/CLAUDE.md.template +97 -0
- htmlgraph/templates/GEMINI.md.template +87 -0
- htmlgraph/templates/orchestration-view.html +350 -0
- htmlgraph/track_builder.py +146 -15
- htmlgraph/track_manager.py +69 -21
- htmlgraph/transcript.py +890 -0
- htmlgraph/transcript_analytics.py +699 -0
- htmlgraph/types.py +323 -0
- htmlgraph/validation.py +115 -0
- htmlgraph/watch.py +8 -5
- htmlgraph/work_type_utils.py +3 -2
- {htmlgraph-0.9.3.data → htmlgraph-0.27.5.data}/data/htmlgraph/dashboard.html +2406 -307
- htmlgraph-0.27.5.data/data/htmlgraph/templates/AGENTS.md.template +366 -0
- htmlgraph-0.27.5.data/data/htmlgraph/templates/CLAUDE.md.template +97 -0
- htmlgraph-0.27.5.data/data/htmlgraph/templates/GEMINI.md.template +87 -0
- {htmlgraph-0.9.3.dist-info → htmlgraph-0.27.5.dist-info}/METADATA +97 -64
- htmlgraph-0.27.5.dist-info/RECORD +337 -0
- {htmlgraph-0.9.3.dist-info → htmlgraph-0.27.5.dist-info}/entry_points.txt +1 -1
- htmlgraph/cli.py +0 -2688
- htmlgraph/sdk.py +0 -709
- htmlgraph-0.9.3.dist-info/RECORD +0 -61
- {htmlgraph-0.9.3.data → htmlgraph-0.27.5.data}/data/htmlgraph/styles.css +0 -0
- {htmlgraph-0.9.3.dist-info → htmlgraph-0.27.5.dist-info}/WHEEL +0 -0
htmlgraph/session_manager.py
CHANGED
|
@@ -9,18 +9,27 @@ Provides:
|
|
|
9
9
|
- WIP limits enforcement
|
|
10
10
|
"""
|
|
11
11
|
|
|
12
|
-
import os
|
|
13
|
-
import re
|
|
14
12
|
import fnmatch
|
|
15
|
-
|
|
13
|
+
import logging
|
|
14
|
+
import re
|
|
15
|
+
from datetime import datetime, timedelta, timezone
|
|
16
16
|
from pathlib import Path
|
|
17
|
-
from typing import Any
|
|
17
|
+
from typing import Any
|
|
18
18
|
|
|
19
|
-
|
|
19
|
+
logger = logging.getLogger(__name__)
|
|
20
|
+
|
|
21
|
+
from htmlgraph.agent_detection import detect_agent_name
|
|
22
|
+
from htmlgraph.converter import (
|
|
23
|
+
SessionConverter,
|
|
24
|
+
dict_to_node,
|
|
25
|
+
)
|
|
26
|
+
from htmlgraph.event_log import EventRecord, JsonlEventLog
|
|
27
|
+
from htmlgraph.exceptions import SessionNotFoundError
|
|
20
28
|
from htmlgraph.graph import HtmlGraph
|
|
21
|
-
from htmlgraph.converter import session_to_html, html_to_session, SessionConverter, dict_to_node
|
|
22
|
-
from htmlgraph.event_log import JsonlEventLog, EventRecord
|
|
23
29
|
from htmlgraph.ids import generate_id
|
|
30
|
+
from htmlgraph.models import ActivityEntry, ErrorEntry, Node, Session
|
|
31
|
+
from htmlgraph.services import ClaimingService
|
|
32
|
+
from htmlgraph.spike_index import ActiveAutoSpikeIndex
|
|
24
33
|
|
|
25
34
|
|
|
26
35
|
class SessionManager:
|
|
@@ -74,6 +83,8 @@ class SessionManager:
|
|
|
74
83
|
graph_dir: str | Path = ".htmlgraph",
|
|
75
84
|
wip_limit: int = DEFAULT_WIP_LIMIT,
|
|
76
85
|
session_dedupe_window_seconds: int = DEFAULT_SESSION_DEDUPE_WINDOW_SECONDS,
|
|
86
|
+
features_graph: HtmlGraph | None = None,
|
|
87
|
+
bugs_graph: HtmlGraph | None = None,
|
|
77
88
|
):
|
|
78
89
|
"""
|
|
79
90
|
Initialize SessionManager.
|
|
@@ -81,6 +92,9 @@ class SessionManager:
|
|
|
81
92
|
Args:
|
|
82
93
|
graph_dir: Directory containing HtmlGraph data
|
|
83
94
|
wip_limit: Maximum features in progress simultaneously
|
|
95
|
+
session_dedupe_window_seconds: Deduplication window for sessions
|
|
96
|
+
features_graph: Optional pre-initialized HtmlGraph for features (avoids double-loading)
|
|
97
|
+
bugs_graph: Optional pre-initialized HtmlGraph for bugs (avoids double-loading)
|
|
84
98
|
"""
|
|
85
99
|
self.graph_dir = Path(graph_dir)
|
|
86
100
|
self.wip_limit = wip_limit
|
|
@@ -99,13 +113,41 @@ class SessionManager:
|
|
|
99
113
|
# Session converter
|
|
100
114
|
self.session_converter = SessionConverter(self.sessions_dir)
|
|
101
115
|
|
|
102
|
-
# Feature graphs
|
|
103
|
-
|
|
104
|
-
self.
|
|
116
|
+
# Feature graphs - reuse provided instances to avoid double-loading, or create new with lazy loading
|
|
117
|
+
# Note: Use 'is not None' check because HtmlGraph.__bool__ returns False when empty
|
|
118
|
+
self.features_graph = (
|
|
119
|
+
features_graph
|
|
120
|
+
if features_graph is not None
|
|
121
|
+
else HtmlGraph(self.features_dir, auto_load=False)
|
|
122
|
+
)
|
|
123
|
+
self.bugs_graph = (
|
|
124
|
+
bugs_graph
|
|
125
|
+
if bugs_graph is not None
|
|
126
|
+
else HtmlGraph(self.bugs_dir, auto_load=False)
|
|
127
|
+
)
|
|
128
|
+
|
|
129
|
+
# Claiming service (handles feature claims/releases)
|
|
130
|
+
self.claiming_service = ClaimingService(
|
|
131
|
+
features_graph=self.features_graph,
|
|
132
|
+
bugs_graph=self.bugs_graph,
|
|
133
|
+
session_manager=self,
|
|
134
|
+
)
|
|
105
135
|
|
|
106
136
|
# Cache for active session
|
|
107
137
|
self._active_session: Session | None = None
|
|
108
138
|
|
|
139
|
+
# Cache for active sessions list (invalidated on session lifecycle changes)
|
|
140
|
+
self._active_sessions_cache: list[Session] | None = None
|
|
141
|
+
self._sessions_cache_dirty: bool = True
|
|
142
|
+
|
|
143
|
+
# Cache for active features (invalidated on start/complete/release)
|
|
144
|
+
self._active_features_cache: list[Node] | None = None
|
|
145
|
+
self._features_cache_dirty: bool = True
|
|
146
|
+
|
|
147
|
+
# Fast index for active auto-generated spikes (avoids scanning all spike files)
|
|
148
|
+
self._spike_index = ActiveAutoSpikeIndex(self.graph_dir)
|
|
149
|
+
self._active_auto_spikes: set[str] = self._spike_index.get_all()
|
|
150
|
+
|
|
109
151
|
# Append-only event log (Git-friendly source of truth for activities)
|
|
110
152
|
self.events_dir = self.graph_dir / "events"
|
|
111
153
|
self.event_log = JsonlEventLog(self.events_dir)
|
|
@@ -115,10 +157,22 @@ class SessionManager:
|
|
|
115
157
|
# =========================================================================
|
|
116
158
|
|
|
117
159
|
def _list_active_sessions(self) -> list[Session]:
|
|
118
|
-
"""
|
|
119
|
-
|
|
160
|
+
"""
|
|
161
|
+
Return all active sessions found on disk.
|
|
162
|
+
|
|
163
|
+
Uses caching to avoid repeated file I/O. The cache is invalidated
|
|
164
|
+
automatically when sessions are created, ended, or marked as stale.
|
|
165
|
+
"""
|
|
166
|
+
if self._sessions_cache_dirty or self._active_sessions_cache is None:
|
|
167
|
+
self._active_sessions_cache = [
|
|
168
|
+
s for s in self.session_converter.load_all() if s.status == "active"
|
|
169
|
+
]
|
|
170
|
+
self._sessions_cache_dirty = False
|
|
171
|
+
return self._active_sessions_cache
|
|
120
172
|
|
|
121
|
-
def _choose_canonical_active_session(
|
|
173
|
+
def _choose_canonical_active_session(
|
|
174
|
+
self, sessions: list[Session]
|
|
175
|
+
) -> Session | None:
|
|
122
176
|
"""Choose a stable 'canonical' session when multiple are active."""
|
|
123
177
|
if not sessions:
|
|
124
178
|
return None
|
|
@@ -132,11 +186,12 @@ class SessionManager:
|
|
|
132
186
|
"""Mark a session as stale (kept for history but not considered active)."""
|
|
133
187
|
if session.status != "active":
|
|
134
188
|
return
|
|
135
|
-
now = datetime.now()
|
|
189
|
+
now = datetime.now(timezone.utc)
|
|
136
190
|
session.status = "stale"
|
|
137
191
|
session.ended_at = now
|
|
138
192
|
session.last_activity = now
|
|
139
193
|
self.session_converter.save(session)
|
|
194
|
+
self._sessions_cache_dirty = True
|
|
140
195
|
|
|
141
196
|
def normalize_active_sessions(self) -> dict[str, int]:
|
|
142
197
|
"""
|
|
@@ -170,26 +225,32 @@ class SessionManager:
|
|
|
170
225
|
def start_session(
|
|
171
226
|
self,
|
|
172
227
|
session_id: str | None = None,
|
|
173
|
-
agent: str =
|
|
228
|
+
agent: str | None = None,
|
|
174
229
|
is_subagent: bool = False,
|
|
175
230
|
continued_from: str | None = None,
|
|
176
231
|
start_commit: str | None = None,
|
|
177
232
|
title: str | None = None,
|
|
233
|
+
parent_session_id: str | None = None,
|
|
178
234
|
) -> Session:
|
|
179
235
|
"""
|
|
180
236
|
Start a new session.
|
|
181
237
|
|
|
182
238
|
Args:
|
|
183
239
|
session_id: Unique session identifier (auto-generated if None)
|
|
184
|
-
agent: Agent name (
|
|
240
|
+
agent: Agent name (auto-detected if None)
|
|
185
241
|
is_subagent: True if this is a Task subagent
|
|
186
242
|
continued_from: Previous session ID if continuing
|
|
187
243
|
start_commit: Git commit hash at session start
|
|
188
244
|
title: Optional human-readable title
|
|
245
|
+
parent_session_id: ID of parent session (for subagents)
|
|
189
246
|
|
|
190
247
|
Returns:
|
|
191
248
|
New Session instance
|
|
192
249
|
"""
|
|
250
|
+
# Auto-detect agent if not provided
|
|
251
|
+
if agent is None:
|
|
252
|
+
agent = detect_agent_name()
|
|
253
|
+
|
|
193
254
|
now = datetime.now()
|
|
194
255
|
|
|
195
256
|
# Auto-generate collision-resistant session ID if not provided
|
|
@@ -209,6 +270,7 @@ class SessionManager:
|
|
|
209
270
|
if title and not existing.title:
|
|
210
271
|
existing.title = title
|
|
211
272
|
self.session_converter.save(existing)
|
|
273
|
+
self._sessions_cache_dirty = True
|
|
212
274
|
self._active_session = existing
|
|
213
275
|
return existing
|
|
214
276
|
|
|
@@ -220,7 +282,8 @@ class SessionManager:
|
|
|
220
282
|
# The session will only end when the Stop hook is called (process terminates).
|
|
221
283
|
if not is_subagent:
|
|
222
284
|
active_sessions = [
|
|
223
|
-
s
|
|
285
|
+
s
|
|
286
|
+
for s in self._list_active_sessions()
|
|
224
287
|
if (not s.is_subagent) and s.agent == agent
|
|
225
288
|
]
|
|
226
289
|
canonical = self._choose_canonical_active_session(active_sessions)
|
|
@@ -231,6 +294,7 @@ class SessionManager:
|
|
|
231
294
|
self._active_session = canonical
|
|
232
295
|
canonical.last_activity = now # Update activity timestamp
|
|
233
296
|
self.session_converter.save(canonical)
|
|
297
|
+
self._sessions_cache_dirty = True
|
|
234
298
|
return canonical
|
|
235
299
|
|
|
236
300
|
# If we're truly starting a new session (different commit), mark old sessions as stale.
|
|
@@ -247,21 +311,273 @@ class SessionManager:
|
|
|
247
311
|
started_at=now,
|
|
248
312
|
last_activity=now,
|
|
249
313
|
title=title or "",
|
|
314
|
+
parent_session=parent_session_id,
|
|
250
315
|
)
|
|
251
316
|
|
|
252
317
|
# Add session start event
|
|
253
|
-
session.add_activity(
|
|
254
|
-
|
|
255
|
-
|
|
256
|
-
|
|
257
|
-
|
|
318
|
+
session.add_activity(
|
|
319
|
+
ActivityEntry(
|
|
320
|
+
tool="SessionStart",
|
|
321
|
+
summary="Session started",
|
|
322
|
+
timestamp=now,
|
|
323
|
+
)
|
|
324
|
+
)
|
|
325
|
+
|
|
326
|
+
# Set parent session in environment for subsequent subprocesses (e.g. HeadlessSpawner)
|
|
327
|
+
# This ensures that any tools spawned by this session link back to it
|
|
328
|
+
import os
|
|
329
|
+
|
|
330
|
+
os.environ["HTMLGRAPH_PARENT_SESSION"] = session.id
|
|
258
331
|
|
|
259
332
|
# Save to disk
|
|
260
333
|
self.session_converter.save(session)
|
|
334
|
+
self._sessions_cache_dirty = True
|
|
261
335
|
self._active_session = session
|
|
262
336
|
|
|
337
|
+
# Complete any lingering transition spikes from previous conversations
|
|
338
|
+
# This marks the end of the previous conversation's transition period
|
|
339
|
+
self._complete_transition_spikes_on_conversation_start(session.agent)
|
|
340
|
+
|
|
341
|
+
# Auto-create session-init spike for transitional activities
|
|
342
|
+
self._create_session_init_spike(session)
|
|
343
|
+
|
|
263
344
|
return session
|
|
264
345
|
|
|
346
|
+
def _create_session_init_spike(self, session: Session) -> Node | None:
|
|
347
|
+
"""
|
|
348
|
+
Auto-create a session-init spike to catch pre-feature activities.
|
|
349
|
+
|
|
350
|
+
This spike captures work done before the first feature is started:
|
|
351
|
+
- Session startup, reviewing context
|
|
352
|
+
- Planning what to work on
|
|
353
|
+
- General exploration
|
|
354
|
+
|
|
355
|
+
The spike auto-completes when the first feature is started.
|
|
356
|
+
"""
|
|
357
|
+
from htmlgraph.converter import NodeConverter
|
|
358
|
+
|
|
359
|
+
spike_id = f"spike-init-{session.id[:8]}"
|
|
360
|
+
|
|
361
|
+
# Check if spike already exists (idempotency)
|
|
362
|
+
spike_converter = NodeConverter(self.graph_dir / "spikes")
|
|
363
|
+
existing = spike_converter.load(spike_id)
|
|
364
|
+
if existing:
|
|
365
|
+
# Add to index if it's still active
|
|
366
|
+
if existing.status == "in-progress":
|
|
367
|
+
self._active_auto_spikes.add(existing.id)
|
|
368
|
+
self._spike_index.add(existing.id, "session-init", session.id)
|
|
369
|
+
return existing
|
|
370
|
+
|
|
371
|
+
# Create session-init spike
|
|
372
|
+
spike = Node(
|
|
373
|
+
id=spike_id,
|
|
374
|
+
title=f"Session Init: {session.agent}",
|
|
375
|
+
type="spike",
|
|
376
|
+
status="in-progress",
|
|
377
|
+
priority="low",
|
|
378
|
+
spike_subtype="session-init",
|
|
379
|
+
auto_generated=True,
|
|
380
|
+
session_id=session.id,
|
|
381
|
+
model_name=session.agent, # Store agent name as model
|
|
382
|
+
content="Auto-generated spike for session startup activities.\n\nCaptures work before first feature is started:\n- Context review\n- Planning\n- Exploration\n\nAuto-completes when first feature is claimed.",
|
|
383
|
+
)
|
|
384
|
+
|
|
385
|
+
# Save spike
|
|
386
|
+
spike_converter.save(spike)
|
|
387
|
+
|
|
388
|
+
# Add to active auto-spikes index (both in-memory and persistent)
|
|
389
|
+
self._active_auto_spikes.add(spike.id)
|
|
390
|
+
self._spike_index.add(spike.id, "session-init", session.id)
|
|
391
|
+
|
|
392
|
+
# Link session to spike
|
|
393
|
+
if spike.id not in session.worked_on:
|
|
394
|
+
session.worked_on.append(spike.id)
|
|
395
|
+
self.session_converter.save(session)
|
|
396
|
+
|
|
397
|
+
return spike
|
|
398
|
+
|
|
399
|
+
def _create_transition_spike(
|
|
400
|
+
self, session: Session, from_feature_id: str
|
|
401
|
+
) -> Node | None:
|
|
402
|
+
"""
|
|
403
|
+
Auto-create a transition spike after feature completion.
|
|
404
|
+
|
|
405
|
+
This spike captures work done between features:
|
|
406
|
+
- Post-completion cleanup
|
|
407
|
+
- Review and planning
|
|
408
|
+
- Context switching
|
|
409
|
+
|
|
410
|
+
The spike auto-completes when the next feature is started.
|
|
411
|
+
"""
|
|
412
|
+
from htmlgraph.converter import NodeConverter
|
|
413
|
+
|
|
414
|
+
spike_id = generate_id(node_type="spike", title="transition")
|
|
415
|
+
|
|
416
|
+
# Create transition spike
|
|
417
|
+
spike = Node(
|
|
418
|
+
id=spike_id,
|
|
419
|
+
title=f"Transition from {from_feature_id[:12]}",
|
|
420
|
+
type="spike",
|
|
421
|
+
status="in-progress",
|
|
422
|
+
priority="low",
|
|
423
|
+
spike_subtype="transition",
|
|
424
|
+
auto_generated=True,
|
|
425
|
+
session_id=session.id,
|
|
426
|
+
from_feature_id=from_feature_id,
|
|
427
|
+
model_name=session.agent,
|
|
428
|
+
content=f"Auto-generated transition spike.\n\nCaptures post-completion activities:\n- Cleanup and review\n- Planning next work\n- Context switching\n\nFrom: {from_feature_id}\nAuto-completes when next feature is started.",
|
|
429
|
+
)
|
|
430
|
+
|
|
431
|
+
# Save spike
|
|
432
|
+
spike_converter = NodeConverter(self.graph_dir / "spikes")
|
|
433
|
+
spike_converter.save(spike)
|
|
434
|
+
|
|
435
|
+
# Add to active auto-spikes index (both in-memory and persistent)
|
|
436
|
+
self._active_auto_spikes.add(spike.id)
|
|
437
|
+
self._spike_index.add(spike.id, "transition", session.id)
|
|
438
|
+
|
|
439
|
+
# Link session to spike
|
|
440
|
+
if spike.id not in session.worked_on:
|
|
441
|
+
session.worked_on.append(spike.id)
|
|
442
|
+
self.session_converter.save(session)
|
|
443
|
+
|
|
444
|
+
return spike
|
|
445
|
+
|
|
446
|
+
def _complete_transition_spikes_on_conversation_start(
|
|
447
|
+
self, agent: str
|
|
448
|
+
) -> list[Node]:
|
|
449
|
+
"""
|
|
450
|
+
Complete transition spikes from previous conversations when a new conversation starts.
|
|
451
|
+
|
|
452
|
+
This implements the state management pattern:
|
|
453
|
+
1. Work item completes → creates transition spike
|
|
454
|
+
2. New conversation starts → completes previous transition spike
|
|
455
|
+
3. New work item starts → completes session-init spike
|
|
456
|
+
|
|
457
|
+
Args:
|
|
458
|
+
agent: Agent starting the new conversation
|
|
459
|
+
|
|
460
|
+
Returns:
|
|
461
|
+
List of completed transition spikes
|
|
462
|
+
"""
|
|
463
|
+
from htmlgraph.converter import NodeConverter
|
|
464
|
+
|
|
465
|
+
spike_converter = NodeConverter(self.graph_dir / "spikes")
|
|
466
|
+
completed_spikes = []
|
|
467
|
+
|
|
468
|
+
# Complete only TRANSITION spikes (not session-init, which should persist)
|
|
469
|
+
for spike_id in list(self._active_auto_spikes):
|
|
470
|
+
spike = spike_converter.load(spike_id)
|
|
471
|
+
|
|
472
|
+
if not spike:
|
|
473
|
+
self._active_auto_spikes.discard(spike_id)
|
|
474
|
+
self._spike_index.remove(spike_id)
|
|
475
|
+
continue
|
|
476
|
+
|
|
477
|
+
# Only complete transition spikes on conversation start
|
|
478
|
+
if not (
|
|
479
|
+
spike.type == "spike"
|
|
480
|
+
and getattr(spike, "auto_generated", False)
|
|
481
|
+
and getattr(spike, "spike_subtype", None) == "transition"
|
|
482
|
+
and spike.status == "in-progress"
|
|
483
|
+
):
|
|
484
|
+
continue
|
|
485
|
+
|
|
486
|
+
# Complete the transition spike
|
|
487
|
+
spike.status = "done"
|
|
488
|
+
spike.updated = datetime.now()
|
|
489
|
+
spike.properties["completed_by"] = "conversation-start"
|
|
490
|
+
|
|
491
|
+
spike_converter.save(spike)
|
|
492
|
+
completed_spikes.append(spike)
|
|
493
|
+
self._active_auto_spikes.discard(spike_id)
|
|
494
|
+
self._spike_index.remove(spike_id)
|
|
495
|
+
|
|
496
|
+
logger.debug(f"Completed transition spike {spike_id} on conversation start")
|
|
497
|
+
|
|
498
|
+
return completed_spikes
|
|
499
|
+
|
|
500
|
+
def _complete_active_auto_spikes(
    self, agent: str, to_feature_id: str
) -> list[Node]:
    """
    Auto-complete any active auto-generated spikes when a feature starts.

    When starting a regular feature, the transitional period is over,
    so we complete session-init and transition spikes.

    Args:
        agent: Agent starting the feature
        to_feature_id: Feature being started

    Returns:
        List of completed spikes
    """
    from htmlgraph.converter import NodeConverter

    converter = NodeConverter(self.graph_dir / "spikes")
    completed: list[Node] = []

    # Walk only the spikes tracked in the active index; this avoids an
    # expensive load_all() over the whole spikes collection.
    for spike_id in list(self._active_auto_spikes):
        node = converter.load(spike_id)

        # Stale entry: the spike was deleted out from under us, so
        # drop it from both the in-memory set and the persistent index.
        if not node:
            self._active_auto_spikes.discard(spike_id)
            self._spike_index.remove(spike_id)
            continue

        is_active_auto_spike = (
            node.type == "spike"
            and node.auto_generated
            and node.spike_subtype
            in ("session-init", "transition", "conversation-init")
            and node.status == "in-progress"
        )
        if not is_active_auto_spike:
            # No longer an active auto-spike; evict it from the index.
            self._active_auto_spikes.discard(spike_id)
            self._spike_index.remove(spike_id)
            continue

        # Mark the spike done and record which feature superseded it.
        node.status = "done"
        node.updated = datetime.now()
        node.to_feature_id = (
            to_feature_id  # Record what feature we transitioned to
        )

        converter.save(node)
        completed.append(node)

        # Completed spikes leave the active index (memory + persistent).
        self._active_auto_spikes.discard(spike_id)
        self._spike_index.remove(spike_id)

    # A spike completing is a work boundary: import the transcript so
    # transitional activity is captured on the session.
    if completed:
        session = self.get_active_session(agent=agent)
        if session and session.transcript_id:
            try:
                from htmlgraph.transcript import TranscriptReader

                reader = TranscriptReader()
                transcript = reader.read_session(session.transcript_id)
                if transcript:
                    self.import_transcript_events(
                        session_id=session.id,
                        transcript_session=transcript,
                        overwrite=True,
                    )
            except Exception as e:
                logger.warning(
                    f"Failed to import transcript events on auto-spike completion: {e}"
                )

    return completed
|
|
580
|
+
|
|
265
581
|
def get_session(self, session_id: str) -> Session | None:
|
|
266
582
|
"""Get a session by ID."""
|
|
267
583
|
if self._active_session and self._active_session.id == session_id:
|
|
@@ -276,7 +592,7 @@ class SessionManager:
|
|
|
276
592
|
if not sessions:
|
|
277
593
|
return None
|
|
278
594
|
|
|
279
|
-
def sort_key(session: Session):
|
|
595
|
+
def sort_key(session: Session) -> datetime:
|
|
280
596
|
if session.ended_at:
|
|
281
597
|
return session.ended_at
|
|
282
598
|
if session.last_activity:
|
|
@@ -422,16 +738,19 @@ class SessionManager:
|
|
|
422
738
|
session.blockers = blockers
|
|
423
739
|
|
|
424
740
|
session.end()
|
|
425
|
-
session.add_activity(
|
|
426
|
-
|
|
427
|
-
|
|
428
|
-
|
|
429
|
-
|
|
741
|
+
session.add_activity(
|
|
742
|
+
ActivityEntry(
|
|
743
|
+
tool="SessionEnd",
|
|
744
|
+
summary="Session ended",
|
|
745
|
+
timestamp=datetime.now(timezone.utc),
|
|
746
|
+
)
|
|
747
|
+
)
|
|
430
748
|
|
|
431
749
|
# Release all features claimed by this session
|
|
432
750
|
self.release_session_features(session_id)
|
|
433
751
|
|
|
434
752
|
self.session_converter.save(session)
|
|
753
|
+
self._sessions_cache_dirty = True
|
|
435
754
|
|
|
436
755
|
if self._active_session and self._active_session.id == session_id:
|
|
437
756
|
self._active_session = None
|
|
@@ -462,15 +781,169 @@ class SessionManager:
|
|
|
462
781
|
updated = True
|
|
463
782
|
|
|
464
783
|
if updated:
|
|
465
|
-
session.add_activity(
|
|
466
|
-
|
|
467
|
-
|
|
468
|
-
|
|
469
|
-
|
|
784
|
+
session.add_activity(
|
|
785
|
+
ActivityEntry(
|
|
786
|
+
tool="SessionHandoff",
|
|
787
|
+
summary="Session handoff updated",
|
|
788
|
+
timestamp=datetime.now(),
|
|
789
|
+
)
|
|
790
|
+
)
|
|
470
791
|
self.session_converter.save(session)
|
|
471
792
|
|
|
472
793
|
return session
|
|
473
794
|
|
|
795
|
+
def continue_from_last(
    self,
    agent: str | None = None,
    auto_create_session: bool = True,
) -> tuple[Session | None, Any]:  # Returns (new_session, resume_info)
    """
    Continue work from the last completed session.

    Loads context from the previous session including:
    - Handoff notes and next focus
    - Blockers
    - Recommended context files
    - Recent commits
    - Features worked on

    Args:
        agent: Filter by agent (None = current agent)
        auto_create_session: Create new session if True

    Returns:
        Tuple of (new_session, resume_info) or (None, None) if no previous session

    Example:
        >>> manager = SessionManager(".htmlgraph")
        >>> new_session, resume = manager.continue_from_last(agent="claude")
        >>> if resume:
        ...     print(resume.summary)
        ...     print(resume.recommended_files)
    """
    # Deferred imports: avoid circular dependency with the handoff module.
    from typing import Any

    from htmlgraph.sessions.handoff import SessionResume

    # SessionResume only needs an object exposing a graph directory, so a
    # throwaway stand-in avoids pulling in the full SDK (and its database
    # initialization).
    class MinimalSDK:
        def __init__(self, directory: Path) -> None:
            self._directory = directory

    sdk: Any = MinimalSDK(self.graph_dir)
    resume = SessionResume(sdk)

    # No prior session means there is nothing to continue from.
    previous = resume.get_last_session(agent=agent)
    if not previous:
        return None, None

    resume_info = resume.build_resume_info(previous)

    new_session = None
    if auto_create_session:
        from htmlgraph.ids import generate_id

        new_session = self.start_session(
            session_id=generate_id("sess"),
            agent=agent or previous.agent,
            title=f"Continuing from {previous.id}",
        )

        # Persist the back-link to the session we resumed from.
        new_session.continued_from = previous.id
        self.session_converter.save(new_session)

    return new_session, resume_info
|
|
863
|
+
|
|
864
|
+
def end_session_with_handoff(
    self,
    session_id: str,
    summary: str | None = None,
    next_focus: str | None = None,
    blockers: list[str] | None = None,
    keep_context: list[str] | None = None,
    auto_recommend_context: bool = True,
) -> Session | None:
    """
    End session with handoff information for next session.

    Args:
        session_id: Session to end
        summary: What was accomplished (handoff notes)
        next_focus: What should be done next
        blockers: List of blockers preventing progress
        keep_context: List of files to keep context for
        auto_recommend_context: Auto-recommend files from git history

    Returns:
        Updated session or None

    Example:
        >>> manager.end_session_with_handoff(
        ...     session_id="sess-123",
        ...     summary="Completed OAuth integration",
        ...     next_focus="Implement JWT token refresh",
        ...     blockers=["Waiting for security review"],
        ...     keep_context=["src/auth/oauth.py"]
        ... )
    """
    from htmlgraph.sessions.handoff import (
        ContextRecommender,
        HandoffBuilder,
    )

    session = self.get_session(session_id)
    if not session:
        return None

    # Assemble the handoff payload; each piece is optional.
    builder = HandoffBuilder(session)
    if summary:
        builder.add_summary(summary)
    if next_focus:
        builder.add_next_focus(next_focus)
    if blockers:
        builder.add_blockers(blockers)
    if keep_context:
        builder.add_context_files(keep_context)
    if auto_recommend_context:
        # Augment explicit context with files recommended from git history.
        builder.auto_recommend_context(ContextRecommender(), max_files=10)

    handoff = builder.build()

    # Copy the built handoff onto the session record.
    session.handoff_notes = handoff["handoff_notes"]
    session.recommended_next = handoff["recommended_next"]
    session.blockers = handoff["blockers"]
    session.recommended_context = handoff["recommended_context"]

    # Persist handoff data before ending so nothing is lost if end fails.
    self.session_converter.save(session)

    self.end_session(session_id)

    # Handoff-effectiveness tracking needs database access that the
    # SessionManager does not have; SDK.end_session_with_handoff() is the
    # fully-tracked entry point.

    return session
|
|
946
|
+
|
|
474
947
|
def release_session_features(self, session_id: str) -> list[str]:
    """
    Release all features claimed by a specific session.

    Delegates to the claiming service, which owns claim bookkeeping.

    Args:
        session_id: Session whose claimed features should be released.

    Returns:
        List of released feature IDs
    """
    return self.claiming_service.release_session_features(session_id)
|
|
958
|
+
|
|
959
|
+
def log_error(
    self,
    session_id: str,
    error: Exception,
    traceback_str: str,
    context: dict[str, Any] | None = None,
) -> None:
    """
    Log error with full traceback to session.

    Stores complete error details for later retrieval via debug command.
    Minimizes console output for better token efficiency.

    Args:
        session_id: Session ID to log error to
        error: The exception object
        traceback_str: Full traceback string
        context: Optional context dict (e.g. current file, line number)
            NOTE(review): currently unused by this method — kept for
            interface compatibility; confirm whether callers rely on it.
    """
    session = self.get_session(session_id)
    if not session:
        # Unknown session: nothing to attach the error to.
        return

    record = ErrorEntry(
        timestamp=datetime.now(),
        error_type=error.__class__.__name__,
        message=str(error),
        traceback=traceback_str,
    )

    # Append the record and persist the session immediately so the error
    # survives even if the process dies right after.
    session.error_log.append(record)
    self.session_converter.save(session)
|
|
994
|
+
|
|
995
|
+
def get_session_errors(self, session_id: str) -> list[dict[str, Any]]:
    """
    Retrieve all errors logged for a session.

    Args:
        session_id: Session ID

    Returns:
        List of error records, or empty list if none
    """
    session = self.get_session(session_id)
    if session is None:
        return []
    # Serialize each ErrorEntry to a plain dict for the caller.
    return [entry.model_dump() for entry in session.error_log]
|
|
1009
|
+
|
|
1010
|
+
def search_errors(
    self,
    session_id: str,
    error_type: str | None = None,
    pattern: str | None = None,
) -> list[dict[str, Any]]:
    """
    Search errors in a session by type and/or pattern.

    Args:
        session_id: Session ID to search
        error_type: Filter by exception type (e.g., "ValueError")
        pattern: Regex pattern to match in error message (case-insensitive)

    Returns:
        List of matching error records
    """
    session = self.get_session(session_id)
    if session is None:
        return []

    matches = [entry.model_dump() for entry in session.error_log]

    # Narrow by exception class name first (cheap equality check).
    if error_type:
        matches = [m for m in matches if m.get("error_type") == error_type]

    # Then narrow by regex over the message text.
    if pattern:
        needle = re.compile(pattern, re.IGNORECASE)
        matches = [m for m in matches if needle.search(m.get("message", ""))]

    return matches
|
|
1045
|
+
|
|
1046
|
+
def get_error_summary(self, session_id: str) -> dict[str, Any]:
    """
    Get summary statistics of errors in a session.

    Args:
        session_id: Session ID

    Returns:
        Dictionary with error summary statistics: total count, a count
        per error type, and the first/last error records.
    """
    session = self.get_session(session_id)

    # No session or no errors: return an empty-but-well-formed summary.
    if not session or not session.error_log:
        return {
            "total_errors": 0,
            "error_types": {},
            "first_error": None,
            "last_error": None,
        }

    entries = session.error_log

    # Tally occurrences per exception type.
    type_counts: dict[str, int] = {}
    for entry in entries:
        type_counts[entry.error_type] = type_counts.get(entry.error_type, 0) + 1

    return {
        "total_errors": len(entries),
        "error_types": type_counts,
        "first_error": entries[0].model_dump() if entries else None,
        "last_error": entries[-1].model_dump() if entries else None,
    }
|
|
496
1078
|
|
|
497
1079
|
# =========================================================================
|
|
498
1080
|
# Activity Tracking
|
|
@@ -527,7 +1109,7 @@ class SessionManager:
|
|
|
527
1109
|
"""
|
|
528
1110
|
session = self.get_session(session_id)
|
|
529
1111
|
if not session:
|
|
530
|
-
raise
|
|
1112
|
+
raise SessionNotFoundError(session_id)
|
|
531
1113
|
|
|
532
1114
|
# Get active features for attribution
|
|
533
1115
|
active_features = self.get_active_features()
|
|
@@ -543,10 +1125,26 @@ class SessionManager:
|
|
|
543
1125
|
# Inherit feature from parent if not explicitly set
|
|
544
1126
|
if not attributed_feature and active_features:
|
|
545
1127
|
# Use primary feature or first active feature
|
|
546
|
-
primary = next(
|
|
547
|
-
|
|
1128
|
+
primary = next(
|
|
1129
|
+
(f for f in active_features if f.properties.get("is_primary")), None
|
|
1130
|
+
)
|
|
1131
|
+
attributed_feature = (
|
|
1132
|
+
(primary or active_features[0]).id if active_features else None
|
|
1133
|
+
)
|
|
548
1134
|
drift_score = None # No drift for child activities
|
|
549
1135
|
attribution_reason = "child_activity"
|
|
1136
|
+
# Skip drift calculation for system overhead activities
|
|
1137
|
+
elif self._is_system_overhead(tool, summary, file_paths or []):
|
|
1138
|
+
# Attribute to primary or first active feature, but no drift score
|
|
1139
|
+
if not attributed_feature and active_features:
|
|
1140
|
+
primary = next(
|
|
1141
|
+
(f for f in active_features if f.properties.get("is_primary")), None
|
|
1142
|
+
)
|
|
1143
|
+
attributed_feature = (
|
|
1144
|
+
(primary or active_features[0]).id if active_features else None
|
|
1145
|
+
)
|
|
1146
|
+
drift_score = None # No drift for system overhead
|
|
1147
|
+
attribution_reason = "system_overhead"
|
|
550
1148
|
elif not attributed_feature and active_features:
|
|
551
1149
|
attribution = self.attribute_activity(
|
|
552
1150
|
tool=tool,
|
|
@@ -563,7 +1161,7 @@ class SessionManager:
|
|
|
563
1161
|
# This ensures multi-agent safety - no race conditions even with parallel agents
|
|
564
1162
|
event_id = generate_id(
|
|
565
1163
|
node_type="event",
|
|
566
|
-
title=f"{tool}:{summary[:50]}" # Include tool + summary for content-addressability
|
|
1164
|
+
title=f"{tool}:{summary[:50]}", # Include tool + summary for content-addressability
|
|
567
1165
|
)
|
|
568
1166
|
|
|
569
1167
|
entry = ActivityEntry(
|
|
@@ -580,35 +1178,43 @@ class SessionManager:
|
|
|
580
1178
|
"file_paths": file_paths,
|
|
581
1179
|
"attribution_reason": attribution_reason,
|
|
582
1180
|
"session_id": session_id, # Include session context in payload
|
|
583
|
-
}
|
|
1181
|
+
}
|
|
1182
|
+
if file_paths or attribution_reason or session_id
|
|
1183
|
+
else payload,
|
|
584
1184
|
)
|
|
585
1185
|
|
|
586
1186
|
# Append to JSONL event log (source of truth for analytics)
|
|
587
1187
|
try:
|
|
588
1188
|
# Auto-infer work type from feature_id (Phase 1: Work Type Classification)
|
|
589
1189
|
from htmlgraph.work_type_utils import infer_work_type_from_id
|
|
1190
|
+
|
|
590
1191
|
work_type = infer_work_type_from_id(entry.feature_id)
|
|
591
1192
|
|
|
592
|
-
self.event_log.append(
|
|
593
|
-
|
|
594
|
-
|
|
595
|
-
|
|
596
|
-
|
|
597
|
-
|
|
598
|
-
|
|
599
|
-
|
|
600
|
-
|
|
601
|
-
|
|
602
|
-
|
|
603
|
-
|
|
604
|
-
|
|
605
|
-
|
|
606
|
-
|
|
607
|
-
|
|
608
|
-
|
|
609
|
-
|
|
1193
|
+
self.event_log.append(
|
|
1194
|
+
EventRecord(
|
|
1195
|
+
event_id=entry.id or "",
|
|
1196
|
+
timestamp=entry.timestamp,
|
|
1197
|
+
session_id=session_id,
|
|
1198
|
+
agent=session.agent,
|
|
1199
|
+
tool=entry.tool,
|
|
1200
|
+
summary=entry.summary,
|
|
1201
|
+
success=entry.success,
|
|
1202
|
+
feature_id=entry.feature_id,
|
|
1203
|
+
drift_score=entry.drift_score,
|
|
1204
|
+
start_commit=session.start_commit,
|
|
1205
|
+
continued_from=session.continued_from,
|
|
1206
|
+
work_type=work_type,
|
|
1207
|
+
session_status=session.status,
|
|
1208
|
+
file_paths=file_paths,
|
|
1209
|
+
parent_session_id=session.parent_session,
|
|
1210
|
+
payload=entry.payload
|
|
1211
|
+
if isinstance(entry.payload, dict)
|
|
1212
|
+
else payload,
|
|
1213
|
+
)
|
|
1214
|
+
)
|
|
1215
|
+
except Exception as e:
|
|
610
1216
|
# Never break core tracking because of analytics logging.
|
|
611
|
-
|
|
1217
|
+
logger.warning(f"Failed to append to event log: {e}")
|
|
612
1218
|
|
|
613
1219
|
# Optional: keep SQLite index up to date if it already exists.
|
|
614
1220
|
# This keeps the dashboard fast while keeping Git as the source of truth.
|
|
@@ -619,29 +1225,37 @@ class SessionManager:
|
|
|
619
1225
|
|
|
620
1226
|
idx = AnalyticsIndex(index_path)
|
|
621
1227
|
idx.ensure_schema()
|
|
622
|
-
idx.upsert_session(
|
|
623
|
-
|
|
624
|
-
|
|
625
|
-
|
|
626
|
-
|
|
627
|
-
|
|
628
|
-
|
|
629
|
-
|
|
630
|
-
|
|
631
|
-
|
|
632
|
-
|
|
633
|
-
|
|
634
|
-
|
|
635
|
-
|
|
636
|
-
|
|
637
|
-
|
|
638
|
-
|
|
639
|
-
|
|
640
|
-
|
|
641
|
-
|
|
642
|
-
|
|
643
|
-
|
|
644
|
-
|
|
1228
|
+
idx.upsert_session(
|
|
1229
|
+
{
|
|
1230
|
+
"session_id": session_id,
|
|
1231
|
+
"agent": session.agent,
|
|
1232
|
+
"start_commit": session.start_commit,
|
|
1233
|
+
"continued_from": session.continued_from,
|
|
1234
|
+
"status": session.status,
|
|
1235
|
+
"started_at": session.started_at.isoformat(),
|
|
1236
|
+
"ended_at": session.ended_at.isoformat()
|
|
1237
|
+
if session.ended_at
|
|
1238
|
+
else None,
|
|
1239
|
+
}
|
|
1240
|
+
)
|
|
1241
|
+
idx.upsert_event(
|
|
1242
|
+
{
|
|
1243
|
+
"event_id": entry.id,
|
|
1244
|
+
"timestamp": entry.timestamp.isoformat(),
|
|
1245
|
+
"session_id": session_id,
|
|
1246
|
+
"tool": entry.tool,
|
|
1247
|
+
"summary": entry.summary,
|
|
1248
|
+
"success": entry.success,
|
|
1249
|
+
"feature_id": entry.feature_id,
|
|
1250
|
+
"drift_score": entry.drift_score,
|
|
1251
|
+
"file_paths": file_paths or [],
|
|
1252
|
+
"payload": entry.payload
|
|
1253
|
+
if isinstance(entry.payload, dict)
|
|
1254
|
+
else payload,
|
|
1255
|
+
}
|
|
1256
|
+
)
|
|
1257
|
+
except Exception as e:
|
|
1258
|
+
logger.warning(f"Failed to update SQLite index: {e}")
|
|
645
1259
|
|
|
646
1260
|
# Add to session
|
|
647
1261
|
session.add_activity(entry)
|
|
@@ -690,6 +1304,27 @@ class SessionManager:
|
|
|
690
1304
|
# Smart Attribution
|
|
691
1305
|
# =========================================================================
|
|
692
1306
|
|
|
1307
|
+
def _get_active_auto_spike(self, active_features: list[Node]) -> Node | None:
    """
    Find an active auto-generated spike (session-init, conversation-init, or transition).

    Auto-spikes take precedence over regular features for attribution
    since they're specifically designed to catch transitional activities.

    Returns:
        Active auto-spike or None
    """
    auto_subtypes = ("session-init", "conversation-init", "transition")
    for candidate in active_features:
        # Guard-clause filtering: reject anything that is not an
        # in-progress, auto-generated spike of a transitional subtype.
        if candidate.type != "spike":
            continue
        if not candidate.auto_generated:
            continue
        if candidate.spike_subtype not in auto_subtypes:
            continue
        if candidate.status != "in-progress":
            continue
        return candidate
    return None
|
|
1327
|
+
|
|
693
1328
|
def attribute_activity(
|
|
694
1329
|
self,
|
|
695
1330
|
tool: str,
|
|
@@ -699,7 +1334,9 @@ class SessionManager:
|
|
|
699
1334
|
agent: str | None = None,
|
|
700
1335
|
) -> dict[str, Any]:
|
|
701
1336
|
"""
|
|
702
|
-
Score and attribute an activity to the best matching feature.
|
|
1337
|
+
Score and attribute an activity to the best matching feature or auto-spike.
|
|
1338
|
+
|
|
1339
|
+
Auto-spikes have priority over features for transitional activities.
|
|
703
1340
|
|
|
704
1341
|
Args:
|
|
705
1342
|
tool: Tool name
|
|
@@ -711,6 +1348,18 @@ class SessionManager:
|
|
|
711
1348
|
Returns:
|
|
712
1349
|
Dict with feature_id, score, drift_score, reason
|
|
713
1350
|
"""
|
|
1351
|
+
# Priority 1: Check for active auto-generated spikes (session-init, transition)
|
|
1352
|
+
# These capture transitional activities before features are active
|
|
1353
|
+
active_spike = self._get_active_auto_spike(active_features)
|
|
1354
|
+
if active_spike:
|
|
1355
|
+
return {
|
|
1356
|
+
"feature_id": active_spike.id,
|
|
1357
|
+
"score": 1.0, # Perfect match - spike is designed for this
|
|
1358
|
+
"drift_score": 0.0, # No drift - this is expected
|
|
1359
|
+
"reason": f"auto_spike_{active_spike.spike_subtype}",
|
|
1360
|
+
}
|
|
1361
|
+
|
|
1362
|
+
# Priority 2: Regular feature attribution
|
|
714
1363
|
if not active_features:
|
|
715
1364
|
return {
|
|
716
1365
|
"feature_id": None,
|
|
@@ -831,9 +1480,20 @@ class SessionManager:
|
|
|
831
1480
|
def _extract_keywords(self, text: str) -> set[str]:
|
|
832
1481
|
"""Extract keywords from text."""
|
|
833
1482
|
# Simple keyword extraction - lowercase words > 3 chars
|
|
834
|
-
words = re.findall(r
|
|
1483
|
+
words = re.findall(r"\b[a-zA-Z]{3,}\b", text.lower())
|
|
835
1484
|
# Filter common words
|
|
836
|
-
stop_words = {
|
|
1485
|
+
stop_words = {
|
|
1486
|
+
"the",
|
|
1487
|
+
"and",
|
|
1488
|
+
"for",
|
|
1489
|
+
"with",
|
|
1490
|
+
"this",
|
|
1491
|
+
"that",
|
|
1492
|
+
"from",
|
|
1493
|
+
"are",
|
|
1494
|
+
"was",
|
|
1495
|
+
"were",
|
|
1496
|
+
}
|
|
837
1497
|
return set(words) - stop_words
|
|
838
1498
|
|
|
839
1499
|
def _score_keyword_overlap(self, text: str, keywords: set[str]) -> float:
|
|
@@ -846,6 +1506,139 @@ class SessionManager:
|
|
|
846
1506
|
|
|
847
1507
|
return len(overlap) / len(keywords) if keywords else 0.0
|
|
848
1508
|
|
|
1509
|
+
def _is_system_overhead(
|
|
1510
|
+
self, tool: str, summary: str, file_paths: list[str]
|
|
1511
|
+
) -> bool:
|
|
1512
|
+
"""
|
|
1513
|
+
Determine if an activity is system overhead that shouldn't count as drift.
|
|
1514
|
+
|
|
1515
|
+
System overhead includes:
|
|
1516
|
+
- Skill invocations for system skills (htmlgraph-tracker, etc.)
|
|
1517
|
+
- Read/Write operations on .htmlgraph/ metadata files
|
|
1518
|
+
- Infrastructure files (config, docs, build artifacts, IDE files)
|
|
1519
|
+
"""
|
|
1520
|
+
# System skills that are overhead, not feature work
|
|
1521
|
+
system_skills = {
|
|
1522
|
+
"htmlgraph-tracker",
|
|
1523
|
+
"htmlgraph:htmlgraph-tracker",
|
|
1524
|
+
}
|
|
1525
|
+
|
|
1526
|
+
# Check if this is a Skill invocation for a system skill
|
|
1527
|
+
if tool == "Skill":
|
|
1528
|
+
# Extract skill name from summary (format: "Skill: {'skill': 'htmlgraph-tracker'}")
|
|
1529
|
+
for skill_name in system_skills:
|
|
1530
|
+
if skill_name in summary.lower():
|
|
1531
|
+
return True
|
|
1532
|
+
|
|
1533
|
+
# Infrastructure file patterns to exclude from drift scoring
|
|
1534
|
+
infrastructure_patterns = [
|
|
1535
|
+
# HtmlGraph metadata
|
|
1536
|
+
".htmlgraph/",
|
|
1537
|
+
# Configuration files
|
|
1538
|
+
"pyproject.toml",
|
|
1539
|
+
"package.json",
|
|
1540
|
+
"package-lock.json",
|
|
1541
|
+
"setup.py",
|
|
1542
|
+
"setup.cfg",
|
|
1543
|
+
"requirements.txt",
|
|
1544
|
+
"requirements-dev.txt",
|
|
1545
|
+
".gitignore",
|
|
1546
|
+
".gitattributes",
|
|
1547
|
+
".editorconfig",
|
|
1548
|
+
"pytest.ini",
|
|
1549
|
+
"tox.ini",
|
|
1550
|
+
".coveragerc",
|
|
1551
|
+
# CI/CD configs
|
|
1552
|
+
".github/",
|
|
1553
|
+
".gitlab-ci.yml",
|
|
1554
|
+
".travis.yml",
|
|
1555
|
+
"circle.yml",
|
|
1556
|
+
".pre-commit-config.yaml",
|
|
1557
|
+
# Build and distribution
|
|
1558
|
+
"dist/",
|
|
1559
|
+
"build/",
|
|
1560
|
+
".eggs/",
|
|
1561
|
+
"*.egg-info/",
|
|
1562
|
+
"__pycache__/",
|
|
1563
|
+
"*.pyc",
|
|
1564
|
+
"*.pyo",
|
|
1565
|
+
"*.pyd",
|
|
1566
|
+
# IDE and editor files
|
|
1567
|
+
".vscode/",
|
|
1568
|
+
".idea/",
|
|
1569
|
+
"*.swp",
|
|
1570
|
+
"*.swo",
|
|
1571
|
+
"*~",
|
|
1572
|
+
".DS_Store",
|
|
1573
|
+
"Thumbs.db",
|
|
1574
|
+
# Testing artifacts
|
|
1575
|
+
".pytest_cache/",
|
|
1576
|
+
".coverage",
|
|
1577
|
+
"htmlcov/",
|
|
1578
|
+
".tox/",
|
|
1579
|
+
# Environment and secrets
|
|
1580
|
+
".env",
|
|
1581
|
+
".env.local",
|
|
1582
|
+
".env.*.local",
|
|
1583
|
+
# Documentation (consider docs/ as infrastructure)
|
|
1584
|
+
"README.md",
|
|
1585
|
+
"CONTRIBUTING.md",
|
|
1586
|
+
"LICENSE",
|
|
1587
|
+
"CHANGELOG.md",
|
|
1588
|
+
"docs/",
|
|
1589
|
+
# Other common infrastructure
|
|
1590
|
+
".contextune/",
|
|
1591
|
+
".parallel/",
|
|
1592
|
+
"node_modules/",
|
|
1593
|
+
".venv/",
|
|
1594
|
+
"venv/",
|
|
1595
|
+
]
|
|
1596
|
+
|
|
1597
|
+
# Check if any file paths match infrastructure patterns
|
|
1598
|
+
if file_paths:
|
|
1599
|
+
for path in file_paths:
|
|
1600
|
+
# Normalize path
|
|
1601
|
+
path_normalized = path.replace("\\", "/")
|
|
1602
|
+
path_lower = path_normalized.lower()
|
|
1603
|
+
|
|
1604
|
+
for pattern in infrastructure_patterns:
|
|
1605
|
+
pattern_lower = pattern.lower()
|
|
1606
|
+
|
|
1607
|
+
# Directory patterns (end with /)
|
|
1608
|
+
if pattern_lower.endswith("/"):
|
|
1609
|
+
# For wildcard directory patterns like "*.egg-info/"
|
|
1610
|
+
if "*" in pattern_lower:
|
|
1611
|
+
import fnmatch
|
|
1612
|
+
|
|
1613
|
+
# Check each path segment
|
|
1614
|
+
path_parts = path_lower.split("/")
|
|
1615
|
+
for part in path_parts:
|
|
1616
|
+
if fnmatch.fnmatch(part, pattern_lower.rstrip("/")):
|
|
1617
|
+
return True
|
|
1618
|
+
# For regular directory patterns like ".htmlgraph/"
|
|
1619
|
+
elif pattern_lower in path_lower or path_lower.startswith(
|
|
1620
|
+
pattern_lower
|
|
1621
|
+
):
|
|
1622
|
+
return True
|
|
1623
|
+
# Wildcard file patterns (e.g., *.pyc)
|
|
1624
|
+
elif "*" in pattern_lower:
|
|
1625
|
+
import fnmatch
|
|
1626
|
+
|
|
1627
|
+
# Check the filename (last part of path)
|
|
1628
|
+
filename = path_lower.split("/")[-1]
|
|
1629
|
+
if fnmatch.fnmatch(filename, pattern_lower):
|
|
1630
|
+
return True
|
|
1631
|
+
# Exact filename match
|
|
1632
|
+
else:
|
|
1633
|
+
# Check if path ends with the pattern (handles both absolute and relative)
|
|
1634
|
+
if (
|
|
1635
|
+
path_lower.endswith(pattern_lower)
|
|
1636
|
+
or f"/{pattern_lower}" in path_lower
|
|
1637
|
+
):
|
|
1638
|
+
return True
|
|
1639
|
+
|
|
1640
|
+
return False
|
|
1641
|
+
|
|
849
1642
|
# =========================================================================
|
|
850
1643
|
# Drift Detection
|
|
851
1644
|
# =========================================================================
|
|
@@ -866,12 +1659,15 @@ class SessionManager:
|
|
|
866
1659
|
|
|
867
1660
|
# Get recent activities for this feature
|
|
868
1661
|
feature_activities = [
|
|
869
|
-
a for a in session.activity_log[-20:]
|
|
870
|
-
if a.feature_id == feature_id
|
|
1662
|
+
a for a in session.activity_log[-20:] if a.feature_id == feature_id
|
|
871
1663
|
]
|
|
872
1664
|
|
|
873
1665
|
if not feature_activities:
|
|
874
|
-
return {
|
|
1666
|
+
return {
|
|
1667
|
+
"is_drifting": False,
|
|
1668
|
+
"drift_score": 0,
|
|
1669
|
+
"reasons": ["no_recent_activity"],
|
|
1670
|
+
}
|
|
875
1671
|
|
|
876
1672
|
# 1. Check time since last meaningful progress
|
|
877
1673
|
last_activity = feature_activities[-1]
|
|
@@ -884,7 +1680,7 @@ class SessionManager:
|
|
|
884
1680
|
recent_tools = [a.tool for a in feature_activities[-10:]]
|
|
885
1681
|
if len(recent_tools) >= 6:
|
|
886
1682
|
# Check for repetitive patterns
|
|
887
|
-
tool_counts = {}
|
|
1683
|
+
tool_counts: dict[str, int] = {}
|
|
888
1684
|
for t in recent_tools:
|
|
889
1685
|
tool_counts[t] = tool_counts.get(t, 0) + 1
|
|
890
1686
|
max_repeat = max(tool_counts.values())
|
|
@@ -893,7 +1689,9 @@ class SessionManager:
|
|
|
893
1689
|
reasons.append("repetitive_pattern")
|
|
894
1690
|
|
|
895
1691
|
# 3. Check average drift scores
|
|
896
|
-
drift_scores = [
|
|
1692
|
+
drift_scores = [
|
|
1693
|
+
a.drift_score for a in feature_activities if a.drift_score is not None
|
|
1694
|
+
]
|
|
897
1695
|
if drift_scores:
|
|
898
1696
|
avg_drift = sum(drift_scores) / len(drift_scores)
|
|
899
1697
|
if avg_drift > 0.6:
|
|
@@ -959,12 +1757,29 @@ class SessionManager:
|
|
|
959
1757
|
feature_id=feature_id,
|
|
960
1758
|
payload=payload,
|
|
961
1759
|
)
|
|
962
|
-
except Exception:
|
|
1760
|
+
except Exception as e:
|
|
963
1761
|
# Never break feature ops because of tracking.
|
|
1762
|
+
logger.warning(f"Failed to log work item action ({tool}): {e}")
|
|
964
1763
|
return
|
|
965
1764
|
|
|
966
1765
|
def get_active_features(self) -> list[Node]:
    """
    Get all features with status 'in-progress'.

    Uses a cache to avoid O(n) disk reads on every tool use.
    Cache is invalidated when features are started, completed, or released.
    """
    cache = self._active_features_cache
    if cache is not None and not self._features_cache_dirty:
        # Fast path: cache is populated and still valid.
        return cache
    # Slow path: recompute from disk and mark the cache clean.
    cache = self._compute_active_features()
    self._active_features_cache = cache
    self._features_cache_dirty = False
    return cache
|
|
1776
|
+
|
|
1777
|
+
def _compute_active_features(self) -> list[Node]:
|
|
1778
|
+
"""
|
|
1779
|
+
Compute active features by iterating all features from disk.
|
|
1780
|
+
|
|
1781
|
+
This is the slow path - only called when cache is dirty.
|
|
1782
|
+
"""
|
|
968
1783
|
features = []
|
|
969
1784
|
|
|
970
1785
|
# From features collection
|
|
@@ -1097,7 +1912,9 @@ class SessionManager:
|
|
|
1097
1912
|
# Check WIP limit
|
|
1098
1913
|
active = self.get_active_features()
|
|
1099
1914
|
if len(active) >= self.wip_limit and node not in active:
|
|
1100
|
-
raise ValueError(
|
|
1915
|
+
raise ValueError(
|
|
1916
|
+
f"WIP limit ({self.wip_limit}) reached. Complete existing work first."
|
|
1917
|
+
)
|
|
1101
1918
|
|
|
1102
1919
|
# Auto-claim if starting and not already claimed
|
|
1103
1920
|
if agent and not node.agent_assigned:
|
|
@@ -1111,8 +1928,20 @@ class SessionManager:
|
|
|
1111
1928
|
node.updated = datetime.now()
|
|
1112
1929
|
graph.update(node)
|
|
1113
1930
|
|
|
1931
|
+
# Invalidate active features cache
|
|
1932
|
+
self._features_cache_dirty = True
|
|
1933
|
+
|
|
1934
|
+
# Auto-complete any active auto-spikes (session-init or transition)
|
|
1935
|
+
# When a regular feature starts, transitional period is over
|
|
1936
|
+
if agent:
|
|
1937
|
+
self._complete_active_auto_spikes(agent, to_feature_id=feature_id)
|
|
1938
|
+
|
|
1114
1939
|
# Link feature to active session (bidirectional)
|
|
1115
|
-
active_session =
|
|
1940
|
+
active_session = (
|
|
1941
|
+
self.get_active_session_for_agent(agent)
|
|
1942
|
+
if agent
|
|
1943
|
+
else self.get_active_session()
|
|
1944
|
+
)
|
|
1116
1945
|
if agent and not active_session:
|
|
1117
1946
|
active_session = self._ensure_session_for_agent(agent)
|
|
1118
1947
|
if active_session:
|
|
@@ -1136,6 +1965,7 @@ class SessionManager:
|
|
|
1136
1965
|
*,
|
|
1137
1966
|
agent: str | None = None,
|
|
1138
1967
|
log_activity: bool = True,
|
|
1968
|
+
transcript_id: str | None = None,
|
|
1139
1969
|
) -> Node | None:
|
|
1140
1970
|
"""
|
|
1141
1971
|
Mark a feature as done.
|
|
@@ -1145,6 +1975,8 @@ class SessionManager:
|
|
|
1145
1975
|
collection: Collection name
|
|
1146
1976
|
agent: Optional agent name for attribution/logging
|
|
1147
1977
|
log_activity: If true, write an event record (requires agent)
|
|
1978
|
+
transcript_id: Optional transcript ID (agent session) that implemented this feature.
|
|
1979
|
+
Used to link parallel agent transcripts to features.
|
|
1148
1980
|
|
|
1149
1981
|
Returns:
|
|
1150
1982
|
Updated Node or None
|
|
@@ -1152,22 +1984,100 @@ class SessionManager:
|
|
|
1152
1984
|
graph = self._get_graph(collection)
|
|
1153
1985
|
node = graph.get(feature_id)
|
|
1154
1986
|
if not node:
|
|
1155
|
-
|
|
1987
|
+
# Node might have been created by SDK's collection (different graph instance)
|
|
1988
|
+
# Try reloading from disk
|
|
1989
|
+
node = graph.reload_node(feature_id)
|
|
1990
|
+
if not node:
|
|
1991
|
+
return None
|
|
1156
1992
|
|
|
1157
1993
|
node.status = "done"
|
|
1158
1994
|
node.updated = datetime.now()
|
|
1159
1995
|
node.properties["completed_at"] = datetime.now().isoformat()
|
|
1996
|
+
|
|
1997
|
+
# Link transcript if provided (for parallel agent tracking)
|
|
1998
|
+
if transcript_id:
|
|
1999
|
+
self._link_transcript_to_feature(node, transcript_id, graph)
|
|
2000
|
+
|
|
1160
2001
|
graph.update(node)
|
|
1161
2002
|
|
|
2003
|
+
# Invalidate active features cache
|
|
2004
|
+
self._features_cache_dirty = True
|
|
2005
|
+
|
|
1162
2006
|
if log_activity and agent:
|
|
2007
|
+
# Include transcript_id in payload for traceability
|
|
2008
|
+
payload = {"collection": collection, "action": "complete"}
|
|
2009
|
+
if transcript_id:
|
|
2010
|
+
payload["transcript_id"] = transcript_id
|
|
2011
|
+
|
|
1163
2012
|
self._maybe_log_work_item_action(
|
|
1164
2013
|
agent=agent,
|
|
1165
2014
|
tool="FeatureComplete",
|
|
1166
2015
|
summary=f"Completed: {collection}/{feature_id}",
|
|
1167
2016
|
feature_id=feature_id,
|
|
1168
|
-
payload=
|
|
2017
|
+
payload=payload,
|
|
1169
2018
|
)
|
|
1170
2019
|
|
|
2020
|
+
# Auto-import transcript on work item completion
|
|
2021
|
+
session = self.get_active_session(agent=agent)
|
|
2022
|
+
if session and session.transcript_id:
|
|
2023
|
+
try:
|
|
2024
|
+
from htmlgraph.transcript import TranscriptReader
|
|
2025
|
+
|
|
2026
|
+
reader = TranscriptReader()
|
|
2027
|
+
transcript = reader.read_session(session.transcript_id)
|
|
2028
|
+
if transcript:
|
|
2029
|
+
self.import_transcript_events(
|
|
2030
|
+
session_id=session.id,
|
|
2031
|
+
transcript_session=transcript,
|
|
2032
|
+
overwrite=True, # Replace hook data with high-fidelity transcript
|
|
2033
|
+
)
|
|
2034
|
+
except Exception as e:
|
|
2035
|
+
logger.warning(
|
|
2036
|
+
f"Failed to auto-import transcript on feature completion: {e}"
|
|
2037
|
+
)
|
|
2038
|
+
|
|
2039
|
+
# Auto-create transition spike for post-completion activities
|
|
2040
|
+
# This captures work between features. Completed when next feature starts,
|
|
2041
|
+
# or when a new conversation starts (completing previous conversation's spike).
|
|
2042
|
+
if session:
|
|
2043
|
+
self._create_transition_spike(session, from_feature_id=feature_id)
|
|
2044
|
+
|
|
2045
|
+
# Analyze session for anti-patterns and errors on completion
|
|
2046
|
+
# This surfaces feedback to the orchestrator about mistakes made
|
|
2047
|
+
if session:
|
|
2048
|
+
try:
|
|
2049
|
+
from htmlgraph.learning import LearningPersistence
|
|
2050
|
+
from htmlgraph.sdk import SDK
|
|
2051
|
+
|
|
2052
|
+
# Create SDK instance for analysis (shares same graph directory)
|
|
2053
|
+
sdk = SDK(agent=agent or "unknown", directory=self.graph_dir)
|
|
2054
|
+
learning = LearningPersistence(sdk)
|
|
2055
|
+
analysis = learning.analyze_for_orchestrator(session.id)
|
|
2056
|
+
node.properties["completion_analysis"] = analysis
|
|
2057
|
+
|
|
2058
|
+
# PERSIST learning insights to graph (not just ephemeral properties)
|
|
2059
|
+
# This creates queryable SessionInsight and Pattern nodes
|
|
2060
|
+
insight_id = learning.persist_session_insight(session.id)
|
|
2061
|
+
if insight_id:
|
|
2062
|
+
node.properties["insight_id"] = insight_id
|
|
2063
|
+
logger.debug(f"Persisted learning insight: {insight_id}")
|
|
2064
|
+
|
|
2065
|
+
# Persist patterns detected across sessions
|
|
2066
|
+
pattern_ids = learning.persist_patterns()
|
|
2067
|
+
if pattern_ids:
|
|
2068
|
+
logger.debug(f"Persisted {len(pattern_ids)} patterns")
|
|
2069
|
+
|
|
2070
|
+
# Log analysis summary if issues detected
|
|
2071
|
+
if analysis.get("summary", "").startswith("⚠️"):
|
|
2072
|
+
logger.info(
|
|
2073
|
+
f"Work item {feature_id} completed with issues: {analysis['summary']}"
|
|
2074
|
+
)
|
|
2075
|
+
|
|
2076
|
+
# Update node in graph with analysis
|
|
2077
|
+
graph.update(node)
|
|
2078
|
+
except Exception as e:
|
|
2079
|
+
logger.warning(f"Failed to analyze session on completion: {e}")
|
|
2080
|
+
|
|
1171
2081
|
return node
|
|
1172
2082
|
|
|
1173
2083
|
def set_primary_feature(
|
|
@@ -1335,40 +2245,12 @@ class SessionManager:
|
|
|
1335
2245
|
Returns:
|
|
1336
2246
|
Updated Node or None
|
|
1337
2247
|
"""
|
|
1338
|
-
|
|
1339
|
-
node = graph.get(feature_id)
|
|
1340
|
-
if not node:
|
|
1341
|
-
return None
|
|
1342
|
-
|
|
1343
|
-
# Check if already claimed by someone else
|
|
1344
|
-
if node.agent_assigned and node.agent_assigned != agent:
|
|
1345
|
-
# Check if session that claimed it is still active
|
|
1346
|
-
if node.claimed_by_session:
|
|
1347
|
-
session = self.get_session(node.claimed_by_session)
|
|
1348
|
-
if session and session.status == "active":
|
|
1349
|
-
raise ValueError(
|
|
1350
|
-
f"Feature '{feature_id}' is already claimed by {node.agent_assigned} "
|
|
1351
|
-
f"(session {node.claimed_by_session})"
|
|
1352
|
-
)
|
|
1353
|
-
|
|
1354
|
-
session = self._ensure_session_for_agent(agent)
|
|
1355
|
-
|
|
1356
|
-
node.agent_assigned = agent
|
|
1357
|
-
node.claimed_at = datetime.now()
|
|
1358
|
-
node.claimed_by_session = session.id
|
|
1359
|
-
node.updated = datetime.now()
|
|
1360
|
-
graph.update(node)
|
|
1361
|
-
|
|
1362
|
-
self._maybe_log_work_item_action(
|
|
1363
|
-
agent=agent,
|
|
1364
|
-
tool="FeatureClaim",
|
|
1365
|
-
summary=f"Claimed: {collection}/{feature_id}",
|
|
2248
|
+
return self.claiming_service.claim_feature(
|
|
1366
2249
|
feature_id=feature_id,
|
|
1367
|
-
|
|
2250
|
+
collection=collection,
|
|
2251
|
+
agent=agent,
|
|
1368
2252
|
)
|
|
1369
2253
|
|
|
1370
|
-
return node
|
|
1371
|
-
|
|
1372
2254
|
def release_feature(
|
|
1373
2255
|
self,
|
|
1374
2256
|
feature_id: str,
|
|
@@ -1387,30 +2269,12 @@ class SessionManager:
|
|
|
1387
2269
|
Returns:
|
|
1388
2270
|
Updated Node or None
|
|
1389
2271
|
"""
|
|
1390
|
-
|
|
1391
|
-
node = graph.get(feature_id)
|
|
1392
|
-
if not node:
|
|
1393
|
-
return None
|
|
1394
|
-
|
|
1395
|
-
if node.agent_assigned and node.agent_assigned != agent:
|
|
1396
|
-
raise ValueError(f"Feature '{feature_id}' is claimed by {node.agent_assigned}, not {agent}")
|
|
1397
|
-
|
|
1398
|
-
node.agent_assigned = None
|
|
1399
|
-
node.claimed_at = None
|
|
1400
|
-
node.claimed_by_session = None
|
|
1401
|
-
node.updated = datetime.now()
|
|
1402
|
-
graph.update(node)
|
|
1403
|
-
|
|
1404
|
-
self._maybe_log_work_item_action(
|
|
1405
|
-
agent=agent,
|
|
1406
|
-
tool="FeatureRelease",
|
|
1407
|
-
summary=f"Released: {collection}/{feature_id}",
|
|
2272
|
+
return self.claiming_service.release_feature(
|
|
1408
2273
|
feature_id=feature_id,
|
|
1409
|
-
|
|
2274
|
+
collection=collection,
|
|
2275
|
+
agent=agent,
|
|
1410
2276
|
)
|
|
1411
2277
|
|
|
1412
|
-
return node
|
|
1413
|
-
|
|
1414
2278
|
def auto_release_features(self, agent: str) -> list[str]:
|
|
1415
2279
|
"""
|
|
1416
2280
|
Release all features claimed by an agent.
|
|
@@ -1421,18 +2285,7 @@ class SessionManager:
|
|
|
1421
2285
|
Returns:
|
|
1422
2286
|
List of released feature IDs
|
|
1423
2287
|
"""
|
|
1424
|
-
|
|
1425
|
-
for collection in ["features", "bugs"]:
|
|
1426
|
-
graph = self._get_graph(collection)
|
|
1427
|
-
for node in graph:
|
|
1428
|
-
if node.agent_assigned == agent:
|
|
1429
|
-
node.agent_assigned = None
|
|
1430
|
-
node.claimed_at = None
|
|
1431
|
-
node.claimed_by_session = None
|
|
1432
|
-
node.updated = datetime.now()
|
|
1433
|
-
graph.update(node)
|
|
1434
|
-
released.append(node.id)
|
|
1435
|
-
return released
|
|
2288
|
+
return self.claiming_service.auto_release_features(agent)
|
|
1436
2289
|
|
|
1437
2290
|
def create_handoff(
|
|
1438
2291
|
self,
|
|
@@ -1470,7 +2323,9 @@ class SessionManager:
|
|
|
1470
2323
|
|
|
1471
2324
|
# Verify agent owns the feature
|
|
1472
2325
|
if node.agent_assigned and node.agent_assigned != agent:
|
|
1473
|
-
raise ValueError(
|
|
2326
|
+
raise ValueError(
|
|
2327
|
+
f"Feature '{feature_id}' is claimed by {node.agent_assigned}, not {agent}"
|
|
2328
|
+
)
|
|
1474
2329
|
|
|
1475
2330
|
# Set handoff fields
|
|
1476
2331
|
node.handoff_required = True
|
|
@@ -1522,13 +2377,17 @@ class SessionManager:
|
|
|
1522
2377
|
from htmlgraph.models import Edge
|
|
1523
2378
|
|
|
1524
2379
|
# Find the feature in either collection
|
|
1525
|
-
feature_node = self.features_graph.get(feature_id) or self.bugs_graph.get(
|
|
2380
|
+
feature_node = self.features_graph.get(feature_id) or self.bugs_graph.get(
|
|
2381
|
+
feature_id
|
|
2382
|
+
)
|
|
1526
2383
|
if not feature_node:
|
|
1527
2384
|
return
|
|
1528
2385
|
|
|
1529
2386
|
# Check if feature → session edge already exists
|
|
1530
2387
|
existing_sessions = feature_node.edges.get("implemented-in", [])
|
|
1531
|
-
feature_already_linked = any(
|
|
2388
|
+
feature_already_linked = any(
|
|
2389
|
+
edge.target_id == session_id for edge in existing_sessions
|
|
2390
|
+
)
|
|
1532
2391
|
|
|
1533
2392
|
if not feature_already_linked:
|
|
1534
2393
|
# Add feature → session edge
|
|
@@ -1557,6 +2416,72 @@ class SessionManager:
|
|
|
1557
2416
|
# Save the updated session
|
|
1558
2417
|
self.session_converter.save(session)
|
|
1559
2418
|
|
|
2419
|
+
def _link_transcript_to_feature(
|
|
2420
|
+
self,
|
|
2421
|
+
node: Node,
|
|
2422
|
+
transcript_id: str,
|
|
2423
|
+
graph: HtmlGraph,
|
|
2424
|
+
) -> None:
|
|
2425
|
+
"""
|
|
2426
|
+
Link a Claude Code transcript to a feature.
|
|
2427
|
+
|
|
2428
|
+
Adds an "implemented-by" edge to the feature pointing to the transcript.
|
|
2429
|
+
Also aggregates tool analytics from the transcript into feature properties.
|
|
2430
|
+
|
|
2431
|
+
Args:
|
|
2432
|
+
node: Feature node to link
|
|
2433
|
+
transcript_id: Claude Code transcript/agent session ID
|
|
2434
|
+
graph: Graph containing the node
|
|
2435
|
+
"""
|
|
2436
|
+
from htmlgraph.models import Edge
|
|
2437
|
+
|
|
2438
|
+
# Check if edge already exists
|
|
2439
|
+
existing_transcripts = node.edges.get("implemented-by", [])
|
|
2440
|
+
already_linked = any(
|
|
2441
|
+
edge.target_id == transcript_id for edge in existing_transcripts
|
|
2442
|
+
)
|
|
2443
|
+
|
|
2444
|
+
if already_linked:
|
|
2445
|
+
return
|
|
2446
|
+
|
|
2447
|
+
# Try to get transcript analytics
|
|
2448
|
+
tool_count = 0
|
|
2449
|
+
duration_seconds = 0
|
|
2450
|
+
tool_breakdown = {}
|
|
2451
|
+
|
|
2452
|
+
try:
|
|
2453
|
+
from htmlgraph.transcript import TranscriptReader
|
|
2454
|
+
|
|
2455
|
+
reader = TranscriptReader()
|
|
2456
|
+
transcript = reader.read_session(transcript_id)
|
|
2457
|
+
if transcript:
|
|
2458
|
+
tool_count = transcript.tool_call_count
|
|
2459
|
+
duration_seconds = int(transcript.duration_seconds or 0)
|
|
2460
|
+
tool_breakdown = transcript.tool_breakdown
|
|
2461
|
+
except Exception as e:
|
|
2462
|
+
logger.warning(
|
|
2463
|
+
f"Failed to get transcript analytics for {transcript_id}: {e}"
|
|
2464
|
+
)
|
|
2465
|
+
|
|
2466
|
+
# Add implemented-by edge with analytics
|
|
2467
|
+
edge = Edge(
|
|
2468
|
+
target_id=transcript_id,
|
|
2469
|
+
relationship="implemented-by",
|
|
2470
|
+
title=transcript_id,
|
|
2471
|
+
since=datetime.now(),
|
|
2472
|
+
properties={
|
|
2473
|
+
"tool_count": tool_count,
|
|
2474
|
+
"duration_seconds": duration_seconds,
|
|
2475
|
+
"tool_breakdown": tool_breakdown,
|
|
2476
|
+
},
|
|
2477
|
+
)
|
|
2478
|
+
node.add_edge(edge)
|
|
2479
|
+
|
|
2480
|
+
# Also store aggregated transcript analytics in properties
|
|
2481
|
+
if tool_count > 0:
|
|
2482
|
+
node.properties["transcript_tool_count"] = tool_count
|
|
2483
|
+
node.properties["transcript_duration_seconds"] = duration_seconds
|
|
2484
|
+
|
|
1560
2485
|
def _get_graph(self, collection: str) -> HtmlGraph:
|
|
1561
2486
|
"""Get graph for a collection."""
|
|
1562
2487
|
if collection == "bugs":
|
|
@@ -1573,6 +2498,7 @@ class SessionManager:
|
|
|
1573
2498
|
"""Get current git commit hash."""
|
|
1574
2499
|
try:
|
|
1575
2500
|
import subprocess
|
|
2501
|
+
|
|
1576
2502
|
result = subprocess.run(
|
|
1577
2503
|
["git", "rev-parse", "--short", "HEAD"],
|
|
1578
2504
|
capture_output=True,
|
|
@@ -1581,6 +2507,297 @@ class SessionManager:
|
|
|
1581
2507
|
)
|
|
1582
2508
|
if result.returncode == 0:
|
|
1583
2509
|
return result.stdout.strip()
|
|
1584
|
-
except Exception:
|
|
1585
|
-
|
|
2510
|
+
except Exception as e:
|
|
2511
|
+
logger.warning(f"Failed to get current git commit: {e}")
|
|
2512
|
+
return None
|
|
2513
|
+
|
|
2514
|
+
# =========================================================================
|
|
2515
|
+
# Claude Code Transcript Integration
|
|
2516
|
+
# =========================================================================
|
|
2517
|
+
|
|
2518
|
+
def link_transcript(
|
|
2519
|
+
self,
|
|
2520
|
+
session_id: str,
|
|
2521
|
+
transcript_id: str,
|
|
2522
|
+
transcript_path: str | None = None,
|
|
2523
|
+
git_branch: str | None = None,
|
|
2524
|
+
) -> Session | None:
|
|
2525
|
+
"""
|
|
2526
|
+
Link a Claude Code transcript to an HtmlGraph session.
|
|
2527
|
+
|
|
2528
|
+
Args:
|
|
2529
|
+
session_id: HtmlGraph session ID
|
|
2530
|
+
transcript_id: Claude Code session UUID (from JSONL filename)
|
|
2531
|
+
transcript_path: Path to the JSONL file
|
|
2532
|
+
git_branch: Git branch from transcript metadata
|
|
2533
|
+
|
|
2534
|
+
Returns:
|
|
2535
|
+
Updated Session or None if not found
|
|
2536
|
+
"""
|
|
2537
|
+
session = self.get_session(session_id)
|
|
2538
|
+
if not session:
|
|
2539
|
+
return None
|
|
2540
|
+
|
|
2541
|
+
session.transcript_id = transcript_id
|
|
2542
|
+
session.transcript_path = transcript_path
|
|
2543
|
+
session.transcript_synced_at = datetime.now()
|
|
2544
|
+
if git_branch:
|
|
2545
|
+
session.transcript_git_branch = git_branch
|
|
2546
|
+
|
|
2547
|
+
self.session_converter.save(session)
|
|
2548
|
+
return session
|
|
2549
|
+
|
|
2550
|
+
def find_session_by_transcript(
|
|
2551
|
+
self,
|
|
2552
|
+
transcript_id: str,
|
|
2553
|
+
) -> Session | None:
|
|
2554
|
+
"""
|
|
2555
|
+
Find an HtmlGraph session linked to a transcript.
|
|
2556
|
+
|
|
2557
|
+
Args:
|
|
2558
|
+
transcript_id: Claude Code session UUID
|
|
2559
|
+
|
|
2560
|
+
Returns:
|
|
2561
|
+
Session or None if not found
|
|
2562
|
+
"""
|
|
2563
|
+
for session in self.session_converter.load_all():
|
|
2564
|
+
if session.transcript_id == transcript_id:
|
|
2565
|
+
return session
|
|
1586
2566
|
return None
|
|
2567
|
+
|
|
2568
|
+
def import_transcript_events(
|
|
2569
|
+
self,
|
|
2570
|
+
session_id: str,
|
|
2571
|
+
transcript_session: Any, # TranscriptSession from transcript module
|
|
2572
|
+
overwrite: bool = False,
|
|
2573
|
+
) -> dict[str, int | str]:
|
|
2574
|
+
"""
|
|
2575
|
+
Import events from a Claude Code transcript into an HtmlGraph session.
|
|
2576
|
+
|
|
2577
|
+
This extracts tool uses and user messages from the transcript
|
|
2578
|
+
and adds them to the session's activity log.
|
|
2579
|
+
|
|
2580
|
+
Args:
|
|
2581
|
+
session_id: HtmlGraph session ID to import into
|
|
2582
|
+
transcript_session: TranscriptSession object from transcript module
|
|
2583
|
+
overwrite: If True, clear existing activities before import
|
|
2584
|
+
|
|
2585
|
+
Returns:
|
|
2586
|
+
Dict with import statistics
|
|
2587
|
+
"""
|
|
2588
|
+
session = self.get_session(session_id)
|
|
2589
|
+
if not session:
|
|
2590
|
+
return {"error": "session_not_found", "imported": 0}
|
|
2591
|
+
|
|
2592
|
+
if overwrite:
|
|
2593
|
+
session.activity_log = []
|
|
2594
|
+
session.event_count = 0
|
|
2595
|
+
|
|
2596
|
+
imported = 0
|
|
2597
|
+
skipped = 0
|
|
2598
|
+
|
|
2599
|
+
for entry in transcript_session.entries:
|
|
2600
|
+
# Skip non-actionable entries
|
|
2601
|
+
if entry.entry_type not in ("user", "tool_use"):
|
|
2602
|
+
skipped += 1
|
|
2603
|
+
continue
|
|
2604
|
+
|
|
2605
|
+
# Create ActivityEntry from transcript entry
|
|
2606
|
+
if entry.entry_type == "user":
|
|
2607
|
+
activity = ActivityEntry(
|
|
2608
|
+
id=f"tx-{entry.uuid[:8]}",
|
|
2609
|
+
timestamp=entry.timestamp,
|
|
2610
|
+
tool="UserQuery",
|
|
2611
|
+
summary=entry.to_summary(),
|
|
2612
|
+
success=True,
|
|
2613
|
+
payload={
|
|
2614
|
+
"source": "transcript",
|
|
2615
|
+
"transcript_uuid": entry.uuid,
|
|
2616
|
+
"message_content": entry.message_content,
|
|
2617
|
+
},
|
|
2618
|
+
)
|
|
2619
|
+
elif entry.entry_type == "tool_use":
|
|
2620
|
+
activity = ActivityEntry(
|
|
2621
|
+
id=f"tx-{entry.uuid[:8]}",
|
|
2622
|
+
timestamp=entry.timestamp,
|
|
2623
|
+
tool=entry.tool_name or "Unknown",
|
|
2624
|
+
summary=entry.to_summary(),
|
|
2625
|
+
success=True, # Assume success unless we have result
|
|
2626
|
+
payload={
|
|
2627
|
+
"source": "transcript",
|
|
2628
|
+
"transcript_uuid": entry.uuid,
|
|
2629
|
+
"tool_input": entry.tool_input,
|
|
2630
|
+
"thinking": entry.thinking,
|
|
2631
|
+
},
|
|
2632
|
+
)
|
|
2633
|
+
else:
|
|
2634
|
+
continue
|
|
2635
|
+
|
|
2636
|
+
session.add_activity(activity)
|
|
2637
|
+
imported += 1
|
|
2638
|
+
|
|
2639
|
+
# Also append to JSONL event log
|
|
2640
|
+
try:
|
|
2641
|
+
from htmlgraph.work_type_utils import infer_work_type_from_id
|
|
2642
|
+
|
|
2643
|
+
work_type = infer_work_type_from_id(activity.feature_id)
|
|
2644
|
+
|
|
2645
|
+
self.event_log.append(
|
|
2646
|
+
EventRecord(
|
|
2647
|
+
event_id=activity.id or "",
|
|
2648
|
+
timestamp=activity.timestamp,
|
|
2649
|
+
session_id=session_id,
|
|
2650
|
+
agent=session.agent,
|
|
2651
|
+
tool=activity.tool,
|
|
2652
|
+
summary=activity.summary,
|
|
2653
|
+
success=activity.success,
|
|
2654
|
+
feature_id=activity.feature_id,
|
|
2655
|
+
drift_score=None,
|
|
2656
|
+
start_commit=session.start_commit,
|
|
2657
|
+
continued_from=session.continued_from,
|
|
2658
|
+
work_type=work_type,
|
|
2659
|
+
session_status=session.status,
|
|
2660
|
+
payload=activity.payload
|
|
2661
|
+
if isinstance(activity.payload, dict)
|
|
2662
|
+
else None,
|
|
2663
|
+
)
|
|
2664
|
+
)
|
|
2665
|
+
except Exception as e:
|
|
2666
|
+
logger.warning(f"Failed to append transcript event to event log: {e}")
|
|
2667
|
+
|
|
2668
|
+
# Update transcript link
|
|
2669
|
+
session.transcript_id = transcript_session.session_id
|
|
2670
|
+
session.transcript_path = str(transcript_session.path)
|
|
2671
|
+
session.transcript_synced_at = datetime.now()
|
|
2672
|
+
if transcript_session.git_branch:
|
|
2673
|
+
session.transcript_git_branch = transcript_session.git_branch
|
|
2674
|
+
|
|
2675
|
+
self.session_converter.save(session)
|
|
2676
|
+
|
|
2677
|
+
return {
|
|
2678
|
+
"imported": imported,
|
|
2679
|
+
"skipped": skipped,
|
|
2680
|
+
"total_entries": len(transcript_session.entries),
|
|
2681
|
+
}
|
|
2682
|
+
|
|
2683
|
+
def auto_link_transcript_by_branch(
|
|
2684
|
+
self,
|
|
2685
|
+
git_branch: str,
|
|
2686
|
+
agent: str | None = None,
|
|
2687
|
+
) -> list[tuple[str, str]]:
|
|
2688
|
+
"""
|
|
2689
|
+
Auto-link HtmlGraph sessions to transcripts based on git branch.
|
|
2690
|
+
|
|
2691
|
+
This finds sessions and transcripts that share the same git branch
|
|
2692
|
+
and links them together.
|
|
2693
|
+
|
|
2694
|
+
Args:
|
|
2695
|
+
git_branch: Git branch to match
|
|
2696
|
+
agent: Optional agent filter
|
|
2697
|
+
|
|
2698
|
+
Returns:
|
|
2699
|
+
List of (session_id, transcript_id) tuples that were linked
|
|
2700
|
+
"""
|
|
2701
|
+
from htmlgraph.transcript import TranscriptReader
|
|
2702
|
+
|
|
2703
|
+
linked: list[tuple[str, str]] = []
|
|
2704
|
+
reader = TranscriptReader()
|
|
2705
|
+
|
|
2706
|
+
# Find transcripts for this branch
|
|
2707
|
+
project_path = self.graph_dir.parent
|
|
2708
|
+
transcripts = reader.find_sessions_for_branch(git_branch, project_path)
|
|
2709
|
+
|
|
2710
|
+
if not transcripts:
|
|
2711
|
+
return linked
|
|
2712
|
+
|
|
2713
|
+
# Find sessions that might match
|
|
2714
|
+
sessions = self.session_converter.load_all()
|
|
2715
|
+
if agent:
|
|
2716
|
+
sessions = [s for s in sessions if s.agent == agent]
|
|
2717
|
+
|
|
2718
|
+
# Helper to normalize datetimes for comparison
|
|
2719
|
+
# (handles timezone-aware vs timezone-naive)
|
|
2720
|
+
def normalize_dt(dt: datetime | None) -> datetime | None:
|
|
2721
|
+
if dt is None:
|
|
2722
|
+
return None
|
|
2723
|
+
# If timezone-aware, convert to naive UTC
|
|
2724
|
+
if dt.tzinfo is not None:
|
|
2725
|
+
return dt.astimezone(timezone.utc).replace(tzinfo=None)
|
|
2726
|
+
return dt
|
|
2727
|
+
|
|
2728
|
+
# Match by time overlap and git branch
|
|
2729
|
+
for transcript in transcripts:
|
|
2730
|
+
if not transcript.started_at:
|
|
2731
|
+
continue
|
|
2732
|
+
|
|
2733
|
+
transcript_start = normalize_dt(transcript.started_at)
|
|
2734
|
+
transcript_end = normalize_dt(transcript.ended_at)
|
|
2735
|
+
|
|
2736
|
+
for session in sessions:
|
|
2737
|
+
# Skip if already linked
|
|
2738
|
+
if session.transcript_id:
|
|
2739
|
+
continue
|
|
2740
|
+
|
|
2741
|
+
session_start = normalize_dt(session.started_at)
|
|
2742
|
+
session_end = normalize_dt(session.ended_at)
|
|
2743
|
+
|
|
2744
|
+
# Check if session overlaps with transcript time
|
|
2745
|
+
if session_start and transcript_end:
|
|
2746
|
+
if session_start > transcript_end:
|
|
2747
|
+
continue # Session started after transcript ended
|
|
2748
|
+
|
|
2749
|
+
if session_end and transcript_start:
|
|
2750
|
+
if session_end < transcript_start:
|
|
2751
|
+
continue # Session ended before transcript started
|
|
2752
|
+
|
|
2753
|
+
# Link them
|
|
2754
|
+
self.link_transcript(
|
|
2755
|
+
session_id=session.id,
|
|
2756
|
+
transcript_id=transcript.session_id,
|
|
2757
|
+
transcript_path=str(transcript.path),
|
|
2758
|
+
git_branch=git_branch,
|
|
2759
|
+
)
|
|
2760
|
+
linked.append((session.id, transcript.session_id))
|
|
2761
|
+
break # One transcript per session
|
|
2762
|
+
|
|
2763
|
+
return linked
|
|
2764
|
+
|
|
2765
|
+
def get_transcript_stats(self, session_id: str) -> dict[str, Any] | None:
|
|
2766
|
+
"""
|
|
2767
|
+
Get transcript statistics for a session.
|
|
2768
|
+
|
|
2769
|
+
Args:
|
|
2770
|
+
session_id: HtmlGraph session ID
|
|
2771
|
+
|
|
2772
|
+
Returns:
|
|
2773
|
+
Dict with transcript stats or None if no transcript linked
|
|
2774
|
+
"""
|
|
2775
|
+
session = self.get_session(session_id)
|
|
2776
|
+
if not session or not session.transcript_id:
|
|
2777
|
+
return None
|
|
2778
|
+
|
|
2779
|
+
from htmlgraph.transcript import TranscriptReader
|
|
2780
|
+
|
|
2781
|
+
reader = TranscriptReader()
|
|
2782
|
+
transcript = reader.read_session(session.transcript_id)
|
|
2783
|
+
|
|
2784
|
+
if not transcript:
|
|
2785
|
+
return {
|
|
2786
|
+
"transcript_id": session.transcript_id,
|
|
2787
|
+
"error": "transcript_not_found",
|
|
2788
|
+
}
|
|
2789
|
+
|
|
2790
|
+
return {
|
|
2791
|
+
"transcript_id": session.transcript_id,
|
|
2792
|
+
"transcript_path": session.transcript_path,
|
|
2793
|
+
"synced_at": session.transcript_synced_at.isoformat()
|
|
2794
|
+
if session.transcript_synced_at
|
|
2795
|
+
else None,
|
|
2796
|
+
"git_branch": session.transcript_git_branch,
|
|
2797
|
+
"user_messages": transcript.user_message_count,
|
|
2798
|
+
"tool_calls": transcript.tool_call_count,
|
|
2799
|
+
"tool_breakdown": transcript.tool_breakdown,
|
|
2800
|
+
"duration_seconds": transcript.duration_seconds,
|
|
2801
|
+
"has_thinking_traces": transcript.has_thinking_traces(),
|
|
2802
|
+
"entry_count": len(transcript.entries),
|
|
2803
|
+
}
|