htmlgraph 0.20.1__py3-none-any.whl → 0.27.5__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- htmlgraph/.htmlgraph/.session-warning-state.json +6 -0
- htmlgraph/.htmlgraph/agents.json +72 -0
- htmlgraph/.htmlgraph/htmlgraph.db +0 -0
- htmlgraph/__init__.py +51 -1
- htmlgraph/__init__.pyi +123 -0
- htmlgraph/agent_detection.py +26 -10
- htmlgraph/agent_registry.py +2 -1
- htmlgraph/analytics/__init__.py +8 -1
- htmlgraph/analytics/cli.py +86 -20
- htmlgraph/analytics/cost_analyzer.py +391 -0
- htmlgraph/analytics/cost_monitor.py +664 -0
- htmlgraph/analytics/cost_reporter.py +675 -0
- htmlgraph/analytics/cross_session.py +617 -0
- htmlgraph/analytics/dependency.py +10 -6
- htmlgraph/analytics/pattern_learning.py +771 -0
- htmlgraph/analytics/session_graph.py +707 -0
- htmlgraph/analytics/strategic/__init__.py +80 -0
- htmlgraph/analytics/strategic/cost_optimizer.py +611 -0
- htmlgraph/analytics/strategic/pattern_detector.py +876 -0
- htmlgraph/analytics/strategic/preference_manager.py +709 -0
- htmlgraph/analytics/strategic/suggestion_engine.py +747 -0
- htmlgraph/analytics/work_type.py +67 -27
- htmlgraph/analytics_index.py +53 -20
- htmlgraph/api/__init__.py +3 -0
- htmlgraph/api/cost_alerts_websocket.py +416 -0
- htmlgraph/api/main.py +2498 -0
- htmlgraph/api/static/htmx.min.js +1 -0
- htmlgraph/api/static/style-redesign.css +1344 -0
- htmlgraph/api/static/style.css +1079 -0
- htmlgraph/api/templates/dashboard-redesign.html +1366 -0
- htmlgraph/api/templates/dashboard.html +794 -0
- htmlgraph/api/templates/partials/activity-feed-hierarchical.html +326 -0
- htmlgraph/api/templates/partials/activity-feed.html +1100 -0
- htmlgraph/api/templates/partials/agents-redesign.html +317 -0
- htmlgraph/api/templates/partials/agents.html +317 -0
- htmlgraph/api/templates/partials/event-traces.html +373 -0
- htmlgraph/api/templates/partials/features-kanban-redesign.html +509 -0
- htmlgraph/api/templates/partials/features.html +578 -0
- htmlgraph/api/templates/partials/metrics-redesign.html +346 -0
- htmlgraph/api/templates/partials/metrics.html +346 -0
- htmlgraph/api/templates/partials/orchestration-redesign.html +443 -0
- htmlgraph/api/templates/partials/orchestration.html +198 -0
- htmlgraph/api/templates/partials/spawners.html +375 -0
- htmlgraph/api/templates/partials/work-items.html +613 -0
- htmlgraph/api/websocket.py +538 -0
- htmlgraph/archive/__init__.py +24 -0
- htmlgraph/archive/bloom.py +234 -0
- htmlgraph/archive/fts.py +297 -0
- htmlgraph/archive/manager.py +583 -0
- htmlgraph/archive/search.py +244 -0
- htmlgraph/atomic_ops.py +560 -0
- htmlgraph/attribute_index.py +2 -1
- htmlgraph/bounded_paths.py +539 -0
- htmlgraph/builders/base.py +57 -2
- htmlgraph/builders/bug.py +19 -3
- htmlgraph/builders/chore.py +19 -3
- htmlgraph/builders/epic.py +19 -3
- htmlgraph/builders/feature.py +27 -3
- htmlgraph/builders/insight.py +2 -1
- htmlgraph/builders/metric.py +2 -1
- htmlgraph/builders/pattern.py +2 -1
- htmlgraph/builders/phase.py +19 -3
- htmlgraph/builders/spike.py +29 -3
- htmlgraph/builders/track.py +42 -1
- htmlgraph/cigs/__init__.py +81 -0
- htmlgraph/cigs/autonomy.py +385 -0
- htmlgraph/cigs/cost.py +475 -0
- htmlgraph/cigs/messages_basic.py +472 -0
- htmlgraph/cigs/messaging.py +365 -0
- htmlgraph/cigs/models.py +771 -0
- htmlgraph/cigs/pattern_storage.py +427 -0
- htmlgraph/cigs/patterns.py +503 -0
- htmlgraph/cigs/posttool_analyzer.py +234 -0
- htmlgraph/cigs/reporter.py +818 -0
- htmlgraph/cigs/tracker.py +317 -0
- htmlgraph/cli/.htmlgraph/.session-warning-state.json +6 -0
- htmlgraph/cli/.htmlgraph/agents.json +72 -0
- htmlgraph/cli/.htmlgraph/htmlgraph.db +0 -0
- htmlgraph/cli/__init__.py +42 -0
- htmlgraph/cli/__main__.py +6 -0
- htmlgraph/cli/analytics.py +1424 -0
- htmlgraph/cli/base.py +685 -0
- htmlgraph/cli/constants.py +206 -0
- htmlgraph/cli/core.py +954 -0
- htmlgraph/cli/main.py +147 -0
- htmlgraph/cli/models.py +475 -0
- htmlgraph/cli/templates/__init__.py +1 -0
- htmlgraph/cli/templates/cost_dashboard.py +399 -0
- htmlgraph/cli/work/__init__.py +239 -0
- htmlgraph/cli/work/browse.py +115 -0
- htmlgraph/cli/work/features.py +568 -0
- htmlgraph/cli/work/orchestration.py +676 -0
- htmlgraph/cli/work/report.py +728 -0
- htmlgraph/cli/work/sessions.py +466 -0
- htmlgraph/cli/work/snapshot.py +559 -0
- htmlgraph/cli/work/tracks.py +486 -0
- htmlgraph/cli_commands/__init__.py +1 -0
- htmlgraph/cli_commands/feature.py +195 -0
- htmlgraph/cli_framework.py +115 -0
- htmlgraph/collections/__init__.py +2 -0
- htmlgraph/collections/base.py +197 -14
- htmlgraph/collections/bug.py +2 -1
- htmlgraph/collections/chore.py +2 -1
- htmlgraph/collections/epic.py +2 -1
- htmlgraph/collections/feature.py +2 -1
- htmlgraph/collections/insight.py +2 -1
- htmlgraph/collections/metric.py +2 -1
- htmlgraph/collections/pattern.py +2 -1
- htmlgraph/collections/phase.py +2 -1
- htmlgraph/collections/session.py +194 -0
- htmlgraph/collections/spike.py +13 -2
- htmlgraph/collections/task_delegation.py +241 -0
- htmlgraph/collections/todo.py +14 -1
- htmlgraph/collections/traces.py +487 -0
- htmlgraph/config/cost_models.json +56 -0
- htmlgraph/config.py +190 -0
- htmlgraph/context_analytics.py +2 -1
- htmlgraph/converter.py +116 -7
- htmlgraph/cost_analysis/__init__.py +5 -0
- htmlgraph/cost_analysis/analyzer.py +438 -0
- htmlgraph/dashboard.html +2246 -248
- htmlgraph/dashboard.html.backup +6592 -0
- htmlgraph/dashboard.html.bak +7181 -0
- htmlgraph/dashboard.html.bak2 +7231 -0
- htmlgraph/dashboard.html.bak3 +7232 -0
- htmlgraph/db/__init__.py +38 -0
- htmlgraph/db/queries.py +790 -0
- htmlgraph/db/schema.py +1788 -0
- htmlgraph/decorators.py +317 -0
- htmlgraph/dependency_models.py +2 -1
- htmlgraph/deploy.py +26 -27
- htmlgraph/docs/API_REFERENCE.md +841 -0
- htmlgraph/docs/HTTP_API.md +750 -0
- htmlgraph/docs/INTEGRATION_GUIDE.md +752 -0
- htmlgraph/docs/ORCHESTRATION_PATTERNS.md +717 -0
- htmlgraph/docs/README.md +532 -0
- htmlgraph/docs/__init__.py +77 -0
- htmlgraph/docs/docs_version.py +55 -0
- htmlgraph/docs/metadata.py +93 -0
- htmlgraph/docs/migrations.py +232 -0
- htmlgraph/docs/template_engine.py +143 -0
- htmlgraph/docs/templates/_sections/cli_reference.md.j2 +52 -0
- htmlgraph/docs/templates/_sections/core_concepts.md.j2 +29 -0
- htmlgraph/docs/templates/_sections/sdk_basics.md.j2 +69 -0
- htmlgraph/docs/templates/base_agents.md.j2 +78 -0
- htmlgraph/docs/templates/example_user_override.md.j2 +47 -0
- htmlgraph/docs/version_check.py +163 -0
- htmlgraph/edge_index.py +2 -1
- htmlgraph/error_handler.py +544 -0
- htmlgraph/event_log.py +86 -37
- htmlgraph/event_migration.py +2 -1
- htmlgraph/file_watcher.py +12 -8
- htmlgraph/find_api.py +2 -1
- htmlgraph/git_events.py +67 -9
- htmlgraph/hooks/.htmlgraph/.session-warning-state.json +6 -0
- htmlgraph/hooks/.htmlgraph/agents.json +72 -0
- htmlgraph/hooks/.htmlgraph/index.sqlite +0 -0
- htmlgraph/hooks/__init__.py +8 -0
- htmlgraph/hooks/bootstrap.py +169 -0
- htmlgraph/hooks/cigs_pretool_enforcer.py +354 -0
- htmlgraph/hooks/concurrent_sessions.py +208 -0
- htmlgraph/hooks/context.py +350 -0
- htmlgraph/hooks/drift_handler.py +525 -0
- htmlgraph/hooks/event_tracker.py +790 -99
- htmlgraph/hooks/git_commands.py +175 -0
- htmlgraph/hooks/installer.py +5 -1
- htmlgraph/hooks/orchestrator.py +327 -76
- htmlgraph/hooks/orchestrator_reflector.py +31 -4
- htmlgraph/hooks/post_tool_use_failure.py +32 -7
- htmlgraph/hooks/post_tool_use_handler.py +257 -0
- htmlgraph/hooks/posttooluse.py +92 -19
- htmlgraph/hooks/pretooluse.py +527 -7
- htmlgraph/hooks/prompt_analyzer.py +637 -0
- htmlgraph/hooks/session_handler.py +668 -0
- htmlgraph/hooks/session_summary.py +395 -0
- htmlgraph/hooks/state_manager.py +504 -0
- htmlgraph/hooks/subagent_detection.py +202 -0
- htmlgraph/hooks/subagent_stop.py +369 -0
- htmlgraph/hooks/task_enforcer.py +99 -4
- htmlgraph/hooks/validator.py +212 -91
- htmlgraph/ids.py +2 -1
- htmlgraph/learning.py +125 -100
- htmlgraph/mcp_server.py +2 -1
- htmlgraph/models.py +217 -18
- htmlgraph/operations/README.md +62 -0
- htmlgraph/operations/__init__.py +79 -0
- htmlgraph/operations/analytics.py +339 -0
- htmlgraph/operations/bootstrap.py +289 -0
- htmlgraph/operations/events.py +244 -0
- htmlgraph/operations/fastapi_server.py +231 -0
- htmlgraph/operations/hooks.py +350 -0
- htmlgraph/operations/initialization.py +597 -0
- htmlgraph/operations/initialization.py.backup +228 -0
- htmlgraph/operations/server.py +303 -0
- htmlgraph/orchestration/__init__.py +58 -0
- htmlgraph/orchestration/claude_launcher.py +179 -0
- htmlgraph/orchestration/command_builder.py +72 -0
- htmlgraph/orchestration/headless_spawner.py +281 -0
- htmlgraph/orchestration/live_events.py +377 -0
- htmlgraph/orchestration/model_selection.py +327 -0
- htmlgraph/orchestration/plugin_manager.py +140 -0
- htmlgraph/orchestration/prompts.py +137 -0
- htmlgraph/orchestration/spawner_event_tracker.py +383 -0
- htmlgraph/orchestration/spawners/__init__.py +16 -0
- htmlgraph/orchestration/spawners/base.py +194 -0
- htmlgraph/orchestration/spawners/claude.py +173 -0
- htmlgraph/orchestration/spawners/codex.py +435 -0
- htmlgraph/orchestration/spawners/copilot.py +294 -0
- htmlgraph/orchestration/spawners/gemini.py +471 -0
- htmlgraph/orchestration/subprocess_runner.py +36 -0
- htmlgraph/{orchestration.py → orchestration/task_coordination.py} +16 -8
- htmlgraph/orchestration.md +563 -0
- htmlgraph/orchestrator-system-prompt-optimized.txt +863 -0
- htmlgraph/orchestrator.py +2 -1
- htmlgraph/orchestrator_config.py +357 -0
- htmlgraph/orchestrator_mode.py +115 -4
- htmlgraph/parallel.py +2 -1
- htmlgraph/parser.py +86 -6
- htmlgraph/path_query.py +608 -0
- htmlgraph/pattern_matcher.py +636 -0
- htmlgraph/pydantic_models.py +476 -0
- htmlgraph/quality_gates.py +350 -0
- htmlgraph/query_builder.py +2 -1
- htmlgraph/query_composer.py +509 -0
- htmlgraph/reflection.py +443 -0
- htmlgraph/refs.py +344 -0
- htmlgraph/repo_hash.py +512 -0
- htmlgraph/repositories/__init__.py +292 -0
- htmlgraph/repositories/analytics_repository.py +455 -0
- htmlgraph/repositories/analytics_repository_standard.py +628 -0
- htmlgraph/repositories/feature_repository.py +581 -0
- htmlgraph/repositories/feature_repository_htmlfile.py +668 -0
- htmlgraph/repositories/feature_repository_memory.py +607 -0
- htmlgraph/repositories/feature_repository_sqlite.py +858 -0
- htmlgraph/repositories/filter_service.py +620 -0
- htmlgraph/repositories/filter_service_standard.py +445 -0
- htmlgraph/repositories/shared_cache.py +621 -0
- htmlgraph/repositories/shared_cache_memory.py +395 -0
- htmlgraph/repositories/track_repository.py +552 -0
- htmlgraph/repositories/track_repository_htmlfile.py +619 -0
- htmlgraph/repositories/track_repository_memory.py +508 -0
- htmlgraph/repositories/track_repository_sqlite.py +711 -0
- htmlgraph/sdk/__init__.py +398 -0
- htmlgraph/sdk/__init__.pyi +14 -0
- htmlgraph/sdk/analytics/__init__.py +19 -0
- htmlgraph/sdk/analytics/engine.py +155 -0
- htmlgraph/sdk/analytics/helpers.py +178 -0
- htmlgraph/sdk/analytics/registry.py +109 -0
- htmlgraph/sdk/base.py +484 -0
- htmlgraph/sdk/constants.py +216 -0
- htmlgraph/sdk/core.pyi +308 -0
- htmlgraph/sdk/discovery.py +120 -0
- htmlgraph/sdk/help/__init__.py +12 -0
- htmlgraph/sdk/help/mixin.py +699 -0
- htmlgraph/sdk/mixins/__init__.py +15 -0
- htmlgraph/sdk/mixins/attribution.py +113 -0
- htmlgraph/sdk/mixins/mixin.py +410 -0
- htmlgraph/sdk/operations/__init__.py +12 -0
- htmlgraph/sdk/operations/mixin.py +427 -0
- htmlgraph/sdk/orchestration/__init__.py +17 -0
- htmlgraph/sdk/orchestration/coordinator.py +203 -0
- htmlgraph/sdk/orchestration/spawner.py +204 -0
- htmlgraph/sdk/planning/__init__.py +19 -0
- htmlgraph/sdk/planning/bottlenecks.py +93 -0
- htmlgraph/sdk/planning/mixin.py +211 -0
- htmlgraph/sdk/planning/parallel.py +186 -0
- htmlgraph/sdk/planning/queue.py +210 -0
- htmlgraph/sdk/planning/recommendations.py +87 -0
- htmlgraph/sdk/planning/smart_planning.py +319 -0
- htmlgraph/sdk/session/__init__.py +19 -0
- htmlgraph/sdk/session/continuity.py +57 -0
- htmlgraph/sdk/session/handoff.py +110 -0
- htmlgraph/sdk/session/info.py +309 -0
- htmlgraph/sdk/session/manager.py +103 -0
- htmlgraph/sdk/strategic/__init__.py +26 -0
- htmlgraph/sdk/strategic/mixin.py +563 -0
- htmlgraph/server.py +295 -107
- htmlgraph/session_hooks.py +300 -0
- htmlgraph/session_manager.py +285 -3
- htmlgraph/session_registry.py +587 -0
- htmlgraph/session_state.py +436 -0
- htmlgraph/session_warning.py +2 -1
- htmlgraph/sessions/__init__.py +23 -0
- htmlgraph/sessions/handoff.py +756 -0
- htmlgraph/system_prompts.py +450 -0
- htmlgraph/templates/orchestration-view.html +350 -0
- htmlgraph/track_builder.py +33 -1
- htmlgraph/track_manager.py +38 -0
- htmlgraph/transcript.py +18 -5
- htmlgraph/validation.py +115 -0
- htmlgraph/watch.py +2 -1
- htmlgraph/work_type_utils.py +2 -1
- {htmlgraph-0.20.1.data → htmlgraph-0.27.5.data}/data/htmlgraph/dashboard.html +2246 -248
- {htmlgraph-0.20.1.dist-info → htmlgraph-0.27.5.dist-info}/METADATA +95 -64
- htmlgraph-0.27.5.dist-info/RECORD +337 -0
- {htmlgraph-0.20.1.dist-info → htmlgraph-0.27.5.dist-info}/entry_points.txt +1 -1
- htmlgraph/cli.py +0 -4839
- htmlgraph/sdk.py +0 -2359
- htmlgraph-0.20.1.dist-info/RECORD +0 -118
- {htmlgraph-0.20.1.data → htmlgraph-0.27.5.data}/data/htmlgraph/styles.css +0 -0
- {htmlgraph-0.20.1.data → htmlgraph-0.27.5.data}/data/htmlgraph/templates/AGENTS.md.template +0 -0
- {htmlgraph-0.20.1.data → htmlgraph-0.27.5.data}/data/htmlgraph/templates/CLAUDE.md.template +0 -0
- {htmlgraph-0.20.1.data → htmlgraph-0.27.5.data}/data/htmlgraph/templates/GEMINI.md.template +0 -0
- {htmlgraph-0.20.1.dist-info → htmlgraph-0.27.5.dist-info}/WHEEL +0 -0
htmlgraph/db/schema.py
ADDED
|
@@ -0,0 +1,1788 @@
|
|
|
1
|
+
"""
|
|
2
|
+
HtmlGraph SQLite Schema - Phase 1 Backend Storage
|
|
3
|
+
|
|
4
|
+
This module defines the comprehensive SQLite schema for HtmlGraph agent observability,
|
|
5
|
+
replacing HTML file storage with structured relational database.
|
|
6
|
+
|
|
7
|
+
Key design principles:
|
|
8
|
+
- Normalize data while preserving flexibility via JSON columns
|
|
9
|
+
- Index frequently queried fields for performance
|
|
10
|
+
- Track audit trails (created_at, updated_at)
|
|
11
|
+
- Support graph relationships via edge tracking
|
|
12
|
+
- Enable full observability of agent activities
|
|
13
|
+
|
|
14
|
+
Tables:
|
|
15
|
+
- agent_events: All agent tool calls, results, errors, delegations
|
|
16
|
+
- features: Feature/bug/spike/chore/epic work items
|
|
17
|
+
- sessions: Agent session tracking with metrics
|
|
18
|
+
- tracks: Multi-feature initiatives
|
|
19
|
+
- agent_collaboration: Handoffs and parallel work
|
|
20
|
+
- graph_edges: General relationship tracking
|
|
21
|
+
- event_log_archive: Historical event log for querying
|
|
22
|
+
"""
|
|
23
|
+
|
|
24
|
+
import json
|
|
25
|
+
import logging
|
|
26
|
+
import sqlite3
|
|
27
|
+
from datetime import datetime, timedelta, timezone
|
|
28
|
+
from pathlib import Path
|
|
29
|
+
from typing import Any
|
|
30
|
+
|
|
31
|
+
logger = logging.getLogger(__name__)
|
|
32
|
+
|
|
33
|
+
|
|
34
|
+
class HtmlGraphDB:
|
|
35
|
+
"""
|
|
36
|
+
SQLite database manager for HtmlGraph observability backend.
|
|
37
|
+
|
|
38
|
+
Provides schema creation, migrations, and query helpers for storing
|
|
39
|
+
and retrieving agent events, features, sessions, and collaborations.
|
|
40
|
+
"""
|
|
41
|
+
|
|
42
|
+
    def __init__(self, db_path: str | None = None):
        """
        Initialize HtmlGraph database.

        Opens the connection and creates/migrates the schema eagerly, so the
        instance is immediately usable after construction.

        Args:
            db_path: Path to SQLite database file. If None, defaults to
                ``~/.htmlgraph/htmlgraph.db`` under the user's HOME directory
                (not the project root, despite older comments).
        """
        if db_path is None:
            # Default: ~/.htmlgraph/htmlgraph.db in the user's home directory.
            db_path = str(Path.home() / ".htmlgraph" / "htmlgraph.db")

        self.db_path = Path(db_path)
        # Ensure the parent directory exists before SQLite tries to open the file.
        self.db_path.parent.mkdir(parents=True, exist_ok=True)
        # Live connection handle; None while disconnected.
        self.connection: sqlite3.Connection | None = None

        # Auto-initialize schema on first instantiation
        self.connect()
        self.create_tables()
|
|
61
|
+
def connect(self) -> sqlite3.Connection:
|
|
62
|
+
"""
|
|
63
|
+
Connect to SQLite database, creating it if needed.
|
|
64
|
+
|
|
65
|
+
Returns:
|
|
66
|
+
SQLite connection object
|
|
67
|
+
"""
|
|
68
|
+
self.connection = sqlite3.connect(str(self.db_path))
|
|
69
|
+
self.connection.row_factory = sqlite3.Row
|
|
70
|
+
# Enable foreign keys
|
|
71
|
+
self.connection.execute("PRAGMA foreign_keys = ON")
|
|
72
|
+
return self.connection
|
|
73
|
+
|
|
74
|
+
def disconnect(self) -> None:
|
|
75
|
+
"""Close database connection."""
|
|
76
|
+
if self.connection:
|
|
77
|
+
self.connection.close()
|
|
78
|
+
self.connection = None
|
|
79
|
+
|
|
80
|
+
def _migrate_agent_events_table(self, cursor: sqlite3.Cursor) -> None:
|
|
81
|
+
"""
|
|
82
|
+
Migrate agent_events table to add missing columns.
|
|
83
|
+
|
|
84
|
+
Adds columns that may be missing from older database versions.
|
|
85
|
+
"""
|
|
86
|
+
# Check if agent_events table exists
|
|
87
|
+
cursor.execute(
|
|
88
|
+
"SELECT name FROM sqlite_master WHERE type='table' AND name='agent_events'"
|
|
89
|
+
)
|
|
90
|
+
if not cursor.fetchone():
|
|
91
|
+
return # Table doesn't exist yet, will be created fresh
|
|
92
|
+
|
|
93
|
+
# Get current columns
|
|
94
|
+
cursor.execute("PRAGMA table_info(agent_events)")
|
|
95
|
+
columns = {row[1] for row in cursor.fetchall()}
|
|
96
|
+
|
|
97
|
+
# Add missing columns with defaults
|
|
98
|
+
migrations = [
|
|
99
|
+
("feature_id", "TEXT"),
|
|
100
|
+
("subagent_type", "TEXT"),
|
|
101
|
+
("child_spike_count", "INTEGER DEFAULT 0"),
|
|
102
|
+
("cost_tokens", "INTEGER DEFAULT 0"),
|
|
103
|
+
("execution_duration_seconds", "REAL DEFAULT 0.0"),
|
|
104
|
+
("status", "TEXT DEFAULT 'recorded'"),
|
|
105
|
+
("created_at", "DATETIME DEFAULT CURRENT_TIMESTAMP"),
|
|
106
|
+
("updated_at", "DATETIME DEFAULT CURRENT_TIMESTAMP"),
|
|
107
|
+
("model", "TEXT"),
|
|
108
|
+
("claude_task_id", "TEXT"),
|
|
109
|
+
]
|
|
110
|
+
|
|
111
|
+
for col_name, col_type in migrations:
|
|
112
|
+
if col_name not in columns:
|
|
113
|
+
try:
|
|
114
|
+
cursor.execute(
|
|
115
|
+
f"ALTER TABLE agent_events ADD COLUMN {col_name} {col_type}"
|
|
116
|
+
)
|
|
117
|
+
logger.info(f"Added column agent_events.{col_name}")
|
|
118
|
+
except sqlite3.OperationalError as e:
|
|
119
|
+
# Column may already exist
|
|
120
|
+
logger.debug(f"Could not add {col_name}: {e}")
|
|
121
|
+
|
|
122
|
+
def _migrate_sessions_table(self, cursor: sqlite3.Cursor) -> None:
|
|
123
|
+
"""
|
|
124
|
+
Migrate sessions table from old schema to new schema.
|
|
125
|
+
|
|
126
|
+
Old schema had columns: session_id, agent, start_commit, continued_from,
|
|
127
|
+
status, started_at, ended_at
|
|
128
|
+
New schema expects: session_id, agent_assigned, parent_session_id,
|
|
129
|
+
parent_event_id, created_at, etc.
|
|
130
|
+
"""
|
|
131
|
+
# Check if sessions table exists with old schema
|
|
132
|
+
cursor.execute(
|
|
133
|
+
"SELECT name FROM sqlite_master WHERE type='table' AND name='sessions'"
|
|
134
|
+
)
|
|
135
|
+
if not cursor.fetchone():
|
|
136
|
+
return # Table doesn't exist yet, will be created fresh
|
|
137
|
+
|
|
138
|
+
# Get current columns
|
|
139
|
+
cursor.execute("PRAGMA table_info(sessions)")
|
|
140
|
+
columns = {row[1] for row in cursor.fetchall()}
|
|
141
|
+
|
|
142
|
+
# Migration: rename 'agent' to 'agent_assigned' if needed
|
|
143
|
+
if "agent" in columns and "agent_assigned" not in columns:
|
|
144
|
+
try:
|
|
145
|
+
cursor.execute(
|
|
146
|
+
"ALTER TABLE sessions RENAME COLUMN agent TO agent_assigned"
|
|
147
|
+
)
|
|
148
|
+
logger.info("Migrated sessions.agent -> sessions.agent_assigned")
|
|
149
|
+
except sqlite3.OperationalError as e:
|
|
150
|
+
logger.debug(f"Could not rename column: {e}")
|
|
151
|
+
# Column may already exist
|
|
152
|
+
pass
|
|
153
|
+
|
|
154
|
+
# Add missing columns with defaults
|
|
155
|
+
# Note: SQLite doesn't allow CURRENT_TIMESTAMP in ALTER TABLE, so we use NULL
|
|
156
|
+
migrations = [
|
|
157
|
+
("parent_session_id", "TEXT"),
|
|
158
|
+
("parent_event_id", "TEXT"),
|
|
159
|
+
("created_at", "DATETIME"), # Can't use DEFAULT CURRENT_TIMESTAMP in ALTER
|
|
160
|
+
("is_subagent", "INTEGER DEFAULT 0"),
|
|
161
|
+
("total_events", "INTEGER DEFAULT 0"),
|
|
162
|
+
("total_tokens_used", "INTEGER DEFAULT 0"),
|
|
163
|
+
("context_drift", "REAL DEFAULT 0.0"),
|
|
164
|
+
("transcript_id", "TEXT"),
|
|
165
|
+
("transcript_path", "TEXT"),
|
|
166
|
+
("transcript_synced", "INTEGER DEFAULT 0"),
|
|
167
|
+
("end_commit", "TEXT"),
|
|
168
|
+
("features_worked_on", "TEXT"),
|
|
169
|
+
("metadata", "TEXT"),
|
|
170
|
+
("completed_at", "DATETIME"),
|
|
171
|
+
("last_user_query_at", "DATETIME"),
|
|
172
|
+
("last_user_query", "TEXT"),
|
|
173
|
+
# Phase 2 Feature 3: Cross-Session Continuity handoff fields
|
|
174
|
+
("handoff_notes", "TEXT"),
|
|
175
|
+
("recommended_next", "TEXT"),
|
|
176
|
+
("blockers", "TEXT"), # JSON array of blocker strings
|
|
177
|
+
("recommended_context", "TEXT"), # JSON array of file paths
|
|
178
|
+
("continued_from", "TEXT"), # Previous session ID
|
|
179
|
+
# Phase 3.1: Real-time cost monitoring
|
|
180
|
+
("cost_budget", "REAL"), # Budget in USD for this session
|
|
181
|
+
("cost_threshold_breached", "INTEGER DEFAULT 0"), # Whether budget exceeded
|
|
182
|
+
("predicted_cost", "REAL DEFAULT 0.0"), # Predicted final cost
|
|
183
|
+
]
|
|
184
|
+
|
|
185
|
+
# Refresh columns after potential rename
|
|
186
|
+
cursor.execute("PRAGMA table_info(sessions)")
|
|
187
|
+
columns = {row[1] for row in cursor.fetchall()}
|
|
188
|
+
|
|
189
|
+
for col_name, col_type in migrations:
|
|
190
|
+
if col_name not in columns:
|
|
191
|
+
try:
|
|
192
|
+
cursor.execute(
|
|
193
|
+
f"ALTER TABLE sessions ADD COLUMN {col_name} {col_type}"
|
|
194
|
+
)
|
|
195
|
+
logger.info(f"Added column sessions.{col_name}")
|
|
196
|
+
except sqlite3.OperationalError as e:
|
|
197
|
+
# Column may already exist
|
|
198
|
+
logger.debug(f"Could not add {col_name}: {e}")
|
|
199
|
+
|
|
200
|
+
    def create_tables(self) -> None:
        """
        Create all required tables and indexes (idempotent).

        Runs column migrations on pre-existing tables first, then issues
        ``CREATE TABLE IF NOT EXISTS`` for each table:

        1.  agent_events        - core event tracking
        2.  features            - work items (features, bugs, spikes, chores, epics)
        3.  sessions            - agent sessions with metrics
        4.  tracks              - multi-feature initiatives
        5.  agent_collaboration - handoffs and parallel work
        6.  graph_edges         - flexible relationship tracking
        7.  event_log_archive   - historical event log
        8.  live_events         - real-time event streaming buffer
        9.  tool_traces         - detailed tool execution tracing
        10. handoff_tracking    - handoff effectiveness metrics
        11. cost_events         - real-time cost monitoring & alerts

        Finally creates performance indexes and commits.
        """
        if not self.connection:
            self.connect()

        cursor = self.connection.cursor()  # type: ignore[union-attr]

        # Run migrations for existing tables before creating new ones
        self._migrate_agent_events_table(cursor)
        self._migrate_sessions_table(cursor)

        # 1. AGENT_EVENTS TABLE - Core event tracking
        cursor.execute("""
            CREATE TABLE IF NOT EXISTS agent_events (
                event_id TEXT PRIMARY KEY,
                agent_id TEXT NOT NULL,
                event_type TEXT NOT NULL CHECK(
                    event_type IN ('tool_call', 'tool_result', 'error', 'delegation',
                                   'completion', 'start', 'end', 'check_point', 'task_delegation')
                ),
                timestamp DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
                tool_name TEXT,
                input_summary TEXT,
                output_summary TEXT,
                context JSON,
                session_id TEXT NOT NULL,
                feature_id TEXT,
                parent_agent_id TEXT,
                parent_event_id TEXT,
                subagent_type TEXT,
                child_spike_count INTEGER DEFAULT 0,
                cost_tokens INTEGER DEFAULT 0,
                execution_duration_seconds REAL DEFAULT 0.0,
                status TEXT DEFAULT 'recorded',
                model TEXT,
                claude_task_id TEXT,
                created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
                updated_at DATETIME DEFAULT CURRENT_TIMESTAMP,
                FOREIGN KEY (session_id) REFERENCES sessions(session_id) ON DELETE CASCADE ON UPDATE CASCADE,
                FOREIGN KEY (parent_event_id) REFERENCES agent_events(event_id) ON DELETE SET NULL ON UPDATE CASCADE,
                FOREIGN KEY (feature_id) REFERENCES features(id) ON DELETE SET NULL ON UPDATE CASCADE
            )
        """)

        # 2. FEATURES TABLE - Work items (features, bugs, spikes, chores, epics)
        cursor.execute("""
            CREATE TABLE IF NOT EXISTS features (
                id TEXT PRIMARY KEY,
                type TEXT NOT NULL CHECK(
                    type IN ('feature', 'bug', 'spike', 'chore', 'epic', 'task')
                ),
                title TEXT NOT NULL,
                description TEXT,
                status TEXT NOT NULL DEFAULT 'todo' CHECK(
                    status IN ('todo', 'in-progress', 'blocked', 'done', 'active', 'ended', 'stale')
                ),
                priority TEXT DEFAULT 'medium' CHECK(
                    priority IN ('low', 'medium', 'high', 'critical')
                ),
                assigned_to TEXT,
                track_id TEXT,
                created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
                updated_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
                completed_at DATETIME,
                steps_total INTEGER DEFAULT 0,
                steps_completed INTEGER DEFAULT 0,
                parent_feature_id TEXT,
                tags JSON,
                metadata JSON,
                FOREIGN KEY (track_id) REFERENCES tracks(id),
                FOREIGN KEY (parent_feature_id) REFERENCES features(id)
            )
        """)

        # 3. SESSIONS TABLE - Agent sessions with metrics
        cursor.execute("""
            CREATE TABLE IF NOT EXISTS sessions (
                session_id TEXT PRIMARY KEY,
                agent_assigned TEXT NOT NULL,
                parent_session_id TEXT,
                parent_event_id TEXT,
                created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
                completed_at DATETIME,
                total_events INTEGER DEFAULT 0,
                total_tokens_used INTEGER DEFAULT 0,
                context_drift REAL DEFAULT 0.0,
                status TEXT NOT NULL DEFAULT 'active' CHECK(
                    status IN ('active', 'completed', 'paused', 'failed')
                ),
                transcript_id TEXT,
                transcript_path TEXT,
                transcript_synced DATETIME,
                start_commit TEXT,
                end_commit TEXT,
                is_subagent BOOLEAN DEFAULT FALSE,
                features_worked_on JSON,
                metadata JSON,
                last_user_query_at DATETIME,
                last_user_query TEXT,
                handoff_notes TEXT,
                recommended_next TEXT,
                blockers JSON,
                recommended_context JSON,
                continued_from TEXT,
                cost_budget REAL,
                cost_threshold_breached INTEGER DEFAULT 0,
                predicted_cost REAL DEFAULT 0.0,
                FOREIGN KEY (parent_session_id) REFERENCES sessions(session_id) ON DELETE SET NULL ON UPDATE CASCADE,
                FOREIGN KEY (parent_event_id) REFERENCES agent_events(event_id) ON DELETE SET NULL ON UPDATE CASCADE,
                FOREIGN KEY (continued_from) REFERENCES sessions(session_id) ON DELETE SET NULL ON UPDATE CASCADE
            )
        """)

        # 4. TRACKS TABLE - Multi-feature initiatives
        cursor.execute("""
            CREATE TABLE IF NOT EXISTS tracks (
                id TEXT PRIMARY KEY,
                type TEXT DEFAULT 'track',
                title TEXT NOT NULL,
                description TEXT,
                priority TEXT DEFAULT 'medium' CHECK(
                    priority IN ('low', 'medium', 'high', 'critical')
                ),
                status TEXT NOT NULL DEFAULT 'todo' CHECK(
                    status IN ('todo', 'in-progress', 'blocked', 'done', 'active', 'ended', 'stale')
                ),
                created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
                updated_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
                completed_at DATETIME,
                features JSON,
                metadata JSON
            )
        """)

        # 5. AGENT_COLLABORATION TABLE - Handoffs and parallel work
        cursor.execute("""
            CREATE TABLE IF NOT EXISTS agent_collaboration (
                handoff_id TEXT PRIMARY KEY,
                from_agent TEXT NOT NULL,
                to_agent TEXT NOT NULL,
                timestamp DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
                feature_id TEXT,
                session_id TEXT,
                handoff_type TEXT CHECK(
                    handoff_type IN ('delegation', 'parallel', 'sequential', 'fallback')
                ),
                status TEXT DEFAULT 'pending' CHECK(
                    status IN ('pending', 'accepted', 'rejected', 'completed', 'failed')
                ),
                reason TEXT,
                context JSON,
                result JSON,
                FOREIGN KEY (feature_id) REFERENCES features(id),
                FOREIGN KEY (session_id) REFERENCES sessions(session_id)
            )
        """)

        # 6. GRAPH_EDGES TABLE - Flexible relationship tracking
        cursor.execute("""
            CREATE TABLE IF NOT EXISTS graph_edges (
                edge_id TEXT PRIMARY KEY,
                from_node_id TEXT NOT NULL,
                from_node_type TEXT NOT NULL,
                to_node_id TEXT NOT NULL,
                to_node_type TEXT NOT NULL,
                relationship_type TEXT NOT NULL,
                weight REAL DEFAULT 1.0,
                created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
                metadata JSON
            )
        """)

        # 7. EVENT_LOG_ARCHIVE TABLE - Historical event queries
        cursor.execute("""
            CREATE TABLE IF NOT EXISTS event_log_archive (
                archive_id TEXT PRIMARY KEY,
                session_id TEXT NOT NULL,
                agent_id TEXT NOT NULL,
                event_date DATE NOT NULL,
                event_count INTEGER DEFAULT 0,
                total_tokens INTEGER DEFAULT 0,
                summary TEXT,
                archived_at DATETIME DEFAULT CURRENT_TIMESTAMP,
                FOREIGN KEY (session_id) REFERENCES sessions(session_id)
            )
        """)

        # 8. LIVE_EVENTS TABLE - Real-time event streaming buffer
        # Events are inserted here for WebSocket broadcasting, then auto-cleaned after broadcast
        cursor.execute("""
            CREATE TABLE IF NOT EXISTS live_events (
                id INTEGER PRIMARY KEY AUTOINCREMENT,
                event_type TEXT NOT NULL,
                event_data TEXT NOT NULL,
                parent_event_id TEXT,
                session_id TEXT,
                spawner_type TEXT,
                created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                broadcast_at TIMESTAMP
            )
        """)

        # 9. TOOL_TRACES TABLE - Detailed tool execution tracing
        cursor.execute("""
            CREATE TABLE IF NOT EXISTS tool_traces (
                tool_use_id TEXT PRIMARY KEY,
                trace_id TEXT NOT NULL,
                session_id TEXT NOT NULL,
                tool_name TEXT NOT NULL,
                tool_input JSON,
                tool_output JSON,
                start_time TIMESTAMP NOT NULL,
                end_time TIMESTAMP,
                duration_ms INTEGER,
                status TEXT NOT NULL DEFAULT 'started' CHECK(
                    status IN ('started', 'completed', 'failed', 'timeout', 'cancelled')
                ),
                error_message TEXT,
                parent_tool_use_id TEXT,
                created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                FOREIGN KEY (session_id) REFERENCES sessions(session_id),
                FOREIGN KEY (parent_tool_use_id) REFERENCES tool_traces(tool_use_id)
            )
        """)

        # 10. HANDOFF_TRACKING TABLE - Phase 2 Feature 3: Track handoff effectiveness
        cursor.execute("""
            CREATE TABLE IF NOT EXISTS handoff_tracking (
                handoff_id TEXT PRIMARY KEY,
                from_session_id TEXT NOT NULL,
                to_session_id TEXT,
                items_in_context INTEGER DEFAULT 0,
                items_accessed INTEGER DEFAULT 0,
                time_to_resume_seconds INTEGER DEFAULT 0,
                user_rating INTEGER CHECK(user_rating BETWEEN 1 AND 5),
                created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
                resumed_at DATETIME,
                FOREIGN KEY (from_session_id) REFERENCES sessions(session_id) ON DELETE CASCADE,
                FOREIGN KEY (to_session_id) REFERENCES sessions(session_id) ON DELETE SET NULL
            )
        """)

        # 11. COST_EVENTS TABLE - Phase 3.1: Real-time cost monitoring & alerts
        cursor.execute("""
            CREATE TABLE IF NOT EXISTS cost_events (
                event_id TEXT PRIMARY KEY,
                session_id TEXT NOT NULL,
                timestamp DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,

                -- Token tracking
                tool_name TEXT,
                model TEXT,
                input_tokens INTEGER DEFAULT 0,
                output_tokens INTEGER DEFAULT 0,
                total_tokens INTEGER DEFAULT 0,
                cost_usd REAL DEFAULT 0.0,

                -- Agent tracking
                agent_id TEXT,
                subagent_type TEXT,

                -- Alert tracking
                alert_type TEXT,
                message TEXT,
                current_cost_usd REAL,
                budget_usd REAL,
                predicted_cost_usd REAL,
                severity TEXT,
                acknowledged BOOLEAN DEFAULT 0,

                created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
                FOREIGN KEY (session_id) REFERENCES sessions(session_id) ON DELETE CASCADE
            )
        """)

        # 12. Create indexes for performance
        self._create_indexes(cursor)

        if self.connection:
            self.connection.commit()
        logger.info(f"SQLite schema created at {self.db_path}")
|
|
494
|
+
def _create_indexes(self, cursor: sqlite3.Cursor) -> None:
    """
    Create indexes on frequently queried fields.

    OPTIMIZATION STRATEGY:
    - Composite indexes for most common query patterns (session+timestamp, agent+timestamp)
    - Single-column indexes for individual filters and sorts
    - DESC indexes for reverse-order queries (e.g., activity feed, timelines)
    - Covering indexes where beneficial to reduce table lookups

    All statements use ``CREATE INDEX IF NOT EXISTS`` so the method is
    idempotent and safe to run on every startup. Individual failures are
    logged as warnings rather than raised, so one bad index (e.g. a missing
    column after a partial migration) does not abort schema setup.

    Args:
        cursor: SQLite cursor for executing queries
    """
    indexes = [
        # agent_events indexes - optimized for common query patterns
        # Pattern: WHERE session_id ORDER BY timestamp DESC (activity feed)
        "CREATE INDEX IF NOT EXISTS idx_agent_events_session_ts_desc ON agent_events(session_id, timestamp DESC)",
        # Pattern: WHERE agent_id ORDER BY timestamp DESC (agent timeline)
        "CREATE INDEX IF NOT EXISTS idx_agent_events_agent_ts_desc ON agent_events(agent_id, timestamp DESC)",
        # Pattern: GROUP BY agent_id (agent statistics)
        "CREATE INDEX IF NOT EXISTS idx_agent_events_agent ON agent_events(agent_id)",
        # Pattern: WHERE event_type = 'error' (error tracking)
        "CREATE INDEX IF NOT EXISTS idx_agent_events_type ON agent_events(event_type)",
        # Pattern: WHERE parent_event_id (hierarchical queries)
        "CREATE INDEX IF NOT EXISTS idx_agent_events_parent_event ON agent_events(parent_event_id)",
        # Pattern: WHERE event_type = 'task_delegation' (task delegation queries)
        "CREATE INDEX IF NOT EXISTS idx_agent_events_task_delegation ON agent_events(event_type, subagent_type, timestamp DESC)",
        # Pattern: Tool usage summary GROUP BY tool_name WHERE session_id
        "CREATE INDEX IF NOT EXISTS idx_agent_events_session_tool ON agent_events(session_id, tool_name)",
        # Pattern: Timestamp range queries
        "CREATE INDEX IF NOT EXISTS idx_agent_events_timestamp ON agent_events(timestamp DESC)",
        # Pattern: WHERE claude_task_id (task attribution queries)
        "CREATE INDEX IF NOT EXISTS idx_agent_events_claude_task_id ON agent_events(claude_task_id)",
        # features indexes - optimized for kanban/filtering
        # Pattern: WHERE status ORDER BY priority DESC (feature list views)
        "CREATE INDEX IF NOT EXISTS idx_features_status_priority ON features(status, priority DESC, created_at DESC)",
        # Pattern: WHERE track_id ORDER BY priority (track features)
        "CREATE INDEX IF NOT EXISTS idx_features_track_priority ON features(track_id, priority DESC, created_at DESC)",
        # Pattern: WHERE assigned_to (agent workload)
        "CREATE INDEX IF NOT EXISTS idx_features_assigned ON features(assigned_to)",
        # Pattern: WHERE parent_feature_id (feature tree)
        "CREATE INDEX IF NOT EXISTS idx_features_parent ON features(parent_feature_id)",
        # Pattern: WHERE type (filtering by type)
        "CREATE INDEX IF NOT EXISTS idx_features_type ON features(type)",
        # Pattern: Created timestamp range queries
        "CREATE INDEX IF NOT EXISTS idx_features_created ON features(created_at DESC)",
        # sessions indexes - optimized for session analysis
        # Pattern: WHERE agent_assigned ORDER BY created_at DESC
        "CREATE INDEX IF NOT EXISTS idx_sessions_agent_created ON sessions(agent_assigned, created_at DESC)",
        # Pattern: WHERE status (active sessions query)
        "CREATE INDEX IF NOT EXISTS idx_sessions_status_created ON sessions(status, created_at DESC)",
        # Pattern: WHERE parent_session_id (subagent queries)
        "CREATE INDEX IF NOT EXISTS idx_sessions_parent ON sessions(parent_session_id, created_at DESC)",
        # Pattern: Timestamp ordering for metrics
        "CREATE INDEX IF NOT EXISTS idx_sessions_created ON sessions(created_at DESC)",
        # tracks indexes - optimized for track queries
        # Pattern: WHERE status GROUP BY track_id
        "CREATE INDEX IF NOT EXISTS idx_tracks_status_created ON tracks(status, created_at DESC)",
        # Pattern: Ordering by priority
        "CREATE INDEX IF NOT EXISTS idx_tracks_priority ON tracks(priority DESC)",
        # collaboration indexes - optimized for handoff queries
        # Pattern: WHERE session_id, WHERE from_agent, WHERE to_agent
        "CREATE INDEX IF NOT EXISTS idx_collaboration_session ON agent_collaboration(session_id, timestamp DESC)",
        "CREATE INDEX IF NOT EXISTS idx_collaboration_from_agent ON agent_collaboration(from_agent)",
        "CREATE INDEX IF NOT EXISTS idx_collaboration_to_agent ON agent_collaboration(to_agent)",
        # Pattern: GROUP BY from_agent, to_agent
        "CREATE INDEX IF NOT EXISTS idx_collaboration_agents ON agent_collaboration(from_agent, to_agent)",
        "CREATE INDEX IF NOT EXISTS idx_collaboration_feature ON agent_collaboration(feature_id)",
        "CREATE INDEX IF NOT EXISTS idx_collaboration_handoff_type ON agent_collaboration(handoff_type, timestamp DESC)",
        # graph_edges indexes - optimized for graph traversal
        "CREATE INDEX IF NOT EXISTS idx_edges_from ON graph_edges(from_node_id)",
        "CREATE INDEX IF NOT EXISTS idx_edges_to ON graph_edges(to_node_id)",
        "CREATE INDEX IF NOT EXISTS idx_edges_type ON graph_edges(relationship_type)",
        # tool_traces indexes - optimized for tool performance analysis
        "CREATE INDEX IF NOT EXISTS idx_tool_traces_trace_id ON tool_traces(trace_id, start_time DESC)",
        "CREATE INDEX IF NOT EXISTS idx_tool_traces_session ON tool_traces(session_id, start_time DESC)",
        "CREATE INDEX IF NOT EXISTS idx_tool_traces_tool_name ON tool_traces(tool_name, status)",
        "CREATE INDEX IF NOT EXISTS idx_tool_traces_status ON tool_traces(status, start_time DESC)",
        "CREATE INDEX IF NOT EXISTS idx_tool_traces_start_time ON tool_traces(start_time DESC)",
        # live_events indexes - optimized for real-time WebSocket streaming
        # NOTE: partial index — only rows not yet broadcast are indexed, which
        # keeps the pending-events lookup small as the table grows.
        "CREATE INDEX IF NOT EXISTS idx_live_events_pending ON live_events(broadcast_at) WHERE broadcast_at IS NULL",
        "CREATE INDEX IF NOT EXISTS idx_live_events_created ON live_events(created_at DESC)",
        # handoff_tracking indexes - optimized for handoff effectiveness queries
        "CREATE INDEX IF NOT EXISTS idx_handoff_from_session ON handoff_tracking(from_session_id, created_at DESC)",
        "CREATE INDEX IF NOT EXISTS idx_handoff_to_session ON handoff_tracking(to_session_id, resumed_at DESC)",
        "CREATE INDEX IF NOT EXISTS idx_handoff_rating ON handoff_tracking(user_rating, created_at DESC)",
        # cost_events indexes - optimized for real-time cost monitoring & alerts
        # Pattern: WHERE session_id ORDER BY timestamp DESC (cost timeline)
        "CREATE INDEX IF NOT EXISTS idx_cost_events_session_ts ON cost_events(session_id, timestamp DESC)",
        # Pattern: WHERE alert_type (alert filtering)
        "CREATE INDEX IF NOT EXISTS idx_cost_events_alert_type ON cost_events(alert_type, timestamp DESC)",
        # Pattern: WHERE model GROUP BY (cost breakdown)
        "CREATE INDEX IF NOT EXISTS idx_cost_events_model ON cost_events(model, session_id)",
        # Pattern: WHERE tool_name GROUP BY (tool cost analysis)
        "CREATE INDEX IF NOT EXISTS idx_cost_events_tool ON cost_events(tool_name, session_id)",
        # Pattern: WHERE severity (alert severity filtering)
        "CREATE INDEX IF NOT EXISTS idx_cost_events_severity ON cost_events(severity, timestamp DESC)",
        # Pattern: Timestamp range queries for predictions
        "CREATE INDEX IF NOT EXISTS idx_cost_events_timestamp ON cost_events(timestamp DESC)",
    ]

    for index_sql in indexes:
        try:
            cursor.execute(index_sql)
        except sqlite3.OperationalError as e:
            # Non-fatal: log and continue with the remaining indexes.
            logger.warning(f"Index creation warning: {e}")
def insert_event(
|
|
602
|
+
self,
|
|
603
|
+
event_id: str,
|
|
604
|
+
agent_id: str,
|
|
605
|
+
event_type: str,
|
|
606
|
+
session_id: str,
|
|
607
|
+
tool_name: str | None = None,
|
|
608
|
+
input_summary: str | None = None,
|
|
609
|
+
output_summary: str | None = None,
|
|
610
|
+
context: dict[str, Any] | None = None,
|
|
611
|
+
parent_agent_id: str | None = None,
|
|
612
|
+
parent_event_id: str | None = None,
|
|
613
|
+
cost_tokens: int = 0,
|
|
614
|
+
execution_duration_seconds: float = 0.0,
|
|
615
|
+
subagent_type: str | None = None,
|
|
616
|
+
model: str | None = None,
|
|
617
|
+
feature_id: str | None = None,
|
|
618
|
+
claude_task_id: str | None = None,
|
|
619
|
+
) -> bool:
|
|
620
|
+
"""
|
|
621
|
+
Insert an agent event into the database.
|
|
622
|
+
|
|
623
|
+
Gracefully handles FOREIGN KEY constraint failures by retrying without
|
|
624
|
+
the parent_event_id reference. This allows events to be recorded even if
|
|
625
|
+
the parent event doesn't exist yet (useful for cross-process or distributed
|
|
626
|
+
event tracking).
|
|
627
|
+
|
|
628
|
+
Args:
|
|
629
|
+
event_id: Unique event identifier
|
|
630
|
+
agent_id: Agent that generated this event
|
|
631
|
+
event_type: Type of event (tool_call, tool_result, error, etc.)
|
|
632
|
+
session_id: Session this event belongs to
|
|
633
|
+
tool_name: Tool that was called (optional)
|
|
634
|
+
input_summary: Summary of tool input (optional)
|
|
635
|
+
output_summary: Summary of tool output (optional)
|
|
636
|
+
context: Additional metadata as JSON (optional)
|
|
637
|
+
parent_agent_id: Parent agent if delegated (optional)
|
|
638
|
+
parent_event_id: Parent event if nested (optional)
|
|
639
|
+
cost_tokens: Token usage estimate (optional)
|
|
640
|
+
execution_duration_seconds: Execution time in seconds (optional)
|
|
641
|
+
subagent_type: Subagent type for Task delegations (optional)
|
|
642
|
+
model: Claude model name (e.g., claude-haiku, claude-opus, claude-sonnet) (optional)
|
|
643
|
+
claude_task_id: Claude Code's internal task ID for tool attribution (optional)
|
|
644
|
+
|
|
645
|
+
Returns:
|
|
646
|
+
True if insert successful, False otherwise
|
|
647
|
+
"""
|
|
648
|
+
if not self.connection:
|
|
649
|
+
self.connect()
|
|
650
|
+
|
|
651
|
+
try:
|
|
652
|
+
cursor = self.connection.cursor() # type: ignore[union-attr]
|
|
653
|
+
# Temporarily disable foreign key constraints to allow inserting
|
|
654
|
+
# events even if parent_event_id or session_id don't exist yet
|
|
655
|
+
# (useful for cross-process event tracking where sessions are created asynchronously)
|
|
656
|
+
cursor.execute("PRAGMA foreign_keys=OFF")
|
|
657
|
+
cursor.execute(
|
|
658
|
+
"""
|
|
659
|
+
INSERT INTO agent_events
|
|
660
|
+
(event_id, agent_id, event_type, session_id, feature_id, tool_name,
|
|
661
|
+
input_summary, output_summary, context, parent_agent_id,
|
|
662
|
+
parent_event_id, cost_tokens, execution_duration_seconds, subagent_type, model, claude_task_id)
|
|
663
|
+
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
|
|
664
|
+
""",
|
|
665
|
+
(
|
|
666
|
+
event_id,
|
|
667
|
+
agent_id,
|
|
668
|
+
event_type,
|
|
669
|
+
session_id,
|
|
670
|
+
feature_id,
|
|
671
|
+
tool_name,
|
|
672
|
+
input_summary,
|
|
673
|
+
output_summary,
|
|
674
|
+
json.dumps(context) if context else None,
|
|
675
|
+
parent_agent_id,
|
|
676
|
+
parent_event_id,
|
|
677
|
+
cost_tokens,
|
|
678
|
+
execution_duration_seconds,
|
|
679
|
+
subagent_type,
|
|
680
|
+
model,
|
|
681
|
+
claude_task_id,
|
|
682
|
+
),
|
|
683
|
+
)
|
|
684
|
+
# Re-enable foreign key constraints
|
|
685
|
+
cursor.execute("PRAGMA foreign_keys=ON")
|
|
686
|
+
self.connection.commit() # type: ignore[union-attr]
|
|
687
|
+
return True
|
|
688
|
+
except sqlite3.IntegrityError as e:
|
|
689
|
+
# Other integrity errors (unique constraint, etc.)
|
|
690
|
+
logger.error(f"Error inserting event: {e}")
|
|
691
|
+
return False
|
|
692
|
+
except sqlite3.Error as e:
|
|
693
|
+
logger.error(f"Error inserting event: {e}")
|
|
694
|
+
return False
|
|
695
|
+
|
|
696
|
+
def insert_feature(
|
|
697
|
+
self,
|
|
698
|
+
feature_id: str,
|
|
699
|
+
feature_type: str,
|
|
700
|
+
title: str,
|
|
701
|
+
status: str = "todo",
|
|
702
|
+
priority: str = "medium",
|
|
703
|
+
assigned_to: str | None = None,
|
|
704
|
+
track_id: str | None = None,
|
|
705
|
+
description: str | None = None,
|
|
706
|
+
steps_total: int = 0,
|
|
707
|
+
tags: list | None = None,
|
|
708
|
+
) -> bool:
|
|
709
|
+
"""
|
|
710
|
+
Insert a feature/bug/spike work item.
|
|
711
|
+
|
|
712
|
+
Args:
|
|
713
|
+
feature_id: Unique feature identifier
|
|
714
|
+
feature_type: Type (feature, bug, spike, chore, epic)
|
|
715
|
+
title: Feature title
|
|
716
|
+
status: Current status (todo, in_progress, done, etc.)
|
|
717
|
+
priority: Priority level (low, medium, high, critical)
|
|
718
|
+
assigned_to: Assigned agent (optional)
|
|
719
|
+
track_id: Parent track ID (optional)
|
|
720
|
+
description: Feature description (optional)
|
|
721
|
+
steps_total: Total implementation steps
|
|
722
|
+
tags: Tags for categorization (optional)
|
|
723
|
+
|
|
724
|
+
Returns:
|
|
725
|
+
True if insert successful, False otherwise
|
|
726
|
+
"""
|
|
727
|
+
if not self.connection:
|
|
728
|
+
self.connect()
|
|
729
|
+
|
|
730
|
+
try:
|
|
731
|
+
cursor = self.connection.cursor() # type: ignore[union-attr]
|
|
732
|
+
cursor.execute(
|
|
733
|
+
"""
|
|
734
|
+
INSERT INTO features
|
|
735
|
+
(id, type, title, status, priority, assigned_to, track_id,
|
|
736
|
+
description, steps_total, tags)
|
|
737
|
+
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
|
|
738
|
+
""",
|
|
739
|
+
(
|
|
740
|
+
feature_id,
|
|
741
|
+
feature_type,
|
|
742
|
+
title,
|
|
743
|
+
status,
|
|
744
|
+
priority,
|
|
745
|
+
assigned_to,
|
|
746
|
+
track_id,
|
|
747
|
+
description,
|
|
748
|
+
steps_total,
|
|
749
|
+
json.dumps(tags) if tags else None,
|
|
750
|
+
),
|
|
751
|
+
)
|
|
752
|
+
self.connection.commit() # type: ignore[union-attr]
|
|
753
|
+
return True
|
|
754
|
+
except sqlite3.Error as e:
|
|
755
|
+
logger.error(f"Error inserting feature: {e}")
|
|
756
|
+
return False
|
|
757
|
+
|
|
758
|
+
def insert_session(
|
|
759
|
+
self,
|
|
760
|
+
session_id: str,
|
|
761
|
+
agent_assigned: str,
|
|
762
|
+
parent_session_id: str | None = None,
|
|
763
|
+
parent_event_id: str | None = None,
|
|
764
|
+
is_subagent: bool = False,
|
|
765
|
+
transcript_id: str | None = None,
|
|
766
|
+
transcript_path: str | None = None,
|
|
767
|
+
) -> bool:
|
|
768
|
+
"""
|
|
769
|
+
Insert a new session record.
|
|
770
|
+
|
|
771
|
+
Gracefully handles FOREIGN KEY constraint failures by retrying without
|
|
772
|
+
the parent_event_id or parent_session_id reference. This allows sessions
|
|
773
|
+
to be created even if the parent doesn't exist yet.
|
|
774
|
+
|
|
775
|
+
Args:
|
|
776
|
+
session_id: Unique session identifier
|
|
777
|
+
agent_assigned: Primary agent for this session
|
|
778
|
+
parent_session_id: Parent session if subagent (optional)
|
|
779
|
+
parent_event_id: Event that spawned this session (optional)
|
|
780
|
+
is_subagent: Whether this is a subagent session
|
|
781
|
+
transcript_id: ID of Claude transcript (optional)
|
|
782
|
+
transcript_path: Path to transcript file (optional)
|
|
783
|
+
|
|
784
|
+
Returns:
|
|
785
|
+
True if insert successful, False otherwise
|
|
786
|
+
"""
|
|
787
|
+
if not self.connection:
|
|
788
|
+
self.connect()
|
|
789
|
+
|
|
790
|
+
try:
|
|
791
|
+
cursor = self.connection.cursor() # type: ignore[union-attr]
|
|
792
|
+
cursor.execute(
|
|
793
|
+
"""
|
|
794
|
+
INSERT OR IGNORE INTO sessions
|
|
795
|
+
(session_id, agent_assigned, parent_session_id, parent_event_id,
|
|
796
|
+
is_subagent, transcript_id, transcript_path)
|
|
797
|
+
VALUES (?, ?, ?, ?, ?, ?, ?)
|
|
798
|
+
""",
|
|
799
|
+
(
|
|
800
|
+
session_id,
|
|
801
|
+
agent_assigned,
|
|
802
|
+
parent_session_id,
|
|
803
|
+
parent_event_id,
|
|
804
|
+
is_subagent,
|
|
805
|
+
transcript_id,
|
|
806
|
+
transcript_path,
|
|
807
|
+
),
|
|
808
|
+
)
|
|
809
|
+
self.connection.commit() # type: ignore[union-attr]
|
|
810
|
+
return True
|
|
811
|
+
except sqlite3.IntegrityError as e:
|
|
812
|
+
# FOREIGN KEY constraint failed - parent doesn't exist
|
|
813
|
+
if "FOREIGN KEY constraint failed" in str(e) and (
|
|
814
|
+
parent_event_id or parent_session_id
|
|
815
|
+
):
|
|
816
|
+
logger.warning(
|
|
817
|
+
"Parent session/event not found, creating session without parent link"
|
|
818
|
+
)
|
|
819
|
+
# Retry without parent references to enable graceful degradation
|
|
820
|
+
try:
|
|
821
|
+
cursor = self.connection.cursor() # type: ignore[union-attr]
|
|
822
|
+
cursor.execute(
|
|
823
|
+
"""
|
|
824
|
+
INSERT OR IGNORE INTO sessions
|
|
825
|
+
(session_id, agent_assigned, parent_session_id, parent_event_id,
|
|
826
|
+
is_subagent, transcript_id, transcript_path)
|
|
827
|
+
VALUES (?, ?, ?, ?, ?, ?, ?)
|
|
828
|
+
""",
|
|
829
|
+
(
|
|
830
|
+
session_id,
|
|
831
|
+
agent_assigned,
|
|
832
|
+
None, # Drop parent_session_id
|
|
833
|
+
None, # Drop parent_event_id
|
|
834
|
+
is_subagent,
|
|
835
|
+
transcript_id,
|
|
836
|
+
transcript_path,
|
|
837
|
+
),
|
|
838
|
+
)
|
|
839
|
+
self.connection.commit() # type: ignore[union-attr]
|
|
840
|
+
return True
|
|
841
|
+
except sqlite3.Error as retry_error:
|
|
842
|
+
logger.error(f"Error inserting session after retry: {retry_error}")
|
|
843
|
+
return False
|
|
844
|
+
else:
|
|
845
|
+
logger.error(f"Error inserting session: {e}")
|
|
846
|
+
return False
|
|
847
|
+
except sqlite3.Error as e:
|
|
848
|
+
logger.error(f"Error inserting session: {e}")
|
|
849
|
+
return False
|
|
850
|
+
|
|
851
|
+
def update_feature_status(
|
|
852
|
+
self,
|
|
853
|
+
feature_id: str,
|
|
854
|
+
status: str,
|
|
855
|
+
steps_completed: int | None = None,
|
|
856
|
+
) -> bool:
|
|
857
|
+
"""
|
|
858
|
+
Update feature status and completion progress.
|
|
859
|
+
|
|
860
|
+
Args:
|
|
861
|
+
feature_id: Feature to update
|
|
862
|
+
status: New status (todo, in_progress, done, etc.)
|
|
863
|
+
steps_completed: Number of steps completed (optional)
|
|
864
|
+
|
|
865
|
+
Returns:
|
|
866
|
+
True if update successful, False otherwise
|
|
867
|
+
"""
|
|
868
|
+
if not self.connection:
|
|
869
|
+
self.connect()
|
|
870
|
+
|
|
871
|
+
try:
|
|
872
|
+
cursor = self.connection.cursor() # type: ignore[union-attr]
|
|
873
|
+
if steps_completed is not None:
|
|
874
|
+
cursor.execute(
|
|
875
|
+
"""
|
|
876
|
+
UPDATE features
|
|
877
|
+
SET status = ?, steps_completed = ?, updated_at = CURRENT_TIMESTAMP
|
|
878
|
+
WHERE id = ?
|
|
879
|
+
""",
|
|
880
|
+
(status, steps_completed, feature_id),
|
|
881
|
+
)
|
|
882
|
+
else:
|
|
883
|
+
cursor.execute(
|
|
884
|
+
"""
|
|
885
|
+
UPDATE features
|
|
886
|
+
SET status = ?, updated_at = CURRENT_TIMESTAMP
|
|
887
|
+
WHERE id = ?
|
|
888
|
+
""",
|
|
889
|
+
(status, feature_id),
|
|
890
|
+
)
|
|
891
|
+
|
|
892
|
+
# Auto-set completed_at if status is done
|
|
893
|
+
if status == "done":
|
|
894
|
+
cursor.execute(
|
|
895
|
+
"""
|
|
896
|
+
UPDATE features
|
|
897
|
+
SET completed_at = CURRENT_TIMESTAMP
|
|
898
|
+
WHERE id = ?
|
|
899
|
+
""",
|
|
900
|
+
(feature_id,),
|
|
901
|
+
)
|
|
902
|
+
|
|
903
|
+
self.connection.commit() # type: ignore[union-attr]
|
|
904
|
+
return True
|
|
905
|
+
except sqlite3.Error as e:
|
|
906
|
+
logger.error(f"Error updating feature: {e}")
|
|
907
|
+
return False
|
|
908
|
+
|
|
909
|
+
def get_session_events(self, session_id: str) -> list[dict[str, Any]]:
    """
    Return every event recorded for a session, oldest first.

    Args:
        session_id: Session to query

    Returns:
        List of event dictionaries (empty on error or no matches)
    """
    if not self.connection:
        self.connect()

    try:
        cur = self.connection.cursor()  # type: ignore[union-attr]
        cur.execute(
            """
            SELECT * FROM agent_events
            WHERE session_id = ?
            ORDER BY timestamp ASC
            """,
            (session_id,),
        )
        # Rows come back as sqlite3.Row; expose them as plain dicts.
        return [dict(record) for record in cur.fetchall()]
    except sqlite3.Error as e:
        logger.error(f"Error querying events: {e}")
        return []
def get_feature_by_id(self, feature_id: str) -> dict[str, Any] | None:
|
|
940
|
+
"""
|
|
941
|
+
Get a feature by ID.
|
|
942
|
+
|
|
943
|
+
Args:
|
|
944
|
+
feature_id: Feature ID to retrieve
|
|
945
|
+
|
|
946
|
+
Returns:
|
|
947
|
+
Feature dictionary or None if not found
|
|
948
|
+
"""
|
|
949
|
+
if not self.connection:
|
|
950
|
+
self.connect()
|
|
951
|
+
|
|
952
|
+
try:
|
|
953
|
+
cursor = self.connection.cursor() # type: ignore[union-attr]
|
|
954
|
+
cursor.execute(
|
|
955
|
+
"""
|
|
956
|
+
SELECT * FROM features WHERE id = ?
|
|
957
|
+
""",
|
|
958
|
+
(feature_id,),
|
|
959
|
+
)
|
|
960
|
+
|
|
961
|
+
row = cursor.fetchone()
|
|
962
|
+
return dict(row) if row else None
|
|
963
|
+
except sqlite3.Error as e:
|
|
964
|
+
logger.error(f"Error fetching feature: {e}")
|
|
965
|
+
return None
|
|
966
|
+
|
|
967
|
+
def get_features_by_status(self, status: str) -> list[dict[str, Any]]:
    """
    Fetch all features matching a given status.

    Results are ordered by priority (descending) and then creation time
    (newest first).

    Args:
        status: Status to filter by

    Returns:
        List of feature dictionaries (empty on error or no matches)
    """
    if not self.connection:
        self.connect()

    try:
        cur = self.connection.cursor()  # type: ignore[union-attr]
        matches = cur.execute(
            """
            SELECT * FROM features
            WHERE status = ?
            ORDER BY priority DESC, created_at DESC
            """,
            (status,),
        ).fetchall()
        return [dict(record) for record in matches]
    except sqlite3.Error as e:
        logger.error(f"Error querying features: {e}")
        return []
def _ensure_session_exists(
|
|
998
|
+
self, session_id: str, agent_id: str | None = None
|
|
999
|
+
) -> bool:
|
|
1000
|
+
"""
|
|
1001
|
+
Ensure a session record exists in the database.
|
|
1002
|
+
|
|
1003
|
+
Creates a placeholder session if it doesn't exist. Useful for
|
|
1004
|
+
handling foreign key constraints when recording delegations
|
|
1005
|
+
before the session is explicitly created.
|
|
1006
|
+
|
|
1007
|
+
Args:
|
|
1008
|
+
session_id: Session ID to ensure exists
|
|
1009
|
+
agent_id: Agent assigned to session (optional, defaults to 'system')
|
|
1010
|
+
|
|
1011
|
+
Returns:
|
|
1012
|
+
True if session exists or was created, False on error
|
|
1013
|
+
"""
|
|
1014
|
+
if not self.connection:
|
|
1015
|
+
self.connect()
|
|
1016
|
+
|
|
1017
|
+
try:
|
|
1018
|
+
cursor = self.connection.cursor() # type: ignore[union-attr]
|
|
1019
|
+
|
|
1020
|
+
# Check if session already exists
|
|
1021
|
+
cursor.execute("SELECT 1 FROM sessions WHERE session_id = ?", (session_id,))
|
|
1022
|
+
if cursor.fetchone():
|
|
1023
|
+
return True
|
|
1024
|
+
|
|
1025
|
+
# Session doesn't exist, create placeholder
|
|
1026
|
+
cursor.execute(
|
|
1027
|
+
"""
|
|
1028
|
+
INSERT INTO sessions
|
|
1029
|
+
(session_id, agent_assigned, status)
|
|
1030
|
+
VALUES (?, ?, 'active')
|
|
1031
|
+
""",
|
|
1032
|
+
(session_id, agent_id or "system"),
|
|
1033
|
+
)
|
|
1034
|
+
self.connection.commit() # type: ignore[union-attr]
|
|
1035
|
+
return True
|
|
1036
|
+
|
|
1037
|
+
except sqlite3.Error as e:
|
|
1038
|
+
# Session might exist but check failed, continue anyway
|
|
1039
|
+
logger.debug(f"Session creation warning: {e}")
|
|
1040
|
+
return False
|
|
1041
|
+
|
|
1042
|
+
def record_collaboration(
|
|
1043
|
+
self,
|
|
1044
|
+
handoff_id: str,
|
|
1045
|
+
from_agent: str,
|
|
1046
|
+
to_agent: str,
|
|
1047
|
+
session_id: str,
|
|
1048
|
+
feature_id: str | None = None,
|
|
1049
|
+
handoff_type: str = "delegation",
|
|
1050
|
+
reason: str | None = None,
|
|
1051
|
+
context: dict[str, Any] | None = None,
|
|
1052
|
+
) -> bool:
|
|
1053
|
+
"""
|
|
1054
|
+
Record an agent handoff or collaboration event.
|
|
1055
|
+
|
|
1056
|
+
Args:
|
|
1057
|
+
handoff_id: Unique handoff identifier
|
|
1058
|
+
from_agent: Agent handing off work
|
|
1059
|
+
to_agent: Agent receiving work
|
|
1060
|
+
session_id: Session this handoff occurs in
|
|
1061
|
+
feature_id: Feature being handed off (optional)
|
|
1062
|
+
handoff_type: Type of handoff (delegation, parallel, sequential, fallback)
|
|
1063
|
+
reason: Reason for handoff (optional)
|
|
1064
|
+
context: Additional context (optional)
|
|
1065
|
+
|
|
1066
|
+
Returns:
|
|
1067
|
+
True if record successful, False otherwise
|
|
1068
|
+
"""
|
|
1069
|
+
if not self.connection:
|
|
1070
|
+
self.connect()
|
|
1071
|
+
|
|
1072
|
+
try:
|
|
1073
|
+
cursor = self.connection.cursor() # type: ignore[union-attr]
|
|
1074
|
+
cursor.execute(
|
|
1075
|
+
"""
|
|
1076
|
+
INSERT INTO agent_collaboration
|
|
1077
|
+
(handoff_id, from_agent, to_agent, session_id, feature_id,
|
|
1078
|
+
handoff_type, reason, context)
|
|
1079
|
+
VALUES (?, ?, ?, ?, ?, ?, ?, ?)
|
|
1080
|
+
""",
|
|
1081
|
+
(
|
|
1082
|
+
handoff_id,
|
|
1083
|
+
from_agent,
|
|
1084
|
+
to_agent,
|
|
1085
|
+
session_id,
|
|
1086
|
+
feature_id,
|
|
1087
|
+
handoff_type,
|
|
1088
|
+
reason,
|
|
1089
|
+
json.dumps(context) if context else None,
|
|
1090
|
+
),
|
|
1091
|
+
)
|
|
1092
|
+
self.connection.commit() # type: ignore[union-attr]
|
|
1093
|
+
return True
|
|
1094
|
+
except sqlite3.Error as e:
|
|
1095
|
+
logger.error(f"Error recording collaboration: {e}")
|
|
1096
|
+
return False
|
|
1097
|
+
|
|
1098
|
+
def record_delegation_event(
|
|
1099
|
+
self,
|
|
1100
|
+
from_agent: str,
|
|
1101
|
+
to_agent: str,
|
|
1102
|
+
task_description: str,
|
|
1103
|
+
session_id: str | None = None,
|
|
1104
|
+
feature_id: str | None = None,
|
|
1105
|
+
context: dict[str, Any] | None = None,
|
|
1106
|
+
) -> str | None:
|
|
1107
|
+
"""
|
|
1108
|
+
Record a delegation event from one agent to another.
|
|
1109
|
+
|
|
1110
|
+
This is a convenience method that wraps record_collaboration
|
|
1111
|
+
with sensible defaults for Task() delegation tracking.
|
|
1112
|
+
|
|
1113
|
+
Handles foreign key constraints by creating placeholder session
|
|
1114
|
+
if it doesn't exist.
|
|
1115
|
+
|
|
1116
|
+
Args:
|
|
1117
|
+
from_agent: Agent delegating work
|
|
1118
|
+
to_agent: Agent receiving work
|
|
1119
|
+
task_description: Description of the delegated task
|
|
1120
|
+
session_id: Session this delegation occurs in (optional, auto-creates if missing)
|
|
1121
|
+
feature_id: Feature being delegated (optional)
|
|
1122
|
+
context: Additional metadata (optional)
|
|
1123
|
+
|
|
1124
|
+
Returns:
|
|
1125
|
+
Handoff ID if successful, None otherwise
|
|
1126
|
+
"""
|
|
1127
|
+
import uuid
|
|
1128
|
+
|
|
1129
|
+
if not self.connection:
|
|
1130
|
+
self.connect()
|
|
1131
|
+
|
|
1132
|
+
# Auto-create session if not provided or doesn't exist
|
|
1133
|
+
if not session_id:
|
|
1134
|
+
session_id = f"session-{uuid.uuid4().hex[:8]}"
|
|
1135
|
+
|
|
1136
|
+
# Ensure session exists (create placeholder if needed)
|
|
1137
|
+
self._ensure_session_exists(session_id, from_agent)
|
|
1138
|
+
|
|
1139
|
+
handoff_id = f"hand-{uuid.uuid4().hex[:8]}"
|
|
1140
|
+
|
|
1141
|
+
# Prepare context with task description
|
|
1142
|
+
delegation_context = context or {}
|
|
1143
|
+
delegation_context["task_description"] = task_description
|
|
1144
|
+
|
|
1145
|
+
success = self.record_collaboration(
|
|
1146
|
+
handoff_id=handoff_id,
|
|
1147
|
+
from_agent=from_agent,
|
|
1148
|
+
to_agent=to_agent,
|
|
1149
|
+
session_id=session_id,
|
|
1150
|
+
feature_id=feature_id,
|
|
1151
|
+
handoff_type="delegation",
|
|
1152
|
+
reason=task_description,
|
|
1153
|
+
context=delegation_context,
|
|
1154
|
+
)
|
|
1155
|
+
|
|
1156
|
+
return handoff_id if success else None
|
|
1157
|
+
|
|
1158
|
+
def get_delegations(
|
|
1159
|
+
self,
|
|
1160
|
+
session_id: str | None = None,
|
|
1161
|
+
from_agent: str | None = None,
|
|
1162
|
+
to_agent: str | None = None,
|
|
1163
|
+
limit: int = 100,
|
|
1164
|
+
) -> list[dict[str, Any]]:
|
|
1165
|
+
"""
|
|
1166
|
+
Query delegation events from agent_collaboration table.
|
|
1167
|
+
|
|
1168
|
+
Args:
|
|
1169
|
+
session_id: Filter by session (optional)
|
|
1170
|
+
from_agent: Filter by source agent (optional)
|
|
1171
|
+
to_agent: Filter by target agent (optional)
|
|
1172
|
+
limit: Maximum number of results
|
|
1173
|
+
|
|
1174
|
+
Returns:
|
|
1175
|
+
List of delegation events as dictionaries
|
|
1176
|
+
"""
|
|
1177
|
+
if not self.connection:
|
|
1178
|
+
self.connect()
|
|
1179
|
+
|
|
1180
|
+
try:
|
|
1181
|
+
cursor = self.connection.cursor() # type: ignore[union-attr]
|
|
1182
|
+
|
|
1183
|
+
# Build WHERE clause
|
|
1184
|
+
where_clauses = ["handoff_type = 'delegation'"]
|
|
1185
|
+
params: list[str | int] = []
|
|
1186
|
+
|
|
1187
|
+
if session_id:
|
|
1188
|
+
where_clauses.append("session_id = ?")
|
|
1189
|
+
params.append(session_id)
|
|
1190
|
+
if from_agent:
|
|
1191
|
+
where_clauses.append("from_agent = ?")
|
|
1192
|
+
params.append(from_agent)
|
|
1193
|
+
if to_agent:
|
|
1194
|
+
where_clauses.append("to_agent = ?")
|
|
1195
|
+
params.append(to_agent)
|
|
1196
|
+
|
|
1197
|
+
where_sql = " AND ".join(where_clauses)
|
|
1198
|
+
|
|
1199
|
+
# Query agent_collaboration table for delegations
|
|
1200
|
+
cursor.execute(
|
|
1201
|
+
f"""
|
|
1202
|
+
SELECT
|
|
1203
|
+
handoff_id,
|
|
1204
|
+
from_agent,
|
|
1205
|
+
to_agent,
|
|
1206
|
+
session_id,
|
|
1207
|
+
feature_id,
|
|
1208
|
+
handoff_type,
|
|
1209
|
+
reason,
|
|
1210
|
+
context,
|
|
1211
|
+
timestamp
|
|
1212
|
+
FROM agent_collaboration
|
|
1213
|
+
WHERE {where_sql}
|
|
1214
|
+
ORDER BY timestamp DESC
|
|
1215
|
+
LIMIT ?
|
|
1216
|
+
""",
|
|
1217
|
+
params + [limit],
|
|
1218
|
+
)
|
|
1219
|
+
|
|
1220
|
+
rows = cursor.fetchall()
|
|
1221
|
+
|
|
1222
|
+
# Convert to dictionaries
|
|
1223
|
+
delegations = []
|
|
1224
|
+
for row in rows:
|
|
1225
|
+
row_dict = dict(row)
|
|
1226
|
+
delegations.append(row_dict)
|
|
1227
|
+
|
|
1228
|
+
return delegations
|
|
1229
|
+
except sqlite3.Error as e:
|
|
1230
|
+
logger.error(f"Error querying delegations: {e}")
|
|
1231
|
+
return []
|
|
1232
|
+
|
|
1233
|
+
def insert_collaboration(
    self,
    handoff_id: str,
    from_agent: str,
    to_agent: str,
    session_id: str,
    handoff_type: str = "delegation",
    reason: str | None = None,
    context: dict[str, Any] | None = None,
    status: str = "pending",
) -> bool:
    """
    Record an agent collaboration/delegation event.

    Args:
        handoff_id: Unique handoff identifier
        from_agent: Agent initiating the handoff
        to_agent: Target agent receiving the task
        session_id: Session this handoff belongs to
        handoff_type: Type of handoff (delegation, parallel, sequential, fallback)
        reason: Reason for the handoff (optional)
        context: Additional metadata, JSON-serialized before storage (optional)
        status: Status of the handoff (pending, accepted, rejected, completed, failed)

    Returns:
        True if insert successful, False otherwise
    """
    # Lazily open the connection, matching the other accessors.
    if not self.connection:
        self.connect()

    try:
        cursor = self.connection.cursor()  # type: ignore[union-attr]
        cursor.execute(
            """
            INSERT INTO agent_collaboration
            (handoff_id, from_agent, to_agent, session_id, handoff_type,
             reason, context, status)
            VALUES (?, ?, ?, ?, ?, ?, ?, ?)
            """,
            (
                handoff_id,
                from_agent,
                to_agent,
                session_id,
                handoff_type,
                reason,
                # `is not None` so an explicitly supplied empty dict ({})
                # is stored as "{}" instead of silently becoming NULL.
                json.dumps(context) if context is not None else None,
                status,
            ),
        )
        self.connection.commit()  # type: ignore[union-attr]
        return True
    except sqlite3.Error as e:
        logger.error(f"Error inserting collaboration record: {e}")
        return False
|
|
1288
|
+
|
|
1289
|
+
def insert_tool_trace(
    self,
    tool_use_id: str,
    trace_id: str,
    session_id: str,
    tool_name: str,
    tool_input: dict[str, Any] | None = None,
    start_time: str | None = None,
    parent_tool_use_id: str | None = None,
) -> bool:
    """
    Insert a tool trace start event.

    Args:
        tool_use_id: Unique tool use identifier (UUID)
        trace_id: Parent trace ID for correlation
        session_id: Session this tool use belongs to
        tool_name: Name of the tool being executed
        tool_input: Tool input parameters as dict (optional)
        start_time: Start time ISO8601 UTC (optional, defaults to now)
        parent_tool_use_id: Parent tool use ID if nested (optional)

    Returns:
        True if insert successful, False otherwise
    """
    if not self.connection:
        self.connect()

    try:
        cursor = self.connection.cursor()  # type: ignore[union-attr]

        # Default the start time to the current UTC instant.
        if start_time is None:
            start_time = datetime.now(timezone.utc).isoformat()

        cursor.execute(
            """
            INSERT INTO tool_traces
            (tool_use_id, trace_id, session_id, tool_name, tool_input,
             start_time, status, parent_tool_use_id)
            VALUES (?, ?, ?, ?, ?, ?, ?, ?)
            """,
            (
                tool_use_id,
                trace_id,
                session_id,
                tool_name,
                # `is not None` so an explicit empty input dict is stored
                # as "{}" rather than collapsed to NULL.
                json.dumps(tool_input) if tool_input is not None else None,
                start_time,
                "started",  # new traces always begin in the 'started' state
                parent_tool_use_id,
            ),
        )
        self.connection.commit()  # type: ignore[union-attr]
        return True
    except sqlite3.Error as e:
        logger.error(f"Error inserting tool trace: {e}")
        return False
|
|
1346
|
+
|
|
1347
|
+
def update_tool_trace(
    self,
    tool_use_id: str,
    tool_output: dict[str, Any] | None = None,
    end_time: str | None = None,
    duration_ms: int | None = None,
    status: str = "completed",
    error_message: str | None = None,
) -> bool:
    """
    Update tool trace with completion data.

    Args:
        tool_use_id: Tool use ID to update
        tool_output: Tool output result (optional)
        end_time: End time ISO8601 UTC (optional, defaults to now)
        duration_ms: Execution duration in milliseconds (optional)
        status: Final status (completed, failed, timeout, cancelled)
        error_message: Error message if failed (optional)

    Returns:
        True if update successful, False otherwise

    NOTE(review): returns True even when no row matched tool_use_id
    (rowcount is not inspected); callers relying on "True == row updated"
    should verify — kept as-is to preserve the existing contract.
    """
    if not self.connection:
        self.connect()

    try:
        cursor = self.connection.cursor()  # type: ignore[union-attr]

        # Default the completion time to the current UTC instant.
        if end_time is None:
            end_time = datetime.now(timezone.utc).isoformat()

        cursor.execute(
            """
            UPDATE tool_traces
            SET tool_output = ?, end_time = ?, duration_ms = ?,
                status = ?, error_message = ?
            WHERE tool_use_id = ?
            """,
            (
                # `is not None` so an explicit empty output dict is stored
                # as "{}" rather than collapsed to NULL.
                json.dumps(tool_output) if tool_output is not None else None,
                end_time,
                duration_ms,
                status,
                error_message,
                tool_use_id,
            ),
        )
        self.connection.commit()  # type: ignore[union-attr]
        return True
    except sqlite3.Error as e:
        logger.error(f"Error updating tool trace: {e}")
        return False
|
|
1400
|
+
|
|
1401
|
+
def get_tool_trace(self, tool_use_id: str) -> dict[str, Any] | None:
    """
    Look up a single tool trace record by its tool_use_id.

    Args:
        tool_use_id: Tool use ID to retrieve

    Returns:
        Tool trace dictionary or None if not found
    """
    if not self.connection:
        self.connect()

    try:
        cur = self.connection.cursor()  # type: ignore[union-attr]
        cur.execute(
            """
            SELECT * FROM tool_traces
            WHERE tool_use_id = ?
            """,
            (tool_use_id,),
        )

        record = cur.fetchone()
        if record is None:
            return None
        return dict(record)
    except sqlite3.Error as e:
        logger.error(f"Error fetching tool trace: {e}")
        return None
|
|
1429
|
+
|
|
1430
|
+
def get_session_tool_traces(
    self, session_id: str, limit: int = 1000
) -> list[dict[str, Any]]:
    """
    Fetch every tool trace recorded for a session, newest first.

    Args:
        session_id: Session to query
        limit: Maximum number of results

    Returns:
        List of tool trace dictionaries
    """
    if not self.connection:
        self.connect()

    try:
        cur = self.connection.cursor()  # type: ignore[union-attr]
        cur.execute(
            """
            SELECT * FROM tool_traces
            WHERE session_id = ?
            ORDER BY start_time DESC
            LIMIT ?
            """,
            (session_id, limit),
        )

        # Convert each sqlite3.Row into a plain dict for callers.
        return list(map(dict, cur.fetchall()))
    except sqlite3.Error as e:
        logger.error(f"Error querying tool traces: {e}")
        return []
|
|
1463
|
+
|
|
1464
|
+
def update_session_activity(self, session_id: str, user_query: str) -> None:
    """
    Record the latest user-query activity on a session.

    Args:
        session_id: Session ID to update
        user_query: The user query text (will be truncated to 200 chars)
    """
    if not self.connection:
        self.connect()

    try:
        # Truncate long queries; an empty/falsy query is stored as NULL.
        truncated = user_query[:200] if user_query else None
        now_iso = datetime.now(timezone.utc).isoformat()

        cur = self.connection.cursor()  # type: ignore[union-attr]
        cur.execute(
            """
            UPDATE sessions
            SET last_user_query_at = ?, last_user_query = ?
            WHERE session_id = ?
            """,
            (
                now_iso,
                truncated,
                session_id,
            ),
        )
        self.connection.commit()  # type: ignore[union-attr]
    except sqlite3.Error as e:
        logger.error(f"Error updating session activity: {e}")
|
|
1492
|
+
|
|
1493
|
+
def get_concurrent_sessions(
    self, current_session_id: str, minutes: int = 30
) -> list[dict[str, Any]]:
    """
    List other active sessions seen within the last N minutes.

    Args:
        current_session_id: Current session ID to exclude from results
        minutes: Time window in minutes (default: 30)

    Returns:
        List of concurrent session dictionaries
    """
    if not self.connection:
        self.connect()

    try:
        # A session counts as concurrent if it queried OR was created
        # after this cutoff.
        window_start = (
            datetime.now(timezone.utc) - timedelta(minutes=minutes)
        ).isoformat()

        cur = self.connection.cursor()  # type: ignore[union-attr]
        cur.execute(
            """
            SELECT session_id, agent_assigned, created_at, last_user_query_at,
                   last_user_query, status
            FROM sessions
            WHERE session_id != ?
              AND status = 'active'
              AND (last_user_query_at > ? OR created_at > ?)
            ORDER BY last_user_query_at DESC
            """,
            (current_session_id, window_start, window_start),
        )

        return [dict(record) for record in cur.fetchall()]
    except sqlite3.Error as e:
        logger.error(f"Error querying concurrent sessions: {e}")
        return []
|
|
1532
|
+
|
|
1533
|
+
def insert_live_event(
    self,
    event_type: str,
    event_data: dict[str, Any],
    parent_event_id: str | None = None,
    session_id: str | None = None,
    spawner_type: str | None = None,
) -> int | None:
    """
    Insert a live event for real-time WebSocket streaming.

    These events are temporary and should be cleaned up after broadcast.

    Args:
        event_type: Type of live event (spawner_start, spawner_phase, spawner_complete, etc.)
        event_data: Event payload as dictionary (will be JSON serialized)
        parent_event_id: Parent event ID for hierarchical linking (optional)
        session_id: Session this event belongs to (optional)
        spawner_type: Spawner type (gemini, codex, copilot) if applicable (optional)

    Returns:
        Live event ID if successful, None otherwise
    """
    if not self.connection:
        self.connect()

    try:
        # Payload is always serialized; event_data is a required argument.
        row_values = (
            event_type,
            json.dumps(event_data),
            parent_event_id,
            session_id,
            spawner_type,
        )

        cur = self.connection.cursor()  # type: ignore[union-attr]
        cur.execute(
            """
            INSERT INTO live_events
            (event_type, event_data, parent_event_id, session_id, spawner_type)
            VALUES (?, ?, ?, ?, ?)
            """,
            row_values,
        )
        self.connection.commit()  # type: ignore[union-attr]
        # The autoincrement row id doubles as the live event ID.
        return cur.lastrowid
    except sqlite3.Error as e:
        logger.error(f"Error inserting live event: {e}")
        return None
|
|
1580
|
+
|
|
1581
|
+
def get_pending_live_events(self, limit: int = 100) -> list[dict[str, Any]]:
    """
    Fetch live events that have not been broadcast yet, oldest first.

    Args:
        limit: Maximum number of events to return

    Returns:
        List of pending live event dictionaries
    """
    if not self.connection:
        self.connect()

    try:
        cur = self.connection.cursor()  # type: ignore[union-attr]
        cur.execute(
            """
            SELECT id, event_type, event_data, parent_event_id, session_id,
                   spawner_type, created_at
            FROM live_events
            WHERE broadcast_at IS NULL
            ORDER BY created_at ASC
            LIMIT ?
            """,
            (limit,),
        )

        pending: list[dict[str, Any]] = []
        for record in cur.fetchall():
            item = dict(record)
            raw_payload = item.get("event_data")
            if raw_payload:
                # Decode the stored JSON payload; on malformed data the
                # raw string is left in place rather than raising.
                try:
                    item["event_data"] = json.loads(raw_payload)
                except json.JSONDecodeError:
                    pass
            pending.append(item)
        return pending
    except sqlite3.Error as e:
        logger.error(f"Error fetching pending live events: {e}")
        return []
|
|
1623
|
+
|
|
1624
|
+
def mark_live_events_broadcast(self, event_ids: list[int]) -> bool:
    """
    Mark live events as broadcast (sets broadcast_at timestamp).

    Args:
        event_ids: List of live event IDs to mark as broadcast

    Returns:
        True if successful, False otherwise (an empty id list returns False)
    """
    # Nothing to do for an empty list; keep the historical False return.
    if not event_ids:
        return False

    # Lazily connect like every other method on this class, instead of
    # returning False when the connection has not been opened yet.
    if not self.connection:
        self.connect()

    try:
        cursor = self.connection.cursor()  # type: ignore[union-attr]
        # One "?" per id; sqlite3 has no native list-binding for IN (...).
        placeholders = ",".join("?" for _ in event_ids)
        cursor.execute(
            f"""
            UPDATE live_events
            SET broadcast_at = CURRENT_TIMESTAMP
            WHERE id IN ({placeholders})
            """,
            event_ids,
        )
        self.connection.commit()  # type: ignore[union-attr]
        return True
    except sqlite3.Error as e:
        logger.error(f"Error marking live events as broadcast: {e}")
        return False
|
|
1653
|
+
|
|
1654
|
+
def cleanup_old_live_events(self, max_age_minutes: int = 5) -> int:
    """
    Delete live events that have been broadcast and are older than max_age_minutes.

    Args:
        max_age_minutes: Maximum age in minutes for broadcast events

    Returns:
        Number of deleted events
    """
    if not self.connection:
        self.connect()

    try:
        # Only events created before this threshold (and already
        # broadcast) are eligible for deletion.
        threshold = (
            datetime.now(timezone.utc) - timedelta(minutes=max_age_minutes)
        ).isoformat()

        cur = self.connection.cursor()  # type: ignore[union-attr]
        cur.execute(
            """
            DELETE FROM live_events
            WHERE broadcast_at IS NOT NULL
              AND created_at < ?
            """,
            (threshold,),
        )
        removed = cur.rowcount
        self.connection.commit()  # type: ignore[union-attr]
        return removed
    except sqlite3.Error as e:
        logger.error(f"Error cleaning up old live events: {e}")
        return 0
|
|
1686
|
+
|
|
1687
|
+
def get_events_for_task(self, claude_task_id: str) -> list[dict[str, Any]]:
    """
    Get all events (and their direct child events) for a Claude Code task.

    This enables answering "show me all the work (tool calls) that happened
    when this Task() was delegated".

    NOTE(review): the CTE below is not recursive — it matches events whose
    claude_task_id equals the given id, plus events whose parent_event_id
    points at one of those events (one level of children only). Deeper
    descendants are not returned; confirm whether that is intended.

    Args:
        claude_task_id: Claude Code's internal task ID

    Returns:
        List of event dictionaries, ordered by timestamp
    """
    if not self.connection:
        self.connect()

    try:
        cursor = self.connection.cursor()  # type: ignore[union-attr]
        # task_events collects the ids of the task's own events; the outer
        # query unions those with their immediate children via parent_event_id.
        cursor.execute(
            """
            WITH task_events AS (
                SELECT event_id FROM agent_events
                WHERE claude_task_id = ?
            )
            SELECT ae.* FROM agent_events ae
            WHERE ae.claude_task_id = ?
                OR ae.parent_event_id IN (
                    SELECT event_id FROM task_events
                )
            ORDER BY ae.created_at
            """,
            # Same id bound twice: once inside the CTE, once in the outer WHERE.
            (claude_task_id, claude_task_id),
        )

        rows = cursor.fetchall()
        return [dict(row) for row in rows]
    except sqlite3.Error as e:
        logger.error(f"Error querying events for task: {e}")
        return []
|
|
1726
|
+
|
|
1727
|
+
def get_subagent_work(self, session_id: str) -> dict[str, list[dict[str, Any]]]:
    """
    Get all work grouped by which subagent did it.

    This enables answering "which subagent did what work in this session?"

    Args:
        session_id: Session ID to analyze

    Returns:
        Dictionary mapping subagent_type to list of events they executed.
        Example: {
            'researcher': [
                {'tool_name': 'Read', 'input_summary': '...', ...},
                {'tool_name': 'Grep', 'input_summary': '...', ...}
            ],
            'general-purpose': [
                {'tool_name': 'Bash', 'input_summary': '...', ...}
            ]
        }
    """
    if not self.connection:
        self.connect()

    try:
        cur = self.connection.cursor()  # type: ignore[union-attr]
        cur.execute(
            """
            SELECT
                ae.subagent_type,
                ae.tool_name,
                ae.event_id,
                ae.input_summary,
                ae.output_summary,
                ae.created_at,
                ae.claude_task_id
            FROM agent_events ae
            WHERE ae.session_id = ?
              AND ae.subagent_type IS NOT NULL
              AND ae.event_type = 'tool_call'
            ORDER BY ae.subagent_type, ae.created_at
            """,
            (session_id,),
        )

        # Bucket the rows by subagent; the subagent_type column itself is
        # popped out of each event dict since it becomes the grouping key.
        grouped: dict[str, list[dict[str, Any]]] = {}
        for record in cur.fetchall():
            event = dict(record)
            key = event.pop("subagent_type")
            grouped.setdefault(key, []).append(event)

        return grouped
    except sqlite3.Error as e:
        logger.error(f"Error querying subagent work: {e}")
        return {}
|
|
1785
|
+
|
|
1786
|
+
def close(self) -> None:
    """Clean up database connection.

    Thin alias that delegates to :meth:`disconnect`.
    """
    self.disconnect()