htmlgraph 0.24.1__py3-none-any.whl → 0.25.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- htmlgraph/__init__.py +20 -1
- htmlgraph/agent_detection.py +26 -10
- htmlgraph/analytics/cross_session.py +4 -3
- htmlgraph/analytics/work_type.py +52 -16
- htmlgraph/analytics_index.py +51 -19
- htmlgraph/api/__init__.py +3 -0
- htmlgraph/api/main.py +2115 -0
- htmlgraph/api/static/htmx.min.js +1 -0
- htmlgraph/api/static/style-redesign.css +1344 -0
- htmlgraph/api/static/style.css +1079 -0
- htmlgraph/api/templates/dashboard-redesign.html +812 -0
- htmlgraph/api/templates/dashboard.html +783 -0
- htmlgraph/api/templates/partials/activity-feed-hierarchical.html +326 -0
- htmlgraph/api/templates/partials/activity-feed.html +570 -0
- htmlgraph/api/templates/partials/agents-redesign.html +317 -0
- htmlgraph/api/templates/partials/agents.html +317 -0
- htmlgraph/api/templates/partials/event-traces.html +373 -0
- htmlgraph/api/templates/partials/features-kanban-redesign.html +509 -0
- htmlgraph/api/templates/partials/features.html +509 -0
- htmlgraph/api/templates/partials/metrics-redesign.html +346 -0
- htmlgraph/api/templates/partials/metrics.html +346 -0
- htmlgraph/api/templates/partials/orchestration-redesign.html +443 -0
- htmlgraph/api/templates/partials/orchestration.html +163 -0
- htmlgraph/api/templates/partials/spawners.html +375 -0
- htmlgraph/atomic_ops.py +560 -0
- htmlgraph/builders/base.py +55 -1
- htmlgraph/builders/bug.py +17 -2
- htmlgraph/builders/chore.py +17 -2
- htmlgraph/builders/epic.py +17 -2
- htmlgraph/builders/feature.py +25 -2
- htmlgraph/builders/phase.py +17 -2
- htmlgraph/builders/spike.py +27 -2
- htmlgraph/builders/track.py +14 -0
- htmlgraph/cigs/__init__.py +4 -0
- htmlgraph/cigs/reporter.py +818 -0
- htmlgraph/cli.py +1427 -401
- htmlgraph/cli_commands/__init__.py +1 -0
- htmlgraph/cli_commands/feature.py +195 -0
- htmlgraph/cli_framework.py +115 -0
- htmlgraph/collections/__init__.py +2 -0
- htmlgraph/collections/base.py +21 -0
- htmlgraph/collections/session.py +189 -0
- htmlgraph/collections/spike.py +7 -1
- htmlgraph/collections/task_delegation.py +236 -0
- htmlgraph/collections/traces.py +482 -0
- htmlgraph/config.py +113 -0
- htmlgraph/converter.py +41 -0
- htmlgraph/cost_analysis/__init__.py +5 -0
- htmlgraph/cost_analysis/analyzer.py +438 -0
- htmlgraph/dashboard.html +3315 -492
- htmlgraph-0.24.1.data/data/htmlgraph/dashboard.html → htmlgraph/dashboard.html.backup +2246 -248
- htmlgraph/dashboard.html.bak +7181 -0
- htmlgraph/dashboard.html.bak2 +7231 -0
- htmlgraph/dashboard.html.bak3 +7232 -0
- htmlgraph/db/__init__.py +38 -0
- htmlgraph/db/queries.py +790 -0
- htmlgraph/db/schema.py +1334 -0
- htmlgraph/deploy.py +26 -27
- htmlgraph/docs/API_REFERENCE.md +841 -0
- htmlgraph/docs/HTTP_API.md +750 -0
- htmlgraph/docs/INTEGRATION_GUIDE.md +752 -0
- htmlgraph/docs/ORCHESTRATION_PATTERNS.md +710 -0
- htmlgraph/docs/README.md +533 -0
- htmlgraph/docs/version_check.py +3 -1
- htmlgraph/error_handler.py +544 -0
- htmlgraph/event_log.py +2 -0
- htmlgraph/hooks/__init__.py +8 -0
- htmlgraph/hooks/bootstrap.py +169 -0
- htmlgraph/hooks/context.py +271 -0
- htmlgraph/hooks/drift_handler.py +521 -0
- htmlgraph/hooks/event_tracker.py +405 -15
- htmlgraph/hooks/post_tool_use_handler.py +257 -0
- htmlgraph/hooks/pretooluse.py +476 -6
- htmlgraph/hooks/prompt_analyzer.py +648 -0
- htmlgraph/hooks/session_handler.py +583 -0
- htmlgraph/hooks/state_manager.py +501 -0
- htmlgraph/hooks/subagent_stop.py +309 -0
- htmlgraph/hooks/task_enforcer.py +39 -0
- htmlgraph/models.py +111 -15
- htmlgraph/operations/fastapi_server.py +230 -0
- htmlgraph/orchestration/headless_spawner.py +22 -14
- htmlgraph/pydantic_models.py +476 -0
- htmlgraph/quality_gates.py +350 -0
- htmlgraph/repo_hash.py +511 -0
- htmlgraph/sdk.py +348 -10
- htmlgraph/server.py +194 -0
- htmlgraph/session_hooks.py +300 -0
- htmlgraph/session_manager.py +131 -1
- htmlgraph/session_registry.py +587 -0
- htmlgraph/session_state.py +436 -0
- htmlgraph/system_prompts.py +449 -0
- htmlgraph/templates/orchestration-view.html +350 -0
- htmlgraph/track_builder.py +19 -0
- htmlgraph/validation.py +115 -0
- htmlgraph-0.25.0.data/data/htmlgraph/dashboard.html +7417 -0
- {htmlgraph-0.24.1.dist-info → htmlgraph-0.25.0.dist-info}/METADATA +91 -64
- {htmlgraph-0.24.1.dist-info → htmlgraph-0.25.0.dist-info}/RECORD +103 -42
- {htmlgraph-0.24.1.data → htmlgraph-0.25.0.data}/data/htmlgraph/styles.css +0 -0
- {htmlgraph-0.24.1.data → htmlgraph-0.25.0.data}/data/htmlgraph/templates/AGENTS.md.template +0 -0
- {htmlgraph-0.24.1.data → htmlgraph-0.25.0.data}/data/htmlgraph/templates/CLAUDE.md.template +0 -0
- {htmlgraph-0.24.1.data → htmlgraph-0.25.0.data}/data/htmlgraph/templates/GEMINI.md.template +0 -0
- {htmlgraph-0.24.1.dist-info → htmlgraph-0.25.0.dist-info}/WHEEL +0 -0
- {htmlgraph-0.24.1.dist-info → htmlgraph-0.25.0.dist-info}/entry_points.txt +0 -0
htmlgraph/__init__.py
CHANGED
@@ -8,6 +8,14 @@ hyperlinks as edges, and CSS selectors as the query language.
 from htmlgraph.agent_detection import detect_agent_name, get_agent_display_name
 from htmlgraph.agents import AgentInterface
 from htmlgraph.analytics import Analytics, DependencyAnalytics
+from htmlgraph.atomic_ops import (
+    AtomicFileWriter,
+    DirectoryLocker,
+    atomic_rename,
+    cleanup_orphaned_temp_files,
+    safe_temp_file,
+    validate_atomic_write,
+)
 from htmlgraph.builders import BaseBuilder, FeatureBuilder, SpikeBuilder
 from htmlgraph.collections import BaseCollection, FeatureCollection, SpikeCollection
 from htmlgraph.context_analytics import ContextAnalytics, ContextUsage

@@ -56,9 +64,11 @@ from htmlgraph.orchestrator_mode import OrchestratorMode, OrchestratorModeManage
 from htmlgraph.parallel import AggregateResult, ParallelAnalysis, ParallelWorkflow
 from htmlgraph.query_builder import Condition, Operator, QueryBuilder
 from htmlgraph.reflection import ComputationalReflection, get_reflection_context
+from htmlgraph.repo_hash import RepoHash
 from htmlgraph.sdk import SDK
 from htmlgraph.server import serve
 from htmlgraph.session_manager import SessionManager
+from htmlgraph.session_registry import SessionRegistry
 from htmlgraph.types import (
     ActiveWorkItem,
     AggregateResultsDict,

@@ -85,7 +95,7 @@ from htmlgraph.types import (
 )
 from htmlgraph.work_type_utils import infer_work_type, infer_work_type_from_id

-__version__ = "0.24.1"
+__version__ = "0.25.0"
 __all__ = [
     # Exceptions
     "HtmlGraphError",

@@ -123,6 +133,8 @@ __all__ = [
     "find_all",
     "AgentInterface",
     "SessionManager",
+    "SessionRegistry",
+    "RepoHash",
     "SDK",
     "Analytics",  # Phase 2: Work Type Analytics
     "DependencyAnalytics",  # Advanced dependency-aware analytics

@@ -190,4 +202,11 @@ __all__ = [
     "get_results_by_task_id",
     "parallel_delegate",
     "generate_task_id",
+    # Atomic file operations (Phase 1.3: Session File Tracking)
+    "AtomicFileWriter",
+    "DirectoryLocker",
+    "atomic_rename",
+    "cleanup_orphaned_temp_files",
+    "safe_temp_file",
+    "validate_atomic_write",
 ]
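The new htmlgraph.atomic_ops exports (AtomicFileWriter, atomic_rename, safe_temp_file, validate_atomic_write) point at the usual temp-file-plus-rename approach to crash-safe writes. The sketch below shows that general pattern with the standard library only; atomic_write here is a hypothetical helper for illustration, since the diff does not show the package's own API.

import os
import tempfile

def atomic_write(path: str, data: str) -> None:
    """Write data to path so readers never observe a partially written file."""
    directory = os.path.dirname(path) or "."
    # Create the temp file in the same directory so os.replace stays on one filesystem.
    fd, tmp_path = tempfile.mkstemp(dir=directory, suffix=".tmp")
    try:
        with os.fdopen(fd, "w") as handle:
            handle.write(data)
            handle.flush()
            os.fsync(handle.fileno())  # push the bytes to disk before renaming
        os.replace(tmp_path, path)     # atomic rename on POSIX and Windows
    except BaseException:
        os.unlink(tmp_path)            # clean up the orphaned temp file on failure
        raise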
htmlgraph/agent_detection.py
CHANGED
@@ -14,34 +14,48 @@ def detect_agent_name() -> str:
     Detect the current agent/interface name based on environment.

     Returns:
-        Agent name (e.g., "claude", "gemini", "cli")
+        Agent name (e.g., "claude-code", "gemini", "cli")

     Detection order:
     1. HTMLGRAPH_AGENT environment variable (explicit override)
-    2.
-    3. Gemini detection (GEMINI environment markers)
-    4.
+    2. HTMLGRAPH_PARENT_AGENT (set by hooks for session context)
+    3. Gemini detection (GEMINI environment markers) - checked before Claude to allow override
+    4. Claude Code detection (CLAUDECODE env var, parent process)
+    5. Fall back to "cli" only if no AI agent detected
     """
     # 1. Explicit override
     explicit = os.environ.get("HTMLGRAPH_AGENT")
     if explicit:
         return explicit.strip()

-    # 2.
-
-
+    # 2. Parent agent context (set by HtmlGraph hooks)
+    parent_agent = os.environ.get("HTMLGRAPH_PARENT_AGENT")
+    if parent_agent:
+        return parent_agent.strip()

-    # 3. Gemini detection
+    # 3. Gemini detection (checked before Claude to allow explicit override)
     if _is_gemini():
         return "gemini"

-    # 4.
+    # 4. Claude Code detection
+    if _is_claude_code():
+        return "claude-code"
+
+    # 5. Default to CLI only if no AI agent detected
     return "cli"


 def _is_claude_code() -> bool:
     """Check if running in Claude Code environment."""
     # Check for Claude Code environment variables
+    # CLAUDECODE=1 is set by Claude Code CLI
+    if os.environ.get("CLAUDECODE"):
+        return True
+
+    # CLAUDE_CODE_ENTRYPOINT indicates the entry point (cli, api, etc.)
+    if os.environ.get("CLAUDE_CODE_ENTRYPOINT"):
+        return True
+
     if os.environ.get("CLAUDE_CODE_VERSION"):
         return True

@@ -100,12 +114,14 @@ def get_agent_display_name(agent: str) -> str:
     """
     display_names = {
         "claude": "Claude",
-        "claude-code": "Claude",
+        "claude-code": "Claude Code",
         "gemini": "Gemini",
         "cli": "CLI",
         "haiku": "Haiku",
         "opus": "Opus",
         "sonnet": "Sonnet",
+        "claude-opus-4-5-20251101": "Claude Opus 4.5",
+        "claude-sonnet-4-20250514": "Claude Sonnet 4",
     }

     return display_names.get(agent.lower(), agent.title())
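The expanded docstring above spells out a five-step precedence for agent detection. A condensed sketch of that order follows; the Claude Code variables are the ones named in this diff, while the Gemini marker shown is an assumption, since _is_gemini() is not part of these hunks.

import os

def detect_agent_sketch() -> str:
    """Condensed illustration of the new detection order; not the packaged function."""
    explicit = os.environ.get("HTMLGRAPH_AGENT")          # 1. explicit override
    if explicit:
        return explicit.strip()
    parent = os.environ.get("HTMLGRAPH_PARENT_AGENT")     # 2. parent agent set by hooks
    if parent:
        return parent.strip()
    if os.environ.get("GEMINI_CLI"):                      # 3. Gemini marker (variable name assumed)
        return "gemini"
    claude_markers = ("CLAUDECODE", "CLAUDE_CODE_ENTRYPOINT", "CLAUDE_CODE_VERSION")
    if any(os.environ.get(var) for var in claude_markers):
        return "claude-code"                              # 4. Claude Code detection
    return "cli"                                          # 5. fallback when no AI agent is detected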
htmlgraph/analytics/cross_session.py
CHANGED

@@ -51,6 +51,7 @@ if TYPE_CHECKING:
     from htmlgraph import SDK

 from htmlgraph.event_log import JsonlEventLog
+from htmlgraph.models import utc_now


 @dataclass

@@ -598,7 +599,7 @@ class CrossSessionAnalytics:
     def _parse_timestamp(self, timestamp: str | datetime | None) -> datetime:
         """Parse timestamp from various formats."""
         if timestamp is None:
-            return
+            return utc_now()

         if isinstance(timestamp, datetime):
             return timestamp

@@ -607,6 +608,6 @@ class CrossSessionAnalytics:
         try:
             return datetime.fromisoformat(timestamp.replace("Z", "+00:00"))
         except (ValueError, AttributeError):
-            return
+            return utc_now()

-        return
+        return utc_now()
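The bare return statements removed here yielded None despite the declared -> datetime return type; the patch falls back to utc_now() instead. A self-contained sketch of the patched behaviour, assuming utc_now() simply returns a timezone-aware UTC timestamp (its actual body is not shown in this diff):

from datetime import datetime, timezone

def utc_now() -> datetime:
    # Assumed behaviour of htmlgraph.models.utc_now: current time, UTC-aware.
    return datetime.now(timezone.utc)

def parse_timestamp(timestamp: str | datetime | None) -> datetime:
    """Mirror of the patched fallback logic: never return None."""
    if timestamp is None:
        return utc_now()
    if isinstance(timestamp, datetime):
        return timestamp
    try:
        return datetime.fromisoformat(timestamp.replace("Z", "+00:00"))
    except (ValueError, AttributeError):
        return utc_now()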
htmlgraph/analytics/work_type.py
CHANGED
@@ -27,17 +27,35 @@ Example:

 from __future__ import annotations

-from datetime import datetime
+from datetime import datetime, timezone
 from typing import TYPE_CHECKING

 if TYPE_CHECKING:
     from htmlgraph import SDK

 from htmlgraph.converter import html_to_session
-from htmlgraph.models import Session, WorkType
+from htmlgraph.models import Session, WorkType, utc_now
 from htmlgraph.session_manager import SessionManager


+def normalize_datetime(dt: datetime | None) -> datetime | None:
+    """
+    Normalize datetime to UTC-aware format for safe comparisons.
+
+    Handles three cases:
+    - None: returns None
+    - Naive (no timezone): assumes UTC and adds timezone
+    - Aware (has timezone): converts to UTC
+    """
+    if dt is None:
+        return None
+    if dt.tzinfo is None:
+        # Naive datetime - assume UTC
+        return dt.replace(tzinfo=timezone.utc)
+    # Already aware - convert to UTC
+    return dt.astimezone(timezone.utc)
+
+
 class Analytics:
     """
     Analytics interface for work type analysis.

@@ -270,9 +288,11 @@ class Analytics:
                 continue

             # Check date range
-
+            start_normalized = normalize_datetime(start_date)
+            end_normalized = normalize_datetime(end_date)
+            if start_normalized and session.started_at < start_normalized:
                 continue
-            if
+            if end_normalized and session.started_at > end_normalized:
                 continue

             # Check primary work type

@@ -413,18 +433,26 @@ class Analytics:
         # Calculate time for each spike
         for spike in all_spikes:
             # Apply date filters
-
+            start_normalized = normalize_datetime(start_date)
+            end_normalized = normalize_datetime(end_date)
+            if start_normalized and spike.created < start_normalized:
                 continue
-            if
+            if end_normalized and spike.created > end_normalized:
                 continue

-            # Calculate duration
-            start_time = spike.created
+            # Calculate duration (normalize datetimes for safe comparison)
+            start_time = normalize_datetime(spike.created)
+            if not start_time:
+                continue  # Skip if spike creation date is missing
             if spike.status == "done" and spike.updated:
-                end_time = spike.updated
+                end_time = normalize_datetime(spike.updated)
             else:
                 # If still in progress, use last updated time
-                end_time =
+                end_time = normalize_datetime(
+                    spike.updated if spike.updated else utc_now()
+                )
+            if not end_time:
+                end_time = start_time  # Fallback to start time if end time missing

             duration = (
                 end_time - start_time

@@ -460,17 +488,25 @@ class Analytics:

         for node in nodes:
             # Apply date filters
-
+            start_normalized = normalize_datetime(start_date)
+            end_normalized = normalize_datetime(end_date)
+            if start_normalized and node.created < start_normalized:
                 continue
-            if
+            if end_normalized and node.created > end_normalized:
                 continue

-            # Calculate duration
-            start_time = node.created
+            # Calculate duration (normalize datetimes for safe comparison)
+            start_time = normalize_datetime(node.created)
+            if not start_time:
+                continue  # Skip if node creation date is missing
             if node.status == "done" and node.updated:
-                end_time = node.updated
+                end_time = normalize_datetime(node.updated)
             else:
-                end_time =
+                end_time = normalize_datetime(
+                    node.updated if node.updated else utc_now()
+                )
+            if not end_time:
+                end_time = start_time  # Fallback to start time if end time missing

             duration = (end_time - start_time).total_seconds() / 60
             feature_minutes += duration
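The new normalize_datetime helper exists to avoid the TypeError Python raises when naive and aware datetimes are compared. A short usage sketch with made-up timestamps, reusing the function exactly as added above:

from datetime import datetime, timezone

def normalize_datetime(dt: datetime | None) -> datetime | None:
    # Same logic as the helper added in this release.
    if dt is None:
        return None
    if dt.tzinfo is None:
        return dt.replace(tzinfo=timezone.utc)
    return dt.astimezone(timezone.utc)

naive = datetime(2025, 1, 15, 12, 0)                       # no tzinfo (illustrative value)
aware = datetime(2025, 1, 15, 13, 0, tzinfo=timezone.utc)  # UTC-aware (illustrative value)

# Comparing naive and aware datetimes directly raises TypeError;
# after normalization both sides are UTC-aware and compare cleanly.
assert normalize_datetime(naive) < normalize_datetime(aware)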
htmlgraph/analytics_index.py
CHANGED
@@ -14,7 +14,7 @@ from dataclasses import dataclass
 from pathlib import Path
 from typing import Any

-SCHEMA_VERSION =
+SCHEMA_VERSION = 4  # Bumped: renamed 'agent' column to 'agent_assigned'


 @dataclass(frozen=True)

@@ -82,12 +82,14 @@ class AnalyticsIndex:
             """
             CREATE TABLE IF NOT EXISTS sessions (
                 session_id TEXT PRIMARY KEY,
-
+                agent_assigned TEXT,
                 start_commit TEXT,
                 continued_from TEXT,
                 status TEXT,
                 started_at TEXT,
-                ended_at TEXT
+                ended_at TEXT,
+                parent_session_id TEXT,
+                parent_event_id TEXT
             )
             """
         )

@@ -103,6 +105,9 @@ class AnalyticsIndex:
                 feature_id TEXT,
                 drift_score REAL,
                 payload_json TEXT,
+                parent_event_id TEXT,
+                cost_tokens INTEGER,
+                execution_duration_seconds REAL,
                 FOREIGN KEY(session_id) REFERENCES sessions(session_id)
             )
             """

@@ -157,6 +162,9 @@ class AnalyticsIndex:
         )

         # Indexes for typical dashboard queries
+        conn.execute(
+            "CREATE INDEX IF NOT EXISTS idx_sessions_parent ON sessions(parent_session_id)"
+        )
         conn.execute("CREATE INDEX IF NOT EXISTS idx_events_ts ON events(ts)")
         conn.execute(
             "CREATE INDEX IF NOT EXISTS idx_events_session_ts ON events(session_id, ts)"

@@ -190,15 +198,17 @@ class AnalyticsIndex:
         with self.connect() as conn:
             conn.execute(
                 """
-                INSERT INTO sessions(session_id,
-                VALUES(
+                INSERT INTO sessions(session_id, agent_assigned, start_commit, continued_from, status, started_at, ended_at, parent_session_id, parent_event_id)
+                VALUES(?,?,?,?,?,?,?,?,?)
                 ON CONFLICT(session_id) DO UPDATE SET
-
+                    agent_assigned=excluded.agent_assigned,
                     start_commit=excluded.start_commit,
                     continued_from=excluded.continued_from,
                     status=excluded.status,
                     started_at=excluded.started_at,
-                    ended_at=excluded.ended_at
+                    ended_at=excluded.ended_at,
+                    parent_session_id=excluded.parent_session_id,
+                    parent_event_id=excluded.parent_event_id
                 """,
                 (
                     session.get("session_id"),

@@ -208,6 +218,8 @@ class AnalyticsIndex:
                     session.get("status"),
                     session.get("started_at"),
                     session.get("ended_at"),
+                    session.get("parent_session_id"),
+                    session.get("parent_event_id"),
                 ),
             )

@@ -238,8 +250,8 @@ class AnalyticsIndex:
         with self.connect() as conn:
             conn.execute(
                 """
-                INSERT OR IGNORE INTO events(event_id, session_id, ts, tool, summary, success, feature_id, drift_score, payload_json)
-                VALUES(
+                INSERT OR IGNORE INTO events(event_id, session_id, ts, tool, summary, success, feature_id, drift_score, payload_json, parent_event_id, cost_tokens, execution_duration_seconds)
+                VALUES(?,?,?,?,?,?,?,?,?,?,?,?)
                 """,
                 (
                     event_id,

@@ -251,6 +263,9 @@ class AnalyticsIndex:
                     event.get("feature_id"),
                     event.get("drift_score"),
                     payload_json,
+                    event.get("parent_event_id"),
+                    event.get("cost_tokens"),
+                    event.get("execution_duration_seconds"),
                 ),
             )
             # Insert file path rows, idempotent by (event_id, path)

@@ -367,6 +382,8 @@ class AnalyticsIndex:
                     "status": event.get("session_status"),
                     "started_at": None,
                     "ended_at": None,
+                    "parent_session_id": event.get("parent_session_id"),
+                    "parent_event_id": event.get("parent_event_id"),
                 },
             )
             if meta.get("agent") is None and event.get("agent"):

@@ -377,6 +394,12 @@ class AnalyticsIndex:
                 meta["continued_from"] = event.get("continued_from")
             if meta.get("status") is None and event.get("session_status"):
                 meta["status"] = event.get("session_status")
+            if meta.get("parent_session_id") is None and event.get(
+                "parent_session_id"
+            ):
+                meta["parent_session_id"] = event.get("parent_session_id")
+            if meta.get("parent_event_id") is None and event.get("parent_event_id"):
+                meta["parent_event_id"] = event.get("parent_event_id")

             # Track time range (treat earliest event as started_at, latest as ended_at if session is ended)
             if meta["started_at"] is None or ts < meta["started_at"]:

@@ -393,8 +416,8 @@ class AnalyticsIndex:

             conn.execute(
                 """
-                INSERT OR IGNORE INTO events(event_id, session_id, ts, tool, summary, success, feature_id, drift_score, payload_json)
-                VALUES(
+                INSERT OR IGNORE INTO events(event_id, session_id, ts, tool, summary, success, feature_id, drift_score, payload_json, parent_event_id, cost_tokens, execution_duration_seconds)
+                VALUES(?,?,?,?,?,?,?,?,?,?,?,?)
                 """,
                 (
                     event_id,

@@ -406,6 +429,9 @@ class AnalyticsIndex:
                     event.get("feature_id"),
                     event.get("drift_score"),
                     payload_json,
+                    event.get("parent_event_id"),
+                    event.get("cost_tokens"),
+                    event.get("execution_duration_seconds"),
                 ),
             )

@@ -483,17 +509,19 @@ class AnalyticsIndex:
             for meta in session_meta.values():
                 conn.execute(
                     """
-                    INSERT INTO sessions(session_id,
-                    VALUES(
+                    INSERT INTO sessions(session_id, agent_assigned, start_commit, continued_from, status, started_at, ended_at, parent_session_id, parent_event_id)
+                    VALUES(?,?,?,?,?,?,?,?,?)
                     """,
                     (
                         meta.get("session_id"),
-                        meta.get("agent"),
+                        meta.get("agent"),  # Source data still uses 'agent' key
                         meta.get("start_commit"),
                         meta.get("continued_from"),
                         meta.get("status"),
                         meta.get("started_at"),
                         meta.get("ended_at"),
+                        meta.get("parent_session_id"),
+                        meta.get("parent_event_id"),
                     ),
                 )
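The sessions upsert above now carries parent_session_id and parent_event_id and keeps relying on SQLite's ON CONFLICT ... DO UPDATE. A standalone sqlite3 sketch of the same upsert shape, with a trimmed column list and illustrative values:

import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute(
    """
    CREATE TABLE sessions (
        session_id TEXT PRIMARY KEY,
        agent_assigned TEXT,
        status TEXT,
        parent_session_id TEXT,
        parent_event_id TEXT
    )
    """
)
row = ("sess-1", "claude-code", "active", "sess-0", "evt-42")  # illustrative values only
# Same upsert shape as the patched insert: the new columns ride along, and a
# conflicting session_id updates the existing row instead of failing.
conn.execute(
    """
    INSERT INTO sessions(session_id, agent_assigned, status, parent_session_id, parent_event_id)
    VALUES(?,?,?,?,?)
    ON CONFLICT(session_id) DO UPDATE SET
        agent_assigned=excluded.agent_assigned,
        status=excluded.status,
        parent_session_id=excluded.parent_session_id,
        parent_event_id=excluded.parent_event_id
    """,
    row,
)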
@@ -676,13 +704,17 @@ class AnalyticsIndex:
         with self.connect() as conn:
             rows = conn.execute(
                 """
-                SELECT event_id, session_id, ts, tool, summary, success, feature_id, drift_score
-
-
-
+                SELECT e.event_id, e.session_id, e.ts, e.tool, e.summary, e.success, e.feature_id, e.drift_score,
+                       COALESCE(e.parent_event_id, s.parent_event_id) as parent_event_id,
+                       e.cost_tokens, e.execution_duration_seconds
+                FROM events e
+                JOIN sessions s ON e.session_id = s.session_id
+                WHERE e.session_id = ?
+                   OR s.parent_session_id = ?
+                ORDER BY e.ts DESC
                 LIMIT ?
                 """,
-                (session_id, int(limit)),
+                (session_id, session_id, int(limit)),
             ).fetchall()
             return [dict(r) for r in rows]
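In the query above, session_id is bound twice because the widened WHERE clause matches both the session's own events and events recorded by child sessions whose parent_session_id points back at it. A minimal sqlite3 sketch of that lookup, with throwaway table definitions and sample rows:

import sqlite3

conn = sqlite3.connect(":memory:")
conn.executescript(
    """
    CREATE TABLE sessions (session_id TEXT PRIMARY KEY, parent_session_id TEXT, parent_event_id TEXT);
    CREATE TABLE events (event_id TEXT PRIMARY KEY, session_id TEXT, ts TEXT);
    INSERT INTO sessions VALUES ('parent', NULL, NULL), ('child', 'parent', 'evt-1');
    INSERT INTO events VALUES ('evt-1', 'parent', '2025-01-01T10:00:00'),
                              ('evt-2', 'child', '2025-01-01T10:05:00');
    """
)
session_id = "parent"
rows = conn.execute(
    """
    SELECT e.event_id FROM events e
    JOIN sessions s ON e.session_id = s.session_id
    WHERE e.session_id = ? OR s.parent_session_id = ?
    ORDER BY e.ts DESC LIMIT ?
    """,
    (session_id, session_id, 5),  # session_id bound twice, matching the two placeholders
).fetchall()
# Returns both the parent's own event and the child session's event.
print([r[0] for r in rows])  # ['evt-2', 'evt-1']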