htmlgraph 0.24.2__py3-none-any.whl → 0.25.0__py3-none-any.whl
This diff compares publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the versions as they appear in the public registry.
- htmlgraph/__init__.py +20 -1
- htmlgraph/agent_detection.py +26 -10
- htmlgraph/analytics/cross_session.py +4 -3
- htmlgraph/analytics/work_type.py +52 -16
- htmlgraph/analytics_index.py +51 -19
- htmlgraph/api/__init__.py +3 -0
- htmlgraph/api/main.py +2115 -0
- htmlgraph/api/static/htmx.min.js +1 -0
- htmlgraph/api/static/style-redesign.css +1344 -0
- htmlgraph/api/static/style.css +1079 -0
- htmlgraph/api/templates/dashboard-redesign.html +812 -0
- htmlgraph/api/templates/dashboard.html +783 -0
- htmlgraph/api/templates/partials/activity-feed-hierarchical.html +326 -0
- htmlgraph/api/templates/partials/activity-feed.html +570 -0
- htmlgraph/api/templates/partials/agents-redesign.html +317 -0
- htmlgraph/api/templates/partials/agents.html +317 -0
- htmlgraph/api/templates/partials/event-traces.html +373 -0
- htmlgraph/api/templates/partials/features-kanban-redesign.html +509 -0
- htmlgraph/api/templates/partials/features.html +509 -0
- htmlgraph/api/templates/partials/metrics-redesign.html +346 -0
- htmlgraph/api/templates/partials/metrics.html +346 -0
- htmlgraph/api/templates/partials/orchestration-redesign.html +443 -0
- htmlgraph/api/templates/partials/orchestration.html +163 -0
- htmlgraph/api/templates/partials/spawners.html +375 -0
- htmlgraph/atomic_ops.py +560 -0
- htmlgraph/builders/base.py +55 -1
- htmlgraph/builders/bug.py +17 -2
- htmlgraph/builders/chore.py +17 -2
- htmlgraph/builders/epic.py +17 -2
- htmlgraph/builders/feature.py +25 -2
- htmlgraph/builders/phase.py +17 -2
- htmlgraph/builders/spike.py +27 -2
- htmlgraph/builders/track.py +14 -0
- htmlgraph/cigs/__init__.py +4 -0
- htmlgraph/cigs/reporter.py +818 -0
- htmlgraph/cli.py +1427 -401
- htmlgraph/cli_commands/__init__.py +1 -0
- htmlgraph/cli_commands/feature.py +195 -0
- htmlgraph/cli_framework.py +115 -0
- htmlgraph/collections/__init__.py +2 -0
- htmlgraph/collections/base.py +21 -0
- htmlgraph/collections/session.py +189 -0
- htmlgraph/collections/spike.py +7 -1
- htmlgraph/collections/task_delegation.py +236 -0
- htmlgraph/collections/traces.py +482 -0
- htmlgraph/config.py +113 -0
- htmlgraph/converter.py +41 -0
- htmlgraph/cost_analysis/__init__.py +5 -0
- htmlgraph/cost_analysis/analyzer.py +438 -0
- htmlgraph/dashboard.html +3315 -492
- htmlgraph-0.24.2.data/data/htmlgraph/dashboard.html → htmlgraph/dashboard.html.backup +2246 -248
- htmlgraph/dashboard.html.bak +7181 -0
- htmlgraph/dashboard.html.bak2 +7231 -0
- htmlgraph/dashboard.html.bak3 +7232 -0
- htmlgraph/db/__init__.py +38 -0
- htmlgraph/db/queries.py +790 -0
- htmlgraph/db/schema.py +1334 -0
- htmlgraph/deploy.py +26 -27
- htmlgraph/docs/API_REFERENCE.md +841 -0
- htmlgraph/docs/HTTP_API.md +750 -0
- htmlgraph/docs/INTEGRATION_GUIDE.md +752 -0
- htmlgraph/docs/ORCHESTRATION_PATTERNS.md +710 -0
- htmlgraph/docs/README.md +533 -0
- htmlgraph/docs/version_check.py +3 -1
- htmlgraph/error_handler.py +544 -0
- htmlgraph/event_log.py +2 -0
- htmlgraph/hooks/__init__.py +8 -0
- htmlgraph/hooks/bootstrap.py +169 -0
- htmlgraph/hooks/context.py +271 -0
- htmlgraph/hooks/drift_handler.py +521 -0
- htmlgraph/hooks/event_tracker.py +405 -15
- htmlgraph/hooks/post_tool_use_handler.py +257 -0
- htmlgraph/hooks/pretooluse.py +476 -6
- htmlgraph/hooks/prompt_analyzer.py +648 -0
- htmlgraph/hooks/session_handler.py +583 -0
- htmlgraph/hooks/state_manager.py +501 -0
- htmlgraph/hooks/subagent_stop.py +309 -0
- htmlgraph/hooks/task_enforcer.py +39 -0
- htmlgraph/models.py +111 -15
- htmlgraph/operations/fastapi_server.py +230 -0
- htmlgraph/orchestration/headless_spawner.py +22 -14
- htmlgraph/pydantic_models.py +476 -0
- htmlgraph/quality_gates.py +350 -0
- htmlgraph/repo_hash.py +511 -0
- htmlgraph/sdk.py +348 -10
- htmlgraph/server.py +194 -0
- htmlgraph/session_hooks.py +300 -0
- htmlgraph/session_manager.py +131 -1
- htmlgraph/session_registry.py +587 -0
- htmlgraph/session_state.py +436 -0
- htmlgraph/system_prompts.py +449 -0
- htmlgraph/templates/orchestration-view.html +350 -0
- htmlgraph/track_builder.py +19 -0
- htmlgraph/validation.py +115 -0
- htmlgraph-0.25.0.data/data/htmlgraph/dashboard.html +7417 -0
- {htmlgraph-0.24.2.dist-info → htmlgraph-0.25.0.dist-info}/METADATA +91 -64
- {htmlgraph-0.24.2.dist-info → htmlgraph-0.25.0.dist-info}/RECORD +103 -42
- {htmlgraph-0.24.2.data → htmlgraph-0.25.0.data}/data/htmlgraph/styles.css +0 -0
- {htmlgraph-0.24.2.data → htmlgraph-0.25.0.data}/data/htmlgraph/templates/AGENTS.md.template +0 -0
- {htmlgraph-0.24.2.data → htmlgraph-0.25.0.data}/data/htmlgraph/templates/CLAUDE.md.template +0 -0
- {htmlgraph-0.24.2.data → htmlgraph-0.25.0.data}/data/htmlgraph/templates/GEMINI.md.template +0 -0
- {htmlgraph-0.24.2.dist-info → htmlgraph-0.25.0.dist-info}/WHEEL +0 -0
- {htmlgraph-0.24.2.dist-info → htmlgraph-0.25.0.dist-info}/entry_points.txt +0 -0
htmlgraph/hooks/event_tracker.py
CHANGED
```diff
@@ -2,11 +2,15 @@
 HtmlGraph Event Tracker Module
 
 Reusable event tracking logic for hook integrations.
-Provides session management, drift detection, and
+Provides session management, drift detection, activity logging, and SQLite persistence.
 
 Public API:
     track_event(hook_type: str, tool_input: dict) -> dict
         Main entry point for tracking hook events (PostToolUse, Stop, UserPromptSubmit)
+
+Events are recorded to both:
+- HTML files via SessionManager (existing)
+- SQLite database via HtmlGraphDB (new - for dashboard queries)
 """
 
 import json
@@ -14,16 +18,25 @@ import os
 import re
 import subprocess
 import sys
-from datetime import datetime, timedelta
+from datetime import datetime, timedelta, timezone
 from pathlib import Path
 from typing import Any, cast
 
+from htmlgraph.db.schema import HtmlGraphDB
+from htmlgraph.ids import generate_id
 from htmlgraph.session_manager import SessionManager
 
 # Drift classification queue (stored in session directory)
 DRIFT_QUEUE_FILE = "drift-queue.json"
 # Active parent activity tracker (for Skill/Task invocations)
 PARENT_ACTIVITY_FILE = "parent-activity.json"
+# UserQuery event tracker (for parent-child linking) - DEPRECATED (use session-scoped files)
+USER_QUERY_EVENT_FILE = "user-query-event.json"
+
+
+def get_user_query_event_file(graph_dir: Path, session_id: str) -> Path:
+    """Get the session-scoped user query event file path."""
+    return graph_dir / f"user-query-event-{session_id}.json"
 
 
 def load_drift_config() -> dict:
@@ -77,7 +90,12 @@ def load_parent_activity(graph_dir: Path) -> dict:
             # Clean up stale parent activities (older than 5 minutes)
             if data.get("timestamp"):
                 ts = datetime.fromisoformat(data["timestamp"])
-
+                # Use timezone-aware datetime for comparison
+                now = datetime.now(timezone.utc)
+                # Ensure ts is timezone-aware (handle both formats)
+                if ts.tzinfo is None:
+                    ts = ts.replace(tzinfo=timezone.utc)
+                if now - ts > timedelta(minutes=5):
                     return {}
             return data
         except Exception:
@@ -97,7 +115,7 @@ def save_parent_activity(
                 {
                     "parent_id": parent_id,
                     "tool": tool,
-                    "timestamp": datetime.now().isoformat(),
+                    "timestamp": datetime.now(timezone.utc).isoformat(),
                 },
                 f,
             )
@@ -108,6 +126,63 @@
         print(f"Warning: Could not save parent activity: {e}", file=sys.stderr)
 
 
+def load_user_query_event(graph_dir: Path, session_id: str) -> str | None:
+    """
+    Load the active UserQuery event ID for parent-child linking.
+
+    Session-scoped: Each session maintains its own parent context via
+    user-query-event-{SESSION_ID}.json to support multiple concurrent
+    Claude windows in the same project.
+    """
+    path = get_user_query_event_file(graph_dir, session_id)
+    if path.exists():
+        try:
+            with open(path) as f:
+                data = cast(dict[Any, Any], json.load(f))
+            # Clean up stale UserQuery events (older than 10 minutes)
+            if data.get("timestamp"):
+                ts = datetime.fromisoformat(data["timestamp"])
+                now = datetime.now(timezone.utc)
+                if ts.tzinfo is None:
+                    ts = ts.replace(tzinfo=timezone.utc)
+                # UserQuery events expire after 10 minutes (conversation turn boundary)
+                # This allows tool calls up to 10 minutes after a user query to be linked as children
+                if now - ts > timedelta(minutes=10):
+                    return None
+            return data.get("event_id")
+        except Exception:
+            pass
+    return None
+
+
+def save_user_query_event(
+    graph_dir: Path, session_id: str, event_id: str | None
+) -> None:
+    """
+    Save the active UserQuery event ID for parent-child linking.
+
+    Session-scoped: Each session maintains its own parent context via
+    user-query-event-{SESSION_ID}.json to support multiple concurrent
+    Claude windows in the same project.
+    """
+    path = get_user_query_event_file(graph_dir, session_id)
+    try:
+        if event_id:
+            with open(path, "w") as f:
+                json.dump(
+                    {
+                        "event_id": event_id,
+                        "timestamp": datetime.now(timezone.utc).isoformat(),
+                    },
+                    f,
+                )
+        else:
+            # Clear UserQuery event
+            path.unlink(missing_ok=True)
+    except Exception as e:
+        print(f"Warning: Could not save UserQuery event: {e}", file=sys.stderr)
+
+
 def load_drift_queue(graph_dir: Path, max_age_hours: int = 48) -> dict:
     """
     Load the drift queue from file and clean up stale entries.
```
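The two helpers above persist the active UserQuery event in a per-session JSON sidecar so concurrent Claude windows don't clobber each other's parent context. A minimal sketch of that round trip, writing the same JSON shape by hand (the directory, session ID, and event ID below are made-up values, not taken from the package):

```python
# Illustrative only: mirrors the JSON layout written by save_user_query_event()
# and read back by load_user_query_event(); all identifiers are hypothetical.
import json
from datetime import datetime, timezone
from pathlib import Path

graph_dir = Path(".htmlgraph")          # hypothetical graph directory
session_id = "session-abc123"           # hypothetical session ID
event_id = "event-20240101-0001"        # hypothetical UserQuery event ID

path = graph_dir / f"user-query-event-{session_id}.json"
path.parent.mkdir(parents=True, exist_ok=True)

# Save: the payload a UserPromptSubmit hook would leave behind
path.write_text(json.dumps({
    "event_id": event_id,
    "timestamp": datetime.now(timezone.utc).isoformat(),
}))

# Load: what a later PostToolUse hook reads to pick its parent event,
# subject to the 10-minute conversation-turn expiry in the diff above
data = json.loads(path.read_text())
print(data["event_id"])  # -> event-20240101-0001
```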
```diff
@@ -199,7 +274,7 @@ def add_to_drift_queue(graph_dir: Path, activity: dict, config: dict) -> dict:
 
     queue["activities"].append(
         {
-            "timestamp": datetime.now().isoformat(),
+            "timestamp": datetime.now(timezone.utc).isoformat(),
            "tool": activity.get("tool"),
            "summary": activity.get("summary"),
            "file_paths": activity.get("file_paths", []),
```
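This hunk, like several others in the file, replaces naive `datetime.now()` with `datetime.now(timezone.utc)` and coerces parsed timestamps to UTC before comparing. A standalone illustration of why the guard is needed (not taken from the package):

```python
# Subtracting a naive datetime from an aware one raises TypeError, so
# stored naive values must be coerced to UTC before the staleness check.
from datetime import datetime, timedelta, timezone

stored = "2024-01-01T12:00:00"            # older, naive timestamp format
ts = datetime.fromisoformat(stored)
now = datetime.now(timezone.utc)

if ts.tzinfo is None:                     # same guard as in the diff
    ts = ts.replace(tzinfo=timezone.utc)

stale = (now - ts) > timedelta(minutes=5)  # safe aware-vs-aware arithmetic
print(stale)
```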
```diff
@@ -293,6 +368,40 @@ def resolve_project_path(cwd: str | None = None) -> str:
     return start_dir
 
 
+def detect_agent_from_environment() -> str:
+    """
+    Detect the agent/model name from environment variables.
+
+    Checks multiple environment variables in order of priority:
+    1. HTMLGRAPH_AGENT - Explicit agent name set by user
+    2. HTMLGRAPH_SUBAGENT_TYPE - For subagent sessions
+    3. CLAUDE_MODEL - Model name if exposed by Claude Code
+    4. ANTHROPIC_MODEL - Alternative model env var
+    5. HTMLGRAPH_PARENT_AGENT - Parent agent context
+
+    Falls back to 'claude-code' if no environment variable is set.
+
+    Returns:
+        Agent/model identifier string
+    """
+    # Check environment variables in priority order
+    env_vars = [
+        "HTMLGRAPH_AGENT",
+        "HTMLGRAPH_SUBAGENT_TYPE",
+        "CLAUDE_MODEL",
+        "ANTHROPIC_MODEL",
+        "HTMLGRAPH_PARENT_AGENT",
+    ]
+
+    for var in env_vars:
+        value = os.environ.get(var)
+        if value and value.strip():
+            return value.strip()
+
+    # Default fallback
+    return "claude-code"
+
+
 def extract_file_paths(tool_input: dict, tool_name: str) -> list[str]:
     """Extract file paths from tool input based on tool type."""
     paths = []
```
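Per the docstring, `detect_agent_from_environment()` walks a fixed priority list of environment variables and falls back to `claude-code`. A hedged usage sketch, assuming the module imports as `htmlgraph.hooks.event_tracker` (the agent names are example values):

```python
# Illustrative only: shows the documented priority order, where
# HTMLGRAPH_AGENT wins over lower-priority variables when several are set.
import os

from htmlgraph.hooks.event_tracker import detect_agent_from_environment

os.environ["HTMLGRAPH_AGENT"] = "my-custom-agent"   # example value, highest priority
os.environ["ANTHROPIC_MODEL"] = "claude-sonnet"     # example value, ignored here

print(detect_agent_from_environment())  # -> "my-custom-agent"
```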
```diff
@@ -366,13 +475,153 @@ def format_tool_summary(
         url = tool_input.get("url", "")[:40]
         return f"WebFetch: {url}"
 
+    elif tool_name == "UserQuery":
+        # Extract the actual prompt text from the tool_input
+        prompt = str(tool_input.get("prompt", ""))
+        preview = prompt[:100].replace("\n", " ")
+        if len(prompt) > 100:
+            preview += "..."
+        return preview
+
     else:
         return f"{tool_name}: {str(tool_input)[:50]}"
 
 
+def record_event_to_sqlite(
+    db: HtmlGraphDB,
+    session_id: str,
+    tool_name: str,
+    tool_input: dict,
+    tool_response: dict,
+    is_error: bool,
+    file_paths: list[str] | None = None,
+    parent_event_id: str | None = None,
+    agent_id: str | None = None,
+    subagent_type: str | None = None,
+) -> str | None:
+    """
+    Record a tool call event to SQLite database for dashboard queries.
+
+    Args:
+        db: HtmlGraphDB instance
+        session_id: Session ID from HtmlGraph
+        tool_name: Name of the tool called
+        tool_input: Tool input parameters
+        tool_response: Tool response/result
+        is_error: Whether the tool call resulted in an error
+        file_paths: File paths affected by the tool
+        parent_event_id: Parent event ID if this is a child event
+        agent_id: Agent identifier (optional)
+        subagent_type: Subagent type for Task delegations (optional)
+
+    Returns:
+        event_id if successful, None otherwise
+    """
+    try:
+        event_id = generate_id("event")
+        input_summary = format_tool_summary(tool_name, tool_input, tool_response)
+
+        # Build output summary from tool response
+        output_summary = ""
+        if isinstance(tool_response, dict):
+            if is_error:
+                output_summary = tool_response.get("error", "error")[:200]
+            else:
+                # Extract summary from response
+                content = tool_response.get("content", tool_response.get("output", ""))
+                if isinstance(content, str):
+                    output_summary = content[:200]
+                elif isinstance(content, list):
+                    output_summary = f"{len(content)} items"
+                else:
+                    output_summary = "success"
+
+        # Build context metadata
+        context = {
+            "file_paths": file_paths or [],
+            "tool_input_keys": list(tool_input.keys()),
+            "is_error": is_error,
+        }
+
+        # Insert event to SQLite
+        success = db.insert_event(
+            event_id=event_id,
+            agent_id=agent_id or "claude-code",
+            event_type="tool_call",
+            session_id=session_id,
+            tool_name=tool_name,
+            input_summary=input_summary,
+            output_summary=output_summary,
+            context=context,
+            parent_event_id=parent_event_id,
+            cost_tokens=0,
+            subagent_type=subagent_type,
+        )
+
+        if success:
+            return event_id
+        return None
+
+    except Exception as e:
+        print(f"Warning: Could not record event to SQLite: {e}", file=sys.stderr)
+        return None
+
+
+def record_delegation_to_sqlite(
+    db: HtmlGraphDB,
+    session_id: str,
+    from_agent: str,
+    to_agent: str,
+    task_description: str,
+    task_input: dict,
+) -> str | None:
+    """
+    Record a Task() delegation to agent_collaboration table.
+
+    Args:
+        db: HtmlGraphDB instance
+        session_id: Session ID from HtmlGraph
+        from_agent: Agent delegating the task (usually 'orchestrator' or 'claude-code')
+        to_agent: Target subagent type (e.g., 'general-purpose', 'researcher')
+        task_description: Task description/prompt
+        task_input: Full task input parameters
+
+    Returns:
+        handoff_id if successful, None otherwise
+    """
+    try:
+        handoff_id = generate_id("handoff")
+
+        # Build context with task input
+        context = {
+            "task_input_keys": list(task_input.keys()),
+            "model": task_input.get("model"),
+            "temperature": task_input.get("temperature"),
+        }
+
+        # Insert delegation record
+        success = db.insert_collaboration(
+            handoff_id=handoff_id,
+            from_agent=from_agent,
+            to_agent=to_agent,
+            session_id=session_id,
+            handoff_type="delegation",
+            reason=task_description[:200],
+            context=context,
+        )
+
+        if success:
+            return handoff_id
+        return None
+
+    except Exception as e:
+        print(f"Warning: Could not record delegation to SQLite: {e}", file=sys.stderr)
+        return None
+
+
 def track_event(hook_type: str, hook_input: dict) -> dict:
     """
-    Track a hook event and log it to HtmlGraph.
+    Track a hook event and log it to HtmlGraph (both HTML files and SQLite).
 
     Args:
         hook_type: Type of hook event ("PostToolUse", "Stop", "UserPromptSubmit")
```
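The two recorders above are thin wrappers over `HtmlGraphDB.insert_event` and `insert_collaboration`; `track_event` also inserts the session row first so the foreign-key constraint holds. A sketch of that sequence with made-up IDs and a throwaway database path, assuming the signatures shown in this hunk and that the module imports as `htmlgraph.hooks.event_tracker`:

```python
# Hedged sketch: keyword arguments follow the diff above; the database path,
# session ID, and Edit-tool payload are hypothetical illustration values.
from htmlgraph.db.schema import HtmlGraphDB
from htmlgraph.hooks.event_tracker import (
    record_delegation_to_sqlite,
    record_event_to_sqlite,
)

db = HtmlGraphDB("/tmp/htmlgraph-demo.sqlite")  # hypothetical path

# track_event inserts the session first so events can reference it
db.insert_session(
    session_id="session-abc123",
    agent_assigned="claude-code",
    is_subagent=False,
    transcript_id=None,
    transcript_path=None,
)

event_id = record_event_to_sqlite(
    db=db,
    session_id="session-abc123",
    tool_name="Edit",
    tool_input={"file_path": "src/app.py", "old_string": "x", "new_string": "y"},
    tool_response={"content": "ok"},
    is_error=False,
    file_paths=["src/app.py"],
    agent_id="claude-code",
)

# Task() delegations additionally land in the agent_collaboration table
handoff_id = record_delegation_to_sqlite(
    db=db,
    session_id="session-abc123",
    from_agent="claude-code",
    to_agent="general-purpose",
    task_description="Summarize recent changes",
    task_input={"subagent_type": "general-purpose", "prompt": "..."},
)
print(event_id, handoff_id)
```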
```diff
@@ -388,13 +637,24 @@ def track_event(hook_type: str, hook_input: dict) -> dict:
     # Load drift configuration
     drift_config = load_drift_config()
 
-    # Initialize SessionManager
+    # Initialize SessionManager and SQLite DB
     try:
         manager = SessionManager(graph_dir)
     except Exception as e:
         print(f"Warning: Could not initialize SessionManager: {e}", file=sys.stderr)
         return {"continue": True}
 
+    # Initialize SQLite database for event recording
+    db = None
+    try:
+        db = HtmlGraphDB(str(graph_dir / "index.sqlite"))
+    except Exception as e:
+        print(f"Warning: Could not initialize SQLite database: {e}", file=sys.stderr)
+        # Continue without SQLite (graceful degradation)
+
+    # Detect agent from environment
+    detected_agent = detect_agent_from_environment()
+
     # Get active session ID
     active_session = manager.get_active_session()
     if not active_session:
@@ -402,7 +662,7 @@ def track_event(hook_type: str, hook_input: dict) -> dict:
         try:
             active_session = manager.start_session(
                 session_id=None,
-                agent=
+                agent=detected_agent,
                 title=f"Session {datetime.now().strftime('%Y-%m-%d %H:%M')}",
             )
         except Exception:
@@ -410,6 +670,50 @@ def track_event(hook_type: str, hook_input: dict) -> dict:
 
     active_session_id = active_session.id
 
+    # Ensure session exists in SQLite database (for foreign key constraints)
+    if db:
+        try:
+            # Get attributes safely - MagicMock objects can cause SQLite binding errors
+            # When getattr is called on a MagicMock, it returns another MagicMock, not the default
+            def safe_getattr(obj: Any, attr: str, default: Any) -> Any:
+                """Get attribute safely, returning default for MagicMock/invalid values."""
+                try:
+                    val = getattr(obj, attr, default)
+                    # Check if it's a mock object (has _mock_name attribute)
+                    if hasattr(val, "_mock_name"):
+                        return default
+                    return val
+                except Exception:
+                    return default
+
+            is_subagent_raw = safe_getattr(active_session, "is_subagent", False)
+            is_subagent = (
+                bool(is_subagent_raw) if isinstance(is_subagent_raw, bool) else False
+            )
+
+            transcript_id = safe_getattr(active_session, "transcript_id", None)
+            transcript_path = safe_getattr(active_session, "transcript_path", None)
+            # Ensure strings or None, not mock objects
+            if transcript_id is not None and not isinstance(transcript_id, str):
+                transcript_id = None
+            if transcript_path is not None and not isinstance(transcript_path, str):
+                transcript_path = None
+
+            db.insert_session(
+                session_id=active_session_id,
+                agent_assigned=safe_getattr(active_session, "agent", None)
+                or detected_agent,
+                is_subagent=is_subagent,
+                transcript_id=transcript_id,
+                transcript_path=transcript_path,
+            )
+        except Exception as e:
+            # Session may already exist, that's OK - continue
+            print(
+                f"Debug: Could not insert session to SQLite (may already exist): {e}",
+                file=sys.stderr,
+            )
+
     # Handle different hook types
     if hook_type == "Stop":
         # Session is ending - track stop event
@@ -417,6 +721,18 @@ def track_event(hook_type: str, hook_input: dict) -> dict:
             manager.track_activity(
                 session_id=active_session_id, tool="Stop", summary="Agent stopped"
             )
+
+            # Record to SQLite if available
+            if db:
+                record_event_to_sqlite(
+                    db=db,
+                    session_id=active_session_id,
+                    tool_name="Stop",
+                    tool_input={},
+                    tool_response={"content": "Agent stopped"},
+                    is_error=False,
+                    agent_id=detected_agent,
+                )
         except Exception as e:
             print(f"Warning: Could not track stop: {e}", file=sys.stderr)
         return {"continue": True}
@@ -432,6 +748,26 @@ def track_event(hook_type: str, hook_input: dict) -> dict:
             manager.track_activity(
                 session_id=active_session_id, tool="UserQuery", summary=f'"{preview}"'
             )
+
+            # Record to SQLite if available and capture event_id for parent-child linking
+            user_query_event_id = None
+            if db:
+                user_query_event_id = record_event_to_sqlite(
+                    db=db,
+                    session_id=active_session_id,
+                    tool_name="UserQuery",
+                    tool_input={"prompt": prompt},
+                    tool_response={"content": "Query received"},
+                    is_error=False,
+                    agent_id=detected_agent,
+                )
+
+            # Store the UserQuery event_id for subsequent tool calls to use as parent
+            if user_query_event_id:
+                save_user_query_event(
+                    graph_dir, active_session_id, user_query_event_id
+                )
+
         except Exception as e:
             print(f"Warning: Could not track query: {e}", file=sys.stderr)
         return {"continue": True}
@@ -479,8 +815,8 @@ def track_event(hook_type: str, hook_input: dict) -> dict:
 
     # Get drift thresholds from config
     drift_settings = drift_config.get("drift_detection", {})
-    warning_threshold = drift_settings.get("warning_threshold"
-    auto_classify_threshold = drift_settings.get("auto_classify_threshold"
+    warning_threshold = drift_settings.get("warning_threshold") or 0.7
+    auto_classify_threshold = drift_settings.get("auto_classify_threshold") or 0.85
 
     # Determine parent activity context
     parent_activity_state = load_parent_activity(graph_dir)
@@ -496,9 +832,23 @@ def track_event(hook_type: str, hook_input: dict) -> dict:
         is_parent_tool = True
     else:
         is_parent_tool = False
-        # Check
-
-
+        # Check environment variable FIRST for cross-process parent linking
+        # This is set by PreToolUse hook when Task() spawns a subagent
+        env_parent = os.environ.get("HTMLGRAPH_PARENT_EVENT")
+        if env_parent:
+            parent_activity_id = env_parent
+        # Next, check for UserQuery event as parent (for prompt-based grouping)
+        # UserQuery takes priority over parent_activity_json to ensure each conversation turn
+        # has its tool calls properly grouped together
+        else:
+            user_query_event_id = load_user_query_event(
+                graph_dir, active_session_id
+            )
+            if user_query_event_id:
+                parent_activity_id = user_query_event_id
+            # Fall back to parent-activity.json only if no UserQuery event (backward compatibility)
+            elif parent_activity_state.get("parent_id"):
+                parent_activity_id = parent_activity_state["parent_id"]
 
     # Track the activity
     nudge = None
```
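The hunk above fixes the parent-linking order: an explicit `HTMLGRAPH_PARENT_EVENT` environment variable wins, then the session's stored UserQuery event, then the legacy `parent-activity.json` state. Restated as a small standalone function (the helper name is illustrative; the real logic is inline in `track_event`):

```python
# Standalone restatement of the parent-resolution priority in the diff above.
# resolve_parent() is a hypothetical helper used only for illustration.
import os

def resolve_parent(
    user_query_event_id: str | None, parent_activity_state: dict
) -> str | None:
    env_parent = os.environ.get("HTMLGRAPH_PARENT_EVENT")
    if env_parent:                                  # 1. cross-process Task() spawn
        return env_parent
    if user_query_event_id:                         # 2. current conversation turn
        return user_query_event_id
    return parent_activity_state.get("parent_id")   # 3. legacy parent-activity.json

print(resolve_parent(None, {"parent_id": "event-parent-001"}))  # -> event-parent-001
```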
```diff
@@ -512,6 +862,41 @@ def track_event(hook_type: str, hook_input: dict) -> dict:
                 parent_activity_id=parent_activity_id,
             )
 
+            # Record to SQLite if available
+            if db:
+                # Extract subagent_type for Task delegations
+                task_subagent_type = None
+                if tool_name == "Task":
+                    task_subagent_type = tool_input_data.get(
+                        "subagent_type", "general-purpose"
+                    )
+
+                record_event_to_sqlite(
+                    db=db,
+                    session_id=active_session_id,
+                    tool_name=tool_name,
+                    tool_input=tool_input_data,
+                    tool_response=tool_response,
+                    is_error=is_error,
+                    file_paths=file_paths if file_paths else None,
+                    parent_event_id=parent_activity_id,  # Link to parent event
+                    agent_id=detected_agent,
+                    subagent_type=task_subagent_type,
+                )
+
+            # If this was a Task() delegation, also record to agent_collaboration
+            if tool_name == "Task" and db:
+                subagent = tool_input_data.get("subagent_type", "general-purpose")
+                description = tool_input_data.get("description", "")
+                record_delegation_to_sqlite(
+                    db=db,
+                    session_id=active_session_id,
+                    from_agent=detected_agent,
+                    to_agent=subagent,
+                    task_description=description,
+                    task_input=tool_input_data,
+                )
+
             # If this was a parent tool, save its ID for subsequent activities
             if is_parent_tool and result:
                 save_parent_activity(graph_dir, result.id, tool_name)
@@ -524,7 +909,10 @@ def track_event(hook_type: str, hook_input: dict) -> dict:
         drift_score = result.drift_score
         feature_id = getattr(result, "feature_id", "unknown")
 
-
+        # Skip drift detection if no score available
+        if drift_score is None:
+            pass  # No active features - can't calculate drift
+        elif drift_score >= auto_classify_threshold:
             # High drift - add to classification queue
             queue = add_to_drift_queue(
                 graph_dir,
@@ -598,7 +986,9 @@ Task tool with subagent_type="general-purpose", model="haiku", prompt:
 Or manually create a work item in .htmlgraph/ (bug, feature, spike, or chore)."""
 
                 # Mark classification as triggered
-                queue["last_classification"] = datetime.now(
+                queue["last_classification"] = datetime.now(
+                    timezone.utc
+                ).isoformat()
                 save_drift_queue(graph_dir, queue)
             else:
                 nudge = f"Drift detected ({drift_score:.2f}): Activity queued for classification ({len(queue['activities'])}/{drift_settings.get('min_activities_before_classify', 3)} needed)."
```
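Taken together, the changes make `track_event` dual-write every hook to the HTML session files and to `index.sqlite`. A hedged sketch of a hook wrapper script calling it; the stdin payload shape is an assumption, and only `track_event(hook_type, hook_input) -> dict` comes from the module's documented public API:

```python
#!/usr/bin/env python3
# Hypothetical PostToolUse hook wrapper. The exact payload fields supplied by
# the agent runtime are assumptions; the call signature is from the docstring.
import json
import sys

from htmlgraph.hooks.event_tracker import track_event

hook_input = json.load(sys.stdin)                # payload piped in by the runtime
result = track_event("PostToolUse", hook_input)  # records to HTML files and SQLite

# track_event returns {"continue": True} on success or graceful failure,
# which the hook echoes back as its JSON decision.
print(json.dumps(result))
```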
|