htmlgraph 0.25.0__py3-none-any.whl → 0.26.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- htmlgraph/.htmlgraph/.session-warning-state.json +6 -0
- htmlgraph/.htmlgraph/agents.json +72 -0
- htmlgraph/.htmlgraph/htmlgraph.db +0 -0
- htmlgraph/__init__.py +1 -1
- htmlgraph/api/main.py +252 -47
- htmlgraph/api/templates/dashboard.html +11 -0
- htmlgraph/api/templates/partials/activity-feed.html +517 -8
- htmlgraph/cli.py +1 -1
- htmlgraph/config.py +173 -96
- htmlgraph/dashboard.html +632 -7237
- htmlgraph/db/schema.py +258 -9
- htmlgraph/hooks/.htmlgraph/.session-warning-state.json +6 -0
- htmlgraph/hooks/.htmlgraph/agents.json +72 -0
- htmlgraph/hooks/.htmlgraph/index.sqlite +0 -0
- htmlgraph/hooks/cigs_pretool_enforcer.py +2 -2
- htmlgraph/hooks/concurrent_sessions.py +208 -0
- htmlgraph/hooks/context.py +88 -10
- htmlgraph/hooks/drift_handler.py +24 -20
- htmlgraph/hooks/event_tracker.py +264 -189
- htmlgraph/hooks/orchestrator.py +6 -4
- htmlgraph/hooks/orchestrator_reflector.py +4 -4
- htmlgraph/hooks/pretooluse.py +63 -36
- htmlgraph/hooks/prompt_analyzer.py +14 -25
- htmlgraph/hooks/session_handler.py +123 -69
- htmlgraph/hooks/state_manager.py +7 -4
- htmlgraph/hooks/subagent_stop.py +3 -2
- htmlgraph/hooks/validator.py +15 -11
- htmlgraph/operations/fastapi_server.py +2 -2
- htmlgraph/orchestration/headless_spawner.py +489 -16
- htmlgraph/orchestration/live_events.py +377 -0
- htmlgraph/server.py +100 -203
- htmlgraph-0.26.2.data/data/htmlgraph/dashboard.html +812 -0
- {htmlgraph-0.25.0.dist-info → htmlgraph-0.26.2.dist-info}/METADATA +1 -1
- {htmlgraph-0.25.0.dist-info → htmlgraph-0.26.2.dist-info}/RECORD +40 -32
- htmlgraph-0.25.0.data/data/htmlgraph/dashboard.html +0 -7417
- {htmlgraph-0.25.0.data → htmlgraph-0.26.2.data}/data/htmlgraph/styles.css +0 -0
- {htmlgraph-0.25.0.data → htmlgraph-0.26.2.data}/data/htmlgraph/templates/AGENTS.md.template +0 -0
- {htmlgraph-0.25.0.data → htmlgraph-0.26.2.data}/data/htmlgraph/templates/CLAUDE.md.template +0 -0
- {htmlgraph-0.25.0.data → htmlgraph-0.26.2.data}/data/htmlgraph/templates/GEMINI.md.template +0 -0
- {htmlgraph-0.25.0.dist-info → htmlgraph-0.26.2.dist-info}/WHEEL +0 -0
- {htmlgraph-0.25.0.dist-info → htmlgraph-0.26.2.dist-info}/entry_points.txt +0 -0
htmlgraph/.htmlgraph/agents.json
ADDED

@@ -0,0 +1,72 @@
+{
+  "version": "1.0",
+  "updated": "2026-01-11T04:13:54.354401",
+  "agents": {
+    "claude": {
+      "id": "claude",
+      "name": "Claude",
+      "capabilities": [
+        "python",
+        "javascript",
+        "typescript",
+        "html",
+        "css",
+        "code-review",
+        "testing",
+        "documentation",
+        "debugging",
+        "refactoring",
+        "architecture",
+        "api-design"
+      ],
+      "max_parallel_tasks": 3,
+      "preferred_complexity": [
+        "low",
+        "medium",
+        "high",
+        "very-high"
+      ],
+      "active": true,
+      "metadata": {}
+    },
+    "gemini": {
+      "id": "gemini",
+      "name": "Gemini",
+      "capabilities": [
+        "python",
+        "data-analysis",
+        "documentation",
+        "testing",
+        "code-review",
+        "javascript"
+      ],
+      "max_parallel_tasks": 2,
+      "preferred_complexity": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "active": true,
+      "metadata": {}
+    },
+    "codex": {
+      "id": "codex",
+      "name": "Codex",
+      "capabilities": [
+        "python",
+        "javascript",
+        "debugging",
+        "testing",
+        "code-generation",
+        "documentation"
+      ],
+      "max_parallel_tasks": 2,
+      "preferred_complexity": [
+        "low",
+        "medium"
+      ],
+      "active": true,
+      "metadata": {}
+    }
+  }
+}
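The registry is plain JSON keyed by agent id, so other tools can consume it directly. A minimal sketch of loading the file and filtering agents by capability; the load_agents helper and its default path are illustrative, not a documented htmlgraph API:

    import json
    from pathlib import Path

    def load_agents(path: str = ".htmlgraph/agents.json") -> dict:
        """Load the agent registry written by htmlgraph (hypothetical helper)."""
        return json.loads(Path(path).read_text())["agents"]

    def agents_with_capability(agents: dict, capability: str) -> list[str]:
        """Return ids of active agents advertising a capability."""
        return [
            agent_id
            for agent_id, spec in agents.items()
            if spec.get("active") and capability in spec.get("capabilities", [])
        ]

    # Against the file above, "debugging" matches claude and codex but not gemini:
    # agents_with_capability(load_agents(), "debugging") -> ["claude", "codex"]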
htmlgraph/.htmlgraph/htmlgraph.db
CHANGED
Binary file
htmlgraph/__init__.py
CHANGED
htmlgraph/api/main.py
CHANGED

@@ -17,6 +17,7 @@ Architecture:
 import asyncio
 import json
 import logging
+import random
 import sqlite3
 import time
 from datetime import datetime

@@ -88,6 +89,7 @@ class EventModel(BaseModel):
     session_id: str
     parent_event_id: str | None = None
     status: str
+    model: str | None = None


 class FeatureModel(BaseModel):
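Declaring model as str | None = None keeps the API schema backward compatible: events recorded before 0.26 validate with model simply left as None. A trimmed Pydantic sketch covering only the fields in this hunk:

    from pydantic import BaseModel

    class EventModel(BaseModel):
        session_id: str
        parent_event_id: str | None = None
        status: str
        model: str | None = None  # absent on pre-0.26 events

    old_event = EventModel(session_id="s1", status="recorded")
    new_event = EventModel(session_id="s2", status="recorded", model="claude-sonnet")
    assert old_event.model is None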

@@ -212,9 +214,12 @@ def get_app(db_path: str) -> FastAPI:
     # ========== DATABASE HELPERS ==========

     async def get_db() -> aiosqlite.Connection:
-        """Get database connection."""
+        """Get database connection with busy_timeout to prevent lock errors."""
         db = await aiosqlite.connect(app.state.db_path)
         db.row_factory = aiosqlite.Row
+        # Set busy_timeout to 5 seconds - prevents "database is locked" errors
+        # during concurrent access from spawner scripts and WebSocket polling
+        await db.execute("PRAGMA busy_timeout = 5000")
         return db

     # ========== ROUTES ==========
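PRAGMA busy_timeout makes SQLite block and retry for up to the given number of milliseconds when another connection holds the write lock, instead of raising "database is locked" immediately. A standalone sketch of the same connection setup; the file path is illustrative:

    import asyncio
    import aiosqlite

    async def open_db(path: str = "htmlgraph.db") -> aiosqlite.Connection:
        """Open a connection that tolerates concurrent writers for up to 5 s."""
        db = await aiosqlite.connect(path)
        db.row_factory = aiosqlite.Row
        await db.execute("PRAGMA busy_timeout = 5000")  # milliseconds
        return db

    async def main() -> None:
        db = await open_db()
        try:
            async with db.execute("SELECT 1") as cursor:
                row = await cursor.fetchone()
                print(row[0])
        finally:
            await db.close()

    asyncio.run(main())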

@@ -389,7 +394,7 @@ def get_app(db_path: str) -> FastAPI:
         query = """
             SELECT e.event_id, e.agent_id, e.event_type, e.timestamp, e.tool_name,
                    e.input_summary, e.output_summary, e.session_id,
-                   e.status
+                   e.parent_event_id, e.status, e.model
             FROM agent_events e
             WHERE 1=1
         """
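The WHERE 1=1 base clause exists so optional filters can be appended uniformly as AND fragments, without tracking whether a WHERE keyword was already emitted. A sketch of the pattern; the two filter parameters are hypothetical:

    def build_events_query(
        agent_id: str | None = None, session_id: str | None = None
    ) -> tuple[str, list[str]]:
        """Append optional AND-filters to a WHERE 1=1 base clause."""
        query = "SELECT event_id, status FROM agent_events WHERE 1=1"
        params: list[str] = []
        if agent_id:
            query += " AND agent_id = ?"
            params.append(agent_id)
        if session_id:
            query += " AND session_id = ?"
            params.append(session_id)
        return query, params

    # build_events_query(agent_id="claude")
    # -> ("SELECT event_id, status FROM agent_events WHERE 1=1 AND agent_id = ?", ["claude"])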

@@ -423,8 +428,9 @@ def get_app(db_path: str) -> FastAPI:
                 input_summary=row[5],
                 output_summary=row[6],
                 session_id=row[7],
-                parent_event_id=
-                status=row[
+                parent_event_id=row[8],
+                status=row[9],
+                model=row[10],
             )
             for row in rows
         ]
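The bug fixed here is typical of positional row indexing: adding columns to the SELECT shifts every index after them. Because get_db sets row_factory = aiosqlite.Row, rows can also be read by column name, which survives reordering; a small synchronous sqlite3 sketch of that alternative (not what the endpoint currently does):

    import sqlite3

    conn = sqlite3.connect(":memory:")
    conn.row_factory = sqlite3.Row  # same idea as aiosqlite.Row
    conn.execute("CREATE TABLE agent_events (event_id TEXT, status TEXT, model TEXT)")
    conn.execute("INSERT INTO agent_events VALUES ('e1', 'recorded', 'claude-sonnet')")

    row = conn.execute("SELECT event_id, status, model FROM agent_events").fetchone()

    # Name-based access keeps working even if the SELECT column order changes
    assert row["model"] == "claude-sonnet"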

@@ -575,7 +581,7 @@ def get_app(db_path: str) -> FastAPI:
         # Query parent events (task delegations)
         parent_query = """
             SELECT event_id, agent_id, subagent_type, timestamp, status,
-                   child_spike_count, output_summary
+                   child_spike_count, output_summary, model
             FROM agent_events
             WHERE event_type = 'task_delegation'
         """

@@ -601,6 +607,7 @@ def get_app(db_path: str) -> FastAPI:
             status = parent_row[4]
             child_spike_count = parent_row[5] or 0
             output_summary = parent_row[6]
+            model = parent_row[7]

             # Parse output summary to get child spike IDs if available
             child_spikes = []

@@ -663,6 +670,7 @@ def get_app(db_path: str) -> FastAPI:
                 "child_events": child_events,
                 "child_spike_count": child_spike_count,
                 "child_spikes": child_spikes,
+                "model": model,
             }

             traces.append(trace)

@@ -759,7 +767,9 @@ def get_app(db_path: str) -> FastAPI:
                 input_summary,
                 output_summary,
                 session_id,
-                status
+                status,
+                model,
+                parent_event_id
             FROM agent_events
             WHERE event_type IN ({event_type_placeholders})
         """

@@ -789,6 +799,8 @@ def get_app(db_path: str) -> FastAPI:
                     "output_summary": row[7],
                     "session_id": row[8],
                     "status": row[9],
+                    "model": row[10],
+                    "parent_event_id": row[11],
                 }
             )

@@ -1007,56 +1019,148 @@ def get_app(db_path: str) -> FastAPI:
                 input_summary,
                 execution_duration_seconds,
                 status,
-
+                agent_id,
+                model,
+                context,
+                subagent_type
             FROM agent_events
             WHERE parent_event_id = ?
             ORDER BY timestamp ASC
         """

-
-
+        # Recursive helper to fetch children at any depth
+        async def fetch_children_recursive(
+            parent_id: str, depth: int = 0, max_depth: int = 4
+        ) -> tuple[list[dict[str, Any]], float, int, int]:
+            """Recursively fetch children up to max_depth levels."""
+            if depth >= max_depth:
+                return [], 0.0, 0, 0

-
-
-        total_duration = uq_duration
-        success_count = (
-            1 if uq_status == "recorded" or uq_status == "success" else 0
-        )
-        error_count = (
-            0 if uq_status == "recorded" or uq_status == "success" else 1
-        )
+            cursor = await db.execute(children_query, [parent_id])
+            rows = await cursor.fetchall()

-        [19 removed lines not preserved in this diff rendering]
+            children_list: list[dict[str, Any]] = []
+            total_dur = 0.0
+            success_cnt = 0
+            error_cnt = 0
+
+            for row in rows:
+                evt_id = row[0]
+                tool = row[1]
+                timestamp = row[2]
+                input_text = row[3] or ""
+                duration = row[4] or 0.0
+                status = row[5]
+                agent = row[6] or "unknown"
+                model = row[7]
+                context_json = row[8]
+                subagent_type = row[9]
+
+                # Parse context to extract spawner metadata
+                context = {}
+                spawner_type = None
+                spawned_agent = None
+                if context_json:
+                    try:
+                        context = json.loads(context_json)
+                        spawner_type = context.get("spawner_type")
+                        spawned_agent = context.get("spawned_agent")
+                    except (json.JSONDecodeError, TypeError):
+                        pass
+
+                # If no spawner_type but subagent_type is set, treat it as a spawner delegation
+                # This handles both HeadlessSpawner (spawner_type in context) and
+                # Claude Code plugin agents (subagent_type field)
+                if not spawner_type and subagent_type:
+                    # Extract spawner name from subagent_type (e.g., ".claude-plugin:gemini" -> "gemini")
+                    if ":" in subagent_type:
+                        spawner_type = subagent_type.split(":")[-1]
+                    else:
+                        spawner_type = subagent_type
+                    spawned_agent = (
+                        agent  # Use the agent_id as the spawned agent
+                    )
+
+                # Build summary (input_text already contains formatted summary)
+                summary = input_text[:80] + (
+                    "..." if len(input_text) > 80 else ""
+                )
+
+                # Recursively fetch this child's children
+                (
+                    nested_children,
+                    nested_dur,
+                    nested_success,
+                    nested_error,
+                ) = await fetch_children_recursive(evt_id, depth + 1, max_depth)
+
+                child_dict: dict[str, Any] = {
+                    "event_id": evt_id,
+                    "tool_name": tool,
+                    "timestamp": timestamp,
                     "summary": summary,
-                    "duration_seconds": round(
-                    "agent":
+                    "duration_seconds": round(duration, 2),
+                    "agent": agent,
+                    "depth": depth,
+                    "model": model,
                 }
-        )

-        [6 removed lines not preserved in this diff rendering]
+                # Include spawner metadata if present
+                if spawner_type:
+                    child_dict["spawner_type"] = spawner_type
+                if spawned_agent:
+                    child_dict["spawned_agent"] = spawned_agent
+                if subagent_type:
+                    child_dict["subagent_type"] = subagent_type
+
+                # Only add children key if there are nested children
+                if nested_children:
+                    child_dict["children"] = nested_children
+
+                children_list.append(child_dict)
+
+                # Update stats (include nested)
+                total_dur += duration + nested_dur
+                if status == "recorded" or status == "success":
+                    success_cnt += 1
+                else:
+                    error_cnt += 1
+                success_cnt += nested_success
+                error_cnt += nested_error
+
+            return children_list, total_dur, success_cnt, error_cnt
+
+        # Step 3: Build child events with recursive nesting
+        (
+            children,
+            children_duration,
+            children_success,
+            children_error,
+        ) = await fetch_children_recursive(uq_event_id, depth=0, max_depth=4)
+
+        total_duration = uq_duration + children_duration
+        success_count = (
+            1 if uq_status == "recorded" or uq_status == "success" else 0
+        ) + children_success
+        error_count = (
+            0 if uq_status == "recorded" or uq_status == "success" else 1
+        ) + children_error
+
+        # Check if any child has spawner metadata
+        def has_spawner_in_children(
+            children_list: list[dict[str, Any]],
+        ) -> bool:
+            """Recursively check if any child has spawner metadata."""
+            for child in children_list:
+                if child.get("spawner_type") or child.get("spawned_agent"):
+                    return True
+                if child.get("children") and has_spawner_in_children(
+                    child["children"]
+                ):
+                    return True
+            return False
+
+        has_spawner = has_spawner_in_children(children)

         # Step 4: Build conversation turn object
         conversation_turn = {
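fetch_children_recursive issues one query per node and caps recursion at four levels. Under the same schema, SQLite can also return an entire subtree in one round trip with a recursive CTE; a sketch of that alternative, not what the endpoint does:

    import sqlite3

    SUBTREE_QUERY = """
        WITH RECURSIVE subtree(event_id, parent_event_id, tool_name, depth) AS (
            SELECT event_id, parent_event_id, tool_name, 0
            FROM agent_events
            WHERE parent_event_id = :root
            UNION ALL
            SELECT e.event_id, e.parent_event_id, e.tool_name, s.depth + 1
            FROM agent_events e
            JOIN subtree s ON e.parent_event_id = s.event_id
            WHERE s.depth + 1 < 4  -- same bound as max_depth=4
        )
        SELECT event_id, parent_event_id, tool_name, depth FROM subtree
    """

    def fetch_subtree(conn: sqlite3.Connection, root_event_id: str) -> list[sqlite3.Row]:
        """Fetch all descendants of an event, to depth 4, in a single query."""
        conn.row_factory = sqlite3.Row
        return conn.execute(SUBTREE_QUERY, {"root": root_event_id}).fetchall()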
@@ -1065,8 +1169,10 @@ def get_app(db_path: str) -> FastAPI:
                 "timestamp": uq_timestamp,
                 "prompt": prompt_text[:200],  # Truncate for display
                 "duration_seconds": round(uq_duration, 2),
+                "agent_id": uq_row[5],  # Include agent_id from UserQuery
             },
             "children": children,
+            "has_spawner": has_spawner,
             "stats": {
                 "tool_count": len(children),
                 "total_duration": round(total_duration, 2),

@@ -2034,23 +2140,29 @@ def get_app(db_path: str) -> FastAPI:
         IMPORTANT: Initializes last_timestamp to current time to only stream NEW events.
         Historical events are already counted in /api/initial-stats, so streaming them
         again would cause double-counting in the header stats.
+
+        LIVE EVENTS: Also polls live_events table for real-time spawner activity
+        streaming. These events are marked as broadcast after sending and cleaned up.
         """
         await websocket.accept()
         # Initialize to current time - only stream events created AFTER connection
         # This prevents double-counting: initial-stats already includes historical events
         last_timestamp: str = datetime.now().isoformat()
         poll_interval = 0.5  # OPTIMIZATION: Adaptive polling (reduced from 1s)
+        last_live_event_id = 0  # Track last broadcast live event ID

         try:
             while True:
                 db = await get_db()
+                has_activity = False
                 try:
+                    # ===== 1. Poll agent_events (existing logic) =====
                     # OPTIMIZATION: Only select needed columns, use DESC index
                     # Pattern uses index: idx_agent_events_timestamp DESC
                     # Only fetch events AFTER last_timestamp to stream new events only
                     query = """
                         SELECT event_id, agent_id, event_type, timestamp, tool_name,
-                               input_summary, output_summary, session_id, status
+                               input_summary, output_summary, session_id, status, model
                         FROM agent_events
                         WHERE timestamp > ?
                         ORDER BY timestamp ASC
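Distilling the polling rule from this hunk and the two below: 0.5 s at connect, 0.3 s while events are flowing, otherwise multiply by 1.2 per idle cycle up to a 2 s ceiling. A pure-Python sketch of the schedule:

    def next_poll_interval(current: float, has_activity: bool) -> float:
        """Adaptive polling rule used by the dashboard WebSocket loop."""
        if has_activity:
            return 0.3  # speed up while events are flowing
        return min(current * 1.2, 2.0)  # exponential backoff, capped at 2 s

    interval = 0.5
    for _ in range(12):  # a fully idle stretch
        interval = next_poll_interval(interval, has_activity=False)
    print(round(interval, 2))  # 2.0 -- the cap is reached after ~8 idle cycles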

@@ -2061,6 +2173,7 @@ def get_app(db_path: str) -> FastAPI:
                     rows = await cursor.fetchall()

                     if rows:
+                        has_activity = True
                         rows_list = [list(row) for row in rows]
                         # Update last timestamp (last row since ORDER BY ts ASC)
                         last_timestamp = rows_list[-1][3]

@@ -2078,14 +2191,106 @@ def get_app(db_path: str) -> FastAPI:
                                 "output_summary": row[6],
                                 "session_id": row[7],
                                 "status": row[8],
+                                "model": row[9],
                                 "parent_event_id": None,
                                 "cost_tokens": 0,
                                 "execution_duration_seconds": 0.0,
                             }
                             await websocket.send_json(event_data)
+
+                    # ===== 2. Poll live_events for spawner streaming =====
+                    # Fetch pending live events that haven't been broadcast yet
+                    live_query = """
+                        SELECT id, event_type, event_data, parent_event_id,
+                               session_id, spawner_type, created_at
+                        FROM live_events
+                        WHERE broadcast_at IS NULL AND id > ?
+                        ORDER BY created_at ASC
+                        LIMIT 50
+                    """
+                    live_cursor = await db.execute(live_query, [last_live_event_id])
+                    live_rows = list(await live_cursor.fetchall())
+
+                    if live_rows:
+                        logger.info(
+                            f"[WebSocket] Found {len(live_rows)} pending live_events to broadcast"
+                        )
+                        has_activity = True
+                        broadcast_ids: list[int] = []
+
+                        for live_row in live_rows:
+                            live_id: int = live_row[0]
+                            event_type: str = live_row[1]
+                            event_data_json: str | None = live_row[2]
+                            parent_event_id: str | None = live_row[3]
+                            session_id: str | None = live_row[4]
+                            spawner_type: str | None = live_row[5]
+                            created_at: str = live_row[6]
+
+                            # Parse event_data JSON
+                            try:
+                                event_data_parsed = (
+                                    json.loads(event_data_json)
+                                    if event_data_json
+                                    else {}
+                                )
+                            except (json.JSONDecodeError, TypeError):
+                                event_data_parsed = {}
+
+                            # Send spawner event to client
+                            spawner_event = {
+                                "type": "spawner_event",
+                                "live_event_id": live_id,
+                                "event_type": event_type,
+                                "spawner_type": spawner_type,
+                                "parent_event_id": parent_event_id,
+                                "session_id": session_id,
+                                "timestamp": created_at,
+                                "data": event_data_parsed,
+                            }
+                            logger.info(
+                                f"[WebSocket] Sending spawner_event: id={live_id}, type={event_type}, spawner={spawner_type}"
+                            )
+                            await websocket.send_json(spawner_event)
+
+                            broadcast_ids.append(live_id)
+                            last_live_event_id = max(last_live_event_id, live_id)
+
+                        # Mark events as broadcast
+                        if broadcast_ids:
+                            logger.info(
+                                f"[WebSocket] Marking {len(broadcast_ids)} events as broadcast: {broadcast_ids}"
+                            )
+                            placeholders = ",".join("?" for _ in broadcast_ids)
+                            await db.execute(
+                                f"""
+                                UPDATE live_events
+                                SET broadcast_at = CURRENT_TIMESTAMP
+                                WHERE id IN ({placeholders})
+                                """,
+                                broadcast_ids,
+                            )
+                            await db.commit()
+
+                    # ===== 3. Periodic cleanup of old broadcast events =====
+                    # Clean up events older than 5 minutes (every ~10 poll cycles)
+                    if random.random() < 0.1:  # 10% chance each cycle
+                        await db.execute(
+                            """
+                            DELETE FROM live_events
+                            WHERE broadcast_at IS NOT NULL
+                              AND created_at < datetime('now', '-5 minutes')
+                            """
+                        )
+                        await db.commit()
+
+                    # Adjust poll interval based on activity
+                    if has_activity:
+                        poll_interval = 0.3  # Speed up when active
                     else:
                         # No new events, increase poll interval (exponential backoff)
                         poll_interval = min(poll_interval * 1.2, 2.0)
+
                 finally:
                     await db.close()

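This loop is the consumer half of a small queue: rows appear in live_events, are pushed over the WebSocket, stamped with broadcast_at, and eventually deleted. The producer side is not shown in this diff; a hypothetical writer sketch, inferring the column set from the SELECT and UPDATE above and assuming broadcast_at starts out NULL:

    import json
    import sqlite3

    def emit_live_event(
        db_path: str,
        event_type: str,
        data: dict,
        parent_event_id: str | None = None,
        session_id: str | None = None,
        spawner_type: str | None = None,
    ) -> None:
        """Queue one spawner event for the WebSocket broadcaster (hypothetical writer)."""
        conn = sqlite3.connect(db_path, timeout=5.0)  # mirrors busy_timeout = 5000 ms
        try:
            conn.execute(
                """
                INSERT INTO live_events
                    (event_type, event_data, parent_event_id,
                     session_id, spawner_type, created_at)
                VALUES (?, ?, ?, ?, ?, datetime('now'))
                """,
                (event_type, json.dumps(data), parent_event_id, session_id, spawner_type),
            )
            conn.commit()
        finally:
            conn.close()

    # emit_live_event("htmlgraph.db", "spawn_started", {"task": "review"}, spawner_type="gemini")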
htmlgraph/api/templates/dashboard.html
CHANGED

@@ -161,6 +161,7 @@
         ws.onmessage = function(event) {
             try {
                 const data = JSON.parse(event.data);
+                console.log('[WebSocket] Received message type:', data.type);

                 if (data.type === 'event') {
                     // Prevent duplicate event insertions

@@ -187,6 +188,16 @@
                     // Insert new event into Activity Feed if visible
                     insertNewEventIntoActivityFeed(data);
                 }
+                // Handle live spawner events for real-time streaming
+                else if (data.type === 'spawner_event') {
+                    console.log('[WebSocket] spawner_event received:', data.event_type, data.spawner_type, 'handler exists:', typeof window.handleSpawnerEvent === 'function');
+                    // Delegate to activity-feed.html handler if available
+                    if (typeof window.handleSpawnerEvent === 'function') {
+                        window.handleSpawnerEvent(data);
+                    } else {
+                        console.warn('[WebSocket] handleSpawnerEvent not available, spawner event dropped:', data.event_type, data.spawner_type);
+                    }
+                }
             } catch (e) {
                 console.error('WebSocket message error:', e);
             }