htmlgraph 0.25.0__py3-none-any.whl → 0.26.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- htmlgraph/__init__.py +1 -1
- htmlgraph/api/main.py +193 -45
- htmlgraph/api/templates/dashboard.html +11 -0
- htmlgraph/api/templates/partials/activity-feed.html +458 -8
- htmlgraph/dashboard.html +41 -0
- htmlgraph/db/schema.py +254 -4
- htmlgraph/hooks/.htmlgraph/.session-warning-state.json +6 -0
- htmlgraph/hooks/.htmlgraph/agents.json +72 -0
- htmlgraph/hooks/.htmlgraph/index.sqlite +0 -0
- htmlgraph/hooks/cigs_pretool_enforcer.py +2 -2
- htmlgraph/hooks/concurrent_sessions.py +208 -0
- htmlgraph/hooks/context.py +57 -10
- htmlgraph/hooks/drift_handler.py +24 -20
- htmlgraph/hooks/event_tracker.py +204 -177
- htmlgraph/hooks/orchestrator.py +6 -4
- htmlgraph/hooks/orchestrator_reflector.py +4 -4
- htmlgraph/hooks/pretooluse.py +3 -6
- htmlgraph/hooks/prompt_analyzer.py +14 -25
- htmlgraph/hooks/session_handler.py +123 -69
- htmlgraph/hooks/state_manager.py +7 -4
- htmlgraph/hooks/validator.py +15 -11
- htmlgraph/orchestration/headless_spawner.py +322 -15
- htmlgraph/orchestration/live_events.py +377 -0
- {htmlgraph-0.25.0.data → htmlgraph-0.26.1.data}/data/htmlgraph/dashboard.html +41 -0
- {htmlgraph-0.25.0.dist-info → htmlgraph-0.26.1.dist-info}/METADATA +1 -1
- {htmlgraph-0.25.0.dist-info → htmlgraph-0.26.1.dist-info}/RECORD +32 -27
- {htmlgraph-0.25.0.data → htmlgraph-0.26.1.data}/data/htmlgraph/styles.css +0 -0
- {htmlgraph-0.25.0.data → htmlgraph-0.26.1.data}/data/htmlgraph/templates/AGENTS.md.template +0 -0
- {htmlgraph-0.25.0.data → htmlgraph-0.26.1.data}/data/htmlgraph/templates/CLAUDE.md.template +0 -0
- {htmlgraph-0.25.0.data → htmlgraph-0.26.1.data}/data/htmlgraph/templates/GEMINI.md.template +0 -0
- {htmlgraph-0.25.0.dist-info → htmlgraph-0.26.1.dist-info}/WHEEL +0 -0
- {htmlgraph-0.25.0.dist-info → htmlgraph-0.26.1.dist-info}/entry_points.txt +0 -0
htmlgraph/__init__.py
CHANGED
htmlgraph/api/main.py
CHANGED
```diff
@@ -17,6 +17,7 @@ Architecture:
 import asyncio
 import json
 import logging
+import random
 import sqlite3
 import time
 from datetime import datetime
```
```diff
@@ -88,6 +89,7 @@ class EventModel(BaseModel):
     session_id: str
     parent_event_id: str | None = None
     status: str
+    model: str | None = None


 class FeatureModel(BaseModel):
```
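Since `model` is declared `str | None = None`, events recorded by 0.25.x (which never wrote a model value) still validate. A minimal sketch with a cut-down stand-in for `EventModel` (only the fields relevant here; the real model has more):

```python
from pydantic import BaseModel

# Cut-down stand-in for EventModel, just to illustrate that the new
# optional field is backward compatible; field names match the diff above.
class EventModel(BaseModel):
    session_id: str
    status: str
    model: str | None = None  # new in 0.26.x

# A pre-0.26 payload without "model" still validates:
legacy = EventModel(session_id="s-1", status="recorded")
print(legacy.model)  # None
```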
```diff
@@ -212,9 +214,12 @@ def get_app(db_path: str) -> FastAPI:
     # ========== DATABASE HELPERS ==========

     async def get_db() -> aiosqlite.Connection:
-        """Get database connection."""
+        """Get database connection with busy_timeout to prevent lock errors."""
         db = await aiosqlite.connect(app.state.db_path)
         db.row_factory = aiosqlite.Row
+        # Set busy_timeout to 5 seconds - prevents "database is locked" errors
+        # during concurrent access from spawner scripts and WebSocket polling
+        await db.execute("PRAGMA busy_timeout = 5000")
         return db

     # ========== ROUTES ==========
```
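The pragma matters because the dashboard now has several writers (the API, spawner scripts, and the WebSocket poller) sharing one SQLite file. A standalone sketch of the behavior change (not htmlgraph code; the timeout is shortened so the demo finishes quickly, whereas `get_db()` uses 5000 ms):

```python
import sqlite3

# Two connections to the same database file; the first holds the write lock.
db_path = "/tmp/busy_timeout_demo.sqlite"

writer = sqlite3.connect(db_path, isolation_level=None)  # autocommit mode
writer.execute("CREATE TABLE IF NOT EXISTS t (x)")
writer.execute("BEGIN IMMEDIATE")  # take and hold the write lock

blocked = sqlite3.connect(db_path, isolation_level=None)
blocked.execute("PRAGMA busy_timeout = 250")  # get_db() sets 5000
try:
    # With busy_timeout at SQLite's default of 0 this fails instantly;
    # with the pragma set, SQLite retries for the window before giving up.
    blocked.execute("BEGIN IMMEDIATE")
except sqlite3.OperationalError as err:
    print(err)  # "database is locked", but only after ~250 ms of retrying

writer.execute("ROLLBACK")  # release the lock
```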
```diff
@@ -389,7 +394,7 @@ def get_app(db_path: str) -> FastAPI:
         query = """
             SELECT e.event_id, e.agent_id, e.event_type, e.timestamp, e.tool_name,
                    e.input_summary, e.output_summary, e.session_id,
-                   e.status
+                   e.status, e.model
             FROM agent_events e
             WHERE 1=1
         """
```
```diff
@@ -425,6 +430,7 @@ def get_app(db_path: str) -> FastAPI:
                 session_id=row[7],
                 parent_event_id=None,  # Not available in all schema versions
                 status=row[8],
+                model=row[9],
             )
             for row in rows
         ]
```
```diff
@@ -575,7 +581,7 @@ def get_app(db_path: str) -> FastAPI:
         # Query parent events (task delegations)
         parent_query = """
             SELECT event_id, agent_id, subagent_type, timestamp, status,
-                   child_spike_count, output_summary
+                   child_spike_count, output_summary, model
             FROM agent_events
             WHERE event_type = 'task_delegation'
         """
```
```diff
@@ -601,6 +607,7 @@ def get_app(db_path: str) -> FastAPI:
             status = parent_row[4]
             child_spike_count = parent_row[5] or 0
             output_summary = parent_row[6]
+            model = parent_row[7]

             # Parse output summary to get child spike IDs if available
             child_spikes = []
```
```diff
@@ -663,6 +670,7 @@ def get_app(db_path: str) -> FastAPI:
                 "child_events": child_events,
                 "child_spike_count": child_spike_count,
                 "child_spikes": child_spikes,
+                "model": model,
             }

             traces.append(trace)
```
```diff
@@ -759,7 +767,8 @@ def get_app(db_path: str) -> FastAPI:
                 input_summary,
                 output_summary,
                 session_id,
-                status
+                status,
+                model
             FROM agent_events
             WHERE event_type IN ({event_type_placeholders})
         """
```
```diff
@@ -789,6 +798,7 @@ def get_app(db_path: str) -> FastAPI:
                     "output_summary": row[7],
                     "session_id": row[8],
                     "status": row[9],
+                    "model": row[10],
                 }
             )
```
```diff
@@ -1007,56 +1017,95 @@ def get_app(db_path: str) -> FastAPI:
                 input_summary,
                 execution_duration_seconds,
                 status,
-                COALESCE(subagent_type, agent_id) as agent_id
+                COALESCE(subagent_type, agent_id) as agent_id,
+                model
             FROM agent_events
             WHERE parent_event_id = ?
             ORDER BY timestamp ASC
         """

+        # Recursive helper to fetch children at any depth
+        async def fetch_children_recursive(
+            parent_id: str, depth: int = 0, max_depth: int = 4
+        ) -> tuple[list[dict[str, Any]], float, int, int]:
+            """Recursively fetch children up to max_depth levels."""
+            if depth >= max_depth:
+                return [], 0.0, 0, 0

-        total_duration = uq_duration
-        success_count = (
-            1 if uq_status == "recorded" or uq_status == "success" else 0
-        )
-        error_count = (
-            0 if uq_status == "recorded" or uq_status == "success" else 1
-        )
+            cursor = await db.execute(children_query, [parent_id])
+            rows = await cursor.fetchall()

-            … (old inline child-building block, truncated in the source diff)
+            children_list: list[dict[str, Any]] = []
+            total_dur = 0.0
+            success_cnt = 0
+            error_cnt = 0
+
+            for row in rows:
+                evt_id = row[0]
+                tool = row[1]
+                timestamp = row[2]
+                input_text = row[3] or ""
+                duration = row[4] or 0.0
+                status = row[5]
+                agent = row[6] or "unknown"
+                model = row[7]  # Add model field
+
+                # Build summary (input_text already contains formatted summary)
+                summary = input_text[:80] + (
+                    "..." if len(input_text) > 80 else ""
+                )
+
+                # Recursively fetch this child's children
+                (
+                    nested_children,
+                    nested_dur,
+                    nested_success,
+                    nested_error,
+                ) = await fetch_children_recursive(evt_id, depth + 1, max_depth)
+
+                child_dict: dict[str, Any] = {
+                    "event_id": evt_id,
+                    "tool_name": tool,
+                    "timestamp": timestamp,
                     "summary": summary,
-                    "duration_seconds": round(
-                    "agent":
+                    "duration_seconds": round(duration, 2),
+                    "agent": agent,
+                    "depth": depth,
+                    "model": model,  # Include model in child dict
                 }
-        )

+                # Only add children key if there are nested children
+                if nested_children:
+                    child_dict["children"] = nested_children
+
+                children_list.append(child_dict)
+
+                # Update stats (include nested)
+                total_dur += duration + nested_dur
+                if status == "recorded" or status == "success":
+                    success_cnt += 1
+                else:
+                    error_cnt += 1
+                success_cnt += nested_success
+                error_cnt += nested_error
+
+            return children_list, total_dur, success_cnt, error_cnt
+
+        # Step 3: Build child events with recursive nesting
+        (
+            children,
+            children_duration,
+            children_success,
+            children_error,
+        ) = await fetch_children_recursive(uq_event_id, depth=0, max_depth=4)
+
+        total_duration = uq_duration + children_duration
+        success_count = (
+            1 if uq_status == "recorded" or uq_status == "success" else 0
+        ) + children_success
+        error_count = (
+            0 if uq_status == "recorded" or uq_status == "success" else 1
+        ) + children_error

         # Step 4: Build conversation turn object
         conversation_turn = {
```
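The net effect: child events are no longer a flat list; each node can carry a `children` list up to four levels deep, plus its `depth` and `model`. A hypothetical two-level node (all values invented; only the keys mirror what `fetch_children_recursive` emits):

```python
# Hypothetical node from the rebuilt child-event tree (invented values).
node = {
    "event_id": "evt-001",
    "tool_name": "Task",
    "timestamp": "2025-01-01T12:00:00",
    "summary": "Delegate: investigate failing session hook...",
    "duration_seconds": 4.2,
    "agent": "debugger",
    "depth": 0,
    "model": "claude-sonnet-4",
    "children": [  # key is present only when nested children exist
        {
            "event_id": "evt-002",
            "tool_name": "Read",
            "timestamp": "2025-01-01T12:00:03",
            "summary": "htmlgraph/hooks/session_handler.py",
            "duration_seconds": 0.1,
            "agent": "debugger",
            "depth": 1,
            "model": "claude-sonnet-4",
        }
    ],
}
```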
```diff
@@ -2034,23 +2083,29 @@ def get_app(db_path: str) -> FastAPI:
         IMPORTANT: Initializes last_timestamp to current time to only stream NEW events.
         Historical events are already counted in /api/initial-stats, so streaming them
         again would cause double-counting in the header stats.
+
+        LIVE EVENTS: Also polls live_events table for real-time spawner activity
+        streaming. These events are marked as broadcast after sending and cleaned up.
         """
         await websocket.accept()
         # Initialize to current time - only stream events created AFTER connection
         # This prevents double-counting: initial-stats already includes historical events
         last_timestamp: str = datetime.now().isoformat()
         poll_interval = 0.5  # OPTIMIZATION: Adaptive polling (reduced from 1s)
+        last_live_event_id = 0  # Track last broadcast live event ID

         try:
             while True:
                 db = await get_db()
+                has_activity = False
                 try:
+                    # ===== 1. Poll agent_events (existing logic) =====
                     # OPTIMIZATION: Only select needed columns, use DESC index
                     # Pattern uses index: idx_agent_events_timestamp DESC
                     # Only fetch events AFTER last_timestamp to stream new events only
                     query = """
                         SELECT event_id, agent_id, event_type, timestamp, tool_name,
-                               input_summary, output_summary, session_id, status
+                               input_summary, output_summary, session_id, status, model
                         FROM agent_events
                         WHERE timestamp > ?
                         ORDER BY timestamp ASC
```
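Worth noting about the cadence: with the constants in this and the following hunk (start at 0.5 s, snap to 0.3 s while events flow, back off by 1.2x per quiet cycle, cap at 2.0 s), an idle connection settles at the cap within eight cycles:

```python
# Idle backoff sequence for the adaptive poll loop (constants from the diff).
interval = 0.5
sequence = []
for _ in range(10):
    interval = min(interval * 1.2, 2.0)
    sequence.append(round(interval, 2))
print(sequence)  # [0.6, 0.72, 0.86, 1.04, 1.24, 1.49, 1.79, 2.0, 2.0, 2.0]
```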
```diff
@@ -2061,6 +2116,7 @@ def get_app(db_path: str) -> FastAPI:
                     rows = await cursor.fetchall()

                     if rows:
+                        has_activity = True
                         rows_list = [list(row) for row in rows]
                         # Update last timestamp (last row since ORDER BY ts ASC)
                         last_timestamp = rows_list[-1][3]
```
```diff
@@ -2078,14 +2134,106 @@ def get_app(db_path: str) -> FastAPI:
                                 "output_summary": row[6],
                                 "session_id": row[7],
                                 "status": row[8],
+                                "model": row[9],
                                 "parent_event_id": None,
                                 "cost_tokens": 0,
                                 "execution_duration_seconds": 0.0,
                             }
                             await websocket.send_json(event_data)
+
+                    # ===== 2. Poll live_events for spawner streaming =====
+                    # Fetch pending live events that haven't been broadcast yet
+                    live_query = """
+                        SELECT id, event_type, event_data, parent_event_id,
+                               session_id, spawner_type, created_at
+                        FROM live_events
+                        WHERE broadcast_at IS NULL AND id > ?
+                        ORDER BY created_at ASC
+                        LIMIT 50
+                    """
+                    live_cursor = await db.execute(live_query, [last_live_event_id])
+                    live_rows = list(await live_cursor.fetchall())
+
+                    if live_rows:
+                        logger.info(
+                            f"[WebSocket] Found {len(live_rows)} pending live_events to broadcast"
+                        )
+                        has_activity = True
+                        broadcast_ids: list[int] = []
+
+                        for live_row in live_rows:
+                            live_id: int = live_row[0]
+                            event_type: str = live_row[1]
+                            event_data_json: str | None = live_row[2]
+                            parent_event_id: str | None = live_row[3]
+                            session_id: str | None = live_row[4]
+                            spawner_type: str | None = live_row[5]
+                            created_at: str = live_row[6]
+
+                            # Parse event_data JSON
+                            try:
+                                event_data_parsed = (
+                                    json.loads(event_data_json)
+                                    if event_data_json
+                                    else {}
+                                )
+                            except (json.JSONDecodeError, TypeError):
+                                event_data_parsed = {}
+
+                            # Send spawner event to client
+                            spawner_event = {
+                                "type": "spawner_event",
+                                "live_event_id": live_id,
+                                "event_type": event_type,
+                                "spawner_type": spawner_type,
+                                "parent_event_id": parent_event_id,
+                                "session_id": session_id,
+                                "timestamp": created_at,
+                                "data": event_data_parsed,
+                            }
+                            logger.info(
+                                f"[WebSocket] Sending spawner_event: id={live_id}, type={event_type}, spawner={spawner_type}"
+                            )
+                            await websocket.send_json(spawner_event)
+
+                            broadcast_ids.append(live_id)
+                            last_live_event_id = max(last_live_event_id, live_id)
+
+                        # Mark events as broadcast
+                        if broadcast_ids:
+                            logger.info(
+                                f"[WebSocket] Marking {len(broadcast_ids)} events as broadcast: {broadcast_ids}"
+                            )
+                            placeholders = ",".join("?" for _ in broadcast_ids)
+                            await db.execute(
+                                f"""
+                                UPDATE live_events
+                                SET broadcast_at = CURRENT_TIMESTAMP
+                                WHERE id IN ({placeholders})
+                                """,
+                                broadcast_ids,
+                            )
+                            await db.commit()
+
+                    # ===== 3. Periodic cleanup of old broadcast events =====
+                    # Clean up events older than 5 minutes (every ~10 poll cycles)
+                    if random.random() < 0.1:  # 10% chance each cycle
+                        await db.execute(
+                            """
+                            DELETE FROM live_events
+                            WHERE broadcast_at IS NOT NULL
+                              AND created_at < datetime('now', '-5 minutes')
+                            """
+                        )
+                        await db.commit()
+
+                    # Adjust poll interval based on activity
+                    if has_activity:
+                        poll_interval = 0.3  # Speed up when active
                     else:
                         # No new events, increase poll interval (exponential backoff)
                         poll_interval = min(poll_interval * 1.2, 2.0)
+
                 finally:
                     await db.close()
```
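The producer side of this queue lives in the new `htmlgraph/orchestration/live_events.py` (+377 lines, not shown in this diff), so the exact writer API is unknown here. A minimal assumed sketch of what enqueueing a pending event could look like, using only the columns the consumer query above selects (`id` assumed to be an autoincrement rowid, `broadcast_at` assumed to default to NULL):

```python
import json
import sqlite3

def emit_live_event(db_path: str, event_type: str, data: dict,
                    parent_event_id: str | None = None,
                    session_id: str | None = None,
                    spawner_type: str | None = None) -> None:
    """Assumed producer sketch: insert a row the WebSocket loop will pick up
    (it polls for broadcast_at IS NULL), then let it mark/clean the row."""
    db = sqlite3.connect(db_path)
    db.execute("PRAGMA busy_timeout = 5000")  # same guard as get_db()
    db.execute(
        """
        INSERT INTO live_events
            (event_type, event_data, parent_event_id,
             session_id, spawner_type, created_at)
        VALUES (?, ?, ?, ?, ?, datetime('now'))
        """,
        (event_type, json.dumps(data), parent_event_id,
         session_id, spawner_type),
    )
    db.commit()
    db.close()
```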
htmlgraph/api/templates/dashboard.html
CHANGED
```diff
@@ -161,6 +161,7 @@
             ws.onmessage = function(event) {
                 try {
                     const data = JSON.parse(event.data);
+                    console.log('[WebSocket] Received message type:', data.type);

                     if (data.type === 'event') {
                         // Prevent duplicate event insertions
```
```diff
@@ -187,6 +188,16 @@
                         // Insert new event into Activity Feed if visible
                         insertNewEventIntoActivityFeed(data);
                     }
+                    // Handle live spawner events for real-time streaming
+                    else if (data.type === 'spawner_event') {
+                        console.log('[WebSocket] spawner_event received:', data.event_type, data.spawner_type, 'handler exists:', typeof window.handleSpawnerEvent === 'function');
+                        // Delegate to activity-feed.html handler if available
+                        if (typeof window.handleSpawnerEvent === 'function') {
+                            window.handleSpawnerEvent(data);
+                        } else {
+                            console.warn('[WebSocket] handleSpawnerEvent not available, spawner event dropped:', data.event_type, data.spawner_type);
+                        }
+                    }
                 } catch (e) {
                     console.error('WebSocket message error:', e);
                 }
```