universal_agent_context-0.2.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- uacs/__init__.py +12 -0
- uacs/adapters/__init__.py +19 -0
- uacs/adapters/agent_skill_adapter.py +202 -0
- uacs/adapters/agents_md_adapter.py +330 -0
- uacs/adapters/base.py +261 -0
- uacs/adapters/clinerules_adapter.py +39 -0
- uacs/adapters/cursorrules_adapter.py +39 -0
- uacs/api.py +262 -0
- uacs/cli/__init__.py +6 -0
- uacs/cli/context.py +349 -0
- uacs/cli/main.py +195 -0
- uacs/cli/mcp.py +115 -0
- uacs/cli/memory.py +142 -0
- uacs/cli/packages.py +309 -0
- uacs/cli/skills.py +144 -0
- uacs/cli/utils.py +24 -0
- uacs/config/repositories.yaml +26 -0
- uacs/context/__init__.py +0 -0
- uacs/context/agent_context.py +406 -0
- uacs/context/shared_context.py +661 -0
- uacs/context/unified_context.py +332 -0
- uacs/mcp_server_entry.py +80 -0
- uacs/memory/__init__.py +5 -0
- uacs/memory/simple_memory.py +255 -0
- uacs/packages/__init__.py +26 -0
- uacs/packages/manager.py +413 -0
- uacs/packages/models.py +60 -0
- uacs/packages/sources.py +270 -0
- uacs/protocols/__init__.py +5 -0
- uacs/protocols/mcp/__init__.py +8 -0
- uacs/protocols/mcp/manager.py +77 -0
- uacs/protocols/mcp/skills_server.py +700 -0
- uacs/skills_validator.py +367 -0
- uacs/utils/__init__.py +5 -0
- uacs/utils/paths.py +24 -0
- uacs/visualization/README.md +132 -0
- uacs/visualization/__init__.py +36 -0
- uacs/visualization/models.py +195 -0
- uacs/visualization/static/index.html +857 -0
- uacs/visualization/storage.py +402 -0
- uacs/visualization/visualization.py +328 -0
- uacs/visualization/web_server.py +364 -0
- universal_agent_context-0.2.0.dist-info/METADATA +873 -0
- universal_agent_context-0.2.0.dist-info/RECORD +47 -0
- universal_agent_context-0.2.0.dist-info/WHEEL +4 -0
- universal_agent_context-0.2.0.dist-info/entry_points.txt +2 -0
- universal_agent_context-0.2.0.dist-info/licenses/LICENSE +21 -0
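
The diff below shows one of these files in full: uacs/visualization/storage.py, added as a new 402-line module.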
--- /dev/null
+++ b/uacs/visualization/storage.py
@@ -0,0 +1,402 @@
+"""Storage layer for trace visualization.
+
+Stores sessions and events in JSONL format for simple, append-only storage.
+"""
+
+import json
+from datetime import datetime
+from pathlib import Path
+from typing import Any
+
+from uacs.visualization.models import Event, EventType, Session, CompressionTrigger
+
+
+class TraceStorage:
+    """Storage for session traces."""
+
+    def __init__(self, storage_path: Path):
+        """Initialize trace storage.
+
+        Args:
+            storage_path: Path to storage directory
+        """
+        self.storage_path = Path(storage_path)
+        self.sessions_file = self.storage_path / "sessions.jsonl"
+        self.events_file = self.storage_path / "events.jsonl"
+
+        # Create storage directory
+        self.storage_path.mkdir(parents=True, exist_ok=True)
+
+        # Ensure files exist
+        self.sessions_file.touch(exist_ok=True)
+        self.events_file.touch(exist_ok=True)
+
+    def add_session(self, session: Session) -> None:
+        """Add or update a session.
+
+        Args:
+            session: Session to add
+        """
+        # Append to file (JSONL format)
+        with open(self.sessions_file, "a", encoding="utf-8") as f:
+            f.write(json.dumps(session.model_dump()) + "\n")
+
+    def add_event(self, event: Event) -> None:
+        """Add an event.
+
+        Args:
+            event: Event to add
+        """
+        # Append to file (JSONL format)
+        with open(self.events_file, "a", encoding="utf-8") as f:
+            f.write(json.dumps(event.model_dump()) + "\n")
+
+    def get_session(self, session_id: str) -> Session | None:
+        """Get a specific session by ID.
+
+        Args:
+            session_id: Session ID
+
+        Returns:
+            Session if found, None otherwise
+        """
+        # Read sessions file and find by ID (latest version wins)
+        session_data = None
+
+        with open(self.sessions_file, "r", encoding="utf-8") as f:
+            for line in f:
+                if line.strip():
+                    data = json.loads(line)
+                    if data.get("session_id") == session_id:
+                        session_data = data
+
+        if session_data:
+            return Session(**session_data)
+        return None
+
+    def get_sessions(
+        self,
+        skip: int = 0,
+        limit: int = 20,
+        topic: str | None = None,
+        sort_by: str = "started_at",
+        sort_desc: bool = True,
+    ) -> tuple[list[Session], int]:
+        """Get paginated list of sessions.
+
+        Args:
+            skip: Number of sessions to skip
+            limit: Maximum sessions to return
+            topic: Filter by topic
+            sort_by: Field to sort by
+            sort_desc: Sort in descending order
+
+        Returns:
+            Tuple of (sessions, total_count)
+        """
+        # Load all sessions (deduplicate by session_id, keeping latest)
+        sessions_dict: dict[str, dict] = {}
+
+        with open(self.sessions_file, "r", encoding="utf-8") as f:
+            for line in f:
+                if line.strip():
+                    data = json.loads(line)
+                    sessions_dict[data["session_id"]] = data
+
+        # Convert to Session objects
+        sessions = [Session(**data) for data in sessions_dict.values()]
+
+        # Filter by topic if specified
+        if topic:
+            sessions = [s for s in sessions if topic in s.topics]
+
+        # Sort
+        sessions.sort(
+            key=lambda s: getattr(s, sort_by, ""), reverse=sort_desc
+        )
+
+        total = len(sessions)
+
+        # Paginate
+        sessions = sessions[skip : skip + limit]
+
+        return sessions, total
+
+    def get_events(
+        self,
+        session_id: str | None = None,
+        event_type: EventType | None = None,
+        skip: int = 0,
+        limit: int = 50,
+    ) -> tuple[list[Event], int]:
+        """Get paginated list of events.
+
+        Args:
+            session_id: Filter by session ID
+            event_type: Filter by event type
+            skip: Number of events to skip
+            limit: Maximum events to return
+
+        Returns:
+            Tuple of (events, total_count)
+        """
+        events = []
+
+        with open(self.events_file, "r", encoding="utf-8") as f:
+            for line in f:
+                if line.strip():
+                    data = json.loads(line)
+
+                    # Apply filters
+                    if session_id and data.get("session_id") != session_id:
+                        continue
+                    if event_type and data.get("type") != event_type:
+                        continue
+
+                    events.append(Event(**data))
+
+        total = len(events)
+
+        # Sort by timestamp (newest first)
+        events.sort(key=lambda e: e.timestamp, reverse=True)
+
+        # Paginate
+        events = events[skip : skip + limit]
+
+        return events, total
+
+    def get_event(self, event_id: str) -> Event | None:
+        """Get a specific event by ID.
+
+        Args:
+            event_id: Event ID
+
+        Returns:
+            Event if found, None otherwise
+        """
+        with open(self.events_file, "r", encoding="utf-8") as f:
+            for line in f:
+                if line.strip():
+                    data = json.loads(line)
+                    if data.get("event_id") == event_id:
+                        return Event(**data)
+        return None
+
+    def search(
+        self,
+        query: str,
+        filters: dict[str, Any] | None = None,
+        limit: int = 50,
+    ) -> tuple[list[Session], list[Event]]:
+        """Search across sessions and events.
+
+        Args:
+            query: Search query (searches in content, topics, tool names)
+            filters: Additional filters (topics, date_from, date_to, quality_min)
+            limit: Maximum results per type
+
+        Returns:
+            Tuple of (matching_sessions, matching_events)
+        """
+        query_lower = query.lower()
+        filters = filters or {}
+
+        # Search sessions
+        matching_sessions = []
+        sessions_dict: dict[str, dict] = {}
+
+        with open(self.sessions_file, "r", encoding="utf-8") as f:
+            for line in f:
+                if line.strip():
+                    data = json.loads(line)
+                    sessions_dict[data["session_id"]] = data
+
+        for data in sessions_dict.values():
+            # Check query match
+            topics_str = " ".join(data.get("topics", [])).lower()
+            metadata_str = json.dumps(data.get("metadata", {})).lower()
+
+            if query_lower in topics_str or query_lower in metadata_str:
+                # Apply filters
+                if "topics" in filters and not any(
+                    t in data.get("topics", []) for t in filters["topics"]
+                ):
+                    continue
+
+                if "quality_min" in filters and data.get("quality_avg", 0) < filters["quality_min"]:
+                    continue
+
+                matching_sessions.append(Session(**data))
+
+        # Search events
+        matching_events = []
+
+        with open(self.events_file, "r", encoding="utf-8") as f:
+            for line in f:
+                if line.strip():
+                    data = json.loads(line)
+
+                    # Check query match
+                    content = data.get("content", "")
+                    tool_name = data.get("tool_name", "")
+                    topics_str = " ".join(data.get("topics", []))
+
+                    searchable = f"{content} {tool_name} {topics_str}".lower()
+
+                    if query_lower in searchable:
+                        # Apply filters
+                        if "topics" in filters and not any(
+                            t in data.get("topics", []) for t in filters["topics"]
+                        ):
+                            continue
+
+                        matching_events.append(Event(**data))
+
+        # Sort by timestamp (newest first)
+        matching_sessions.sort(key=lambda s: s.started_at, reverse=True)
+        matching_events.sort(key=lambda e: e.timestamp, reverse=True)
+
+        # Limit results
+        matching_sessions = matching_sessions[:limit]
+        matching_events = matching_events[:limit]
+
+        return matching_sessions, matching_events
+
+    def get_token_analytics(self, days: int = 30) -> dict[str, Any]:
+        """Get token usage analytics.
+
+        Args:
+            days: Number of days to analyze
+
+        Returns:
+            Token analytics dictionary
+        """
+        # Load all sessions
+        sessions_dict: dict[str, dict] = {}
+
+        with open(self.sessions_file, "r", encoding="utf-8") as f:
+            for line in f:
+                if line.strip():
+                    data = json.loads(line)
+                    sessions_dict[data["session_id"]] = data
+
+        sessions = [Session(**data) for data in sessions_dict.values()]
+
+        # Calculate totals
+        total_tokens = sum(s.total_tokens for s in sessions)
+        compressed_tokens = sum(s.compressed_tokens for s in sessions)
+        savings = total_tokens - compressed_tokens
+        avg_per_session = total_tokens // len(sessions) if sessions else 0
+
+        return {
+            "total_tokens": total_tokens,
+            "compressed_tokens": compressed_tokens,
+            "savings": savings,
+            "savings_percentage": f"{(savings / total_tokens * 100):.1f}%" if total_tokens > 0 else "0%",
+            "avg_per_session": avg_per_session,
+            "sessions_count": len(sessions),
+        }
+
+    def get_topic_analytics(self) -> dict[str, Any]:
+        """Get topic distribution analytics.
+
+        Returns:
+            Topic analytics dictionary
+        """
+        # Load all sessions
+        sessions_dict: dict[str, dict] = {}
+
+        with open(self.sessions_file, "r", encoding="utf-8") as f:
+            for line in f:
+                if line.strip():
+                    data = json.loads(line)
+                    sessions_dict[data["session_id"]] = data
+
+        # Count topics
+        topic_counts: dict[str, int] = {}
+        topic_sessions: dict[str, list[str]] = {}
+
+        for session_id, data in sessions_dict.items():
+            for topic in data.get("topics", []):
+                topic_counts[topic] = topic_counts.get(topic, 0) + 1
+                if topic not in topic_sessions:
+                    topic_sessions[topic] = []
+                topic_sessions[topic].append(session_id)
+
+        # Create clusters
+        clusters = []
+        for topic, count in topic_counts.items():
+            clusters.append({
+                "topic": topic,
+                "count": count,
+                "session_ids": topic_sessions[topic],
+            })
+
+        # Sort by count
+        clusters.sort(key=lambda x: x["count"], reverse=True)
+
+        return {
+            "clusters": clusters,
+            "total_topics": len(clusters),
+        }
+
+    def get_compression_analytics(self) -> dict[str, Any]:
+        """Get compression events analytics.
+
+        Returns:
+            Compression analytics dictionary
+        """
+        # Count compression events by type
+        early_count = 0
+        early_savings_total = 0
+        precompact_count = 0
+        precompact_savings_total = 0
+        sessionend_count = 0
+        sessionend_savings_total = 0
+
+        compaction_prevented = 0
+        total_sessions = 0
+
+        with open(self.events_file, "r", encoding="utf-8") as f:
+            for line in f:
+                if line.strip():
+                    data = json.loads(line)
+
+                    if data.get("type") == EventType.COMPRESSION:
+                        trigger = data.get("compression_trigger")
+                        savings = data.get("tokens_saved", 0)
+
+                        if trigger == CompressionTrigger.EARLY_COMPRESSION:
+                            early_count += 1
+                            early_savings_total += savings
+                        elif trigger == CompressionTrigger.PRECOMPACT:
+                            precompact_count += 1
+                            precompact_savings_total += savings
+                        elif trigger == CompressionTrigger.SESSIONEND:
+                            sessionend_count += 1
+                            sessionend_savings_total += savings
+
+                        # Check if compaction was prevented
+                        if data.get("metadata", {}).get("prevented_compaction"):
+                            compaction_prevented += 1
+
+        # Count total sessions
+        sessions_dict: dict[str, dict] = {}
+        with open(self.sessions_file, "r", encoding="utf-8") as f:
+            for line in f:
+                if line.strip():
+                    data = json.loads(line)
+                    sessions_dict[data["session_id"]] = data
+        total_sessions = len(sessions_dict)
+
+        return {
+            "early_compression_count": early_count,
+            "early_compression_avg_savings": early_savings_total // early_count if early_count > 0 else 0,
+            "precompact_count": precompact_count,
+            "precompact_avg_savings": precompact_savings_total // precompact_count if precompact_count > 0 else 0,
+            "sessionend_count": sessionend_count,
+            "sessionend_avg_savings": sessionend_savings_total // sessionend_count if sessionend_count > 0 else 0,
+            "compaction_prevention_rate": f"{(compaction_prevented / total_sessions * 100):.1f}%" if total_sessions > 0 else "0%",
+            "compaction_prevention_count": compaction_prevented,
+            "compaction_prevention_total": total_sessions,
+        }
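
For orientation, here is a minimal, hypothetical sketch of how this storage layer might be driven. It is not part of the package diff: the keyword arguments passed to Session and Event below are assumptions inferred from the fields storage.py reads back (session_id, started_at, topics, event_id, timestamp, type, compression_trigger, tokens_saved); the actual model constructors in uacs.visualization.models may require different or additional fields.

    # Hypothetical usage sketch -- not shipped in the wheel. Session/Event
    # field names are guesses based on what storage.py reads back.
    from pathlib import Path

    from uacs.visualization.models import (
        CompressionTrigger,
        Event,
        EventType,
        Session,
    )
    from uacs.visualization.storage import TraceStorage

    storage = TraceStorage(Path.home() / ".uacs" / "traces")

    # "Add or update" is literal appending: writing the same session_id again
    # appends a new JSONL record, and readers keep the latest occurrence.
    storage.add_session(Session(session_id="s1",
                                started_at="2025-01-01T00:00:00",
                                topics=["demo"]))

    storage.add_event(Event(event_id="e1",
                            session_id="s1",
                            timestamp="2025-01-01T00:00:05",
                            type=EventType.COMPRESSION,
                            compression_trigger=CompressionTrigger.PRECOMPACT,
                            tokens_saved=1200))

    sessions, total = storage.get_sessions(topic="demo", limit=10)
    events, _ = storage.get_events(session_id="s1",
                                   event_type=EventType.COMPRESSION)
    stats = storage.get_compression_analytics()

Two properties follow directly from the code: every read path re-scans an entire JSONL file, so queries are linear in the total number of records ever written, and nothing compacts the logs, so repeated session updates grow sessions.jsonl monotonically. Note also that the days argument of get_token_analytics is accepted but never used in this version, so its figures cover all sessions on disk.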