@anastops/cli 1.0.0 → 1.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/commands/doctor.d.ts.map +1 -1
- package/dist/commands/doctor.js +8 -6
- package/dist/commands/doctor.js.map +1 -1
- package/dist/commands/init.d.ts +17 -0
- package/dist/commands/init.d.ts.map +1 -1
- package/dist/commands/init.js +315 -17
- package/dist/commands/init.js.map +1 -1
- package/dist/commands/ranger.d.ts +7 -2
- package/dist/commands/ranger.d.ts.map +1 -1
- package/dist/commands/ranger.js +99 -21
- package/dist/commands/ranger.js.map +1 -1
- package/dist/commands/uninstall.d.ts +16 -0
- package/dist/commands/uninstall.d.ts.map +1 -0
- package/dist/commands/uninstall.js +206 -0
- package/dist/commands/uninstall.js.map +1 -0
- package/dist/index.js +11 -0
- package/dist/index.js.map +1 -1
- package/package.json +8 -6
- package/ranger-tui/pyproject.toml +51 -0
- package/ranger-tui/ranger_tui/__init__.py +5 -0
- package/ranger-tui/ranger_tui/__main__.py +16 -0
- package/ranger-tui/ranger_tui/__pycache__/__init__.cpython-314.pyc +0 -0
- package/ranger-tui/ranger_tui/__pycache__/__main__.cpython-314.pyc +0 -0
- package/ranger-tui/ranger_tui/__pycache__/accessibility.cpython-314.pyc +0 -0
- package/ranger-tui/ranger_tui/__pycache__/app.cpython-314.pyc +0 -0
- package/ranger-tui/ranger_tui/__pycache__/config.cpython-314.pyc +0 -0
- package/ranger-tui/ranger_tui/__pycache__/theme.cpython-314.pyc +0 -0
- package/ranger-tui/ranger_tui/accessibility.py +499 -0
- package/ranger-tui/ranger_tui/actions/__init__.py +13 -0
- package/ranger-tui/ranger_tui/actions/agent_actions.py +74 -0
- package/ranger-tui/ranger_tui/actions/session_actions.py +110 -0
- package/ranger-tui/ranger_tui/actions/task_actions.py +107 -0
- package/ranger-tui/ranger_tui/app.py +93 -0
- package/ranger-tui/ranger_tui/assets/ranger_head.png +0 -0
- package/ranger-tui/ranger_tui/config.py +100 -0
- package/ranger-tui/ranger_tui/data/__init__.py +16 -0
- package/ranger-tui/ranger_tui/data/__pycache__/__init__.cpython-314.pyc +0 -0
- package/ranger-tui/ranger_tui/data/__pycache__/client.cpython-314.pyc +0 -0
- package/ranger-tui/ranger_tui/data/__pycache__/models.cpython-314.pyc +0 -0
- package/ranger-tui/ranger_tui/data/client.py +858 -0
- package/ranger-tui/ranger_tui/data/models.py +151 -0
- package/ranger-tui/ranger_tui/screens/__init__.py +16 -0
- package/ranger-tui/ranger_tui/screens/__pycache__/__init__.cpython-314.pyc +0 -0
- package/ranger-tui/ranger_tui/screens/__pycache__/dashboard.cpython-314.pyc +0 -0
- package/ranger-tui/ranger_tui/screens/__pycache__/modals.cpython-314.pyc +0 -0
- package/ranger-tui/ranger_tui/screens/__pycache__/session.cpython-314.pyc +0 -0
- package/ranger-tui/ranger_tui/screens/__pycache__/task.cpython-314.pyc +0 -0
- package/ranger-tui/ranger_tui/screens/command_palette.py +357 -0
- package/ranger-tui/ranger_tui/screens/dashboard.py +232 -0
- package/ranger-tui/ranger_tui/screens/help.py +103 -0
- package/ranger-tui/ranger_tui/screens/modals.py +95 -0
- package/ranger-tui/ranger_tui/screens/session.py +289 -0
- package/ranger-tui/ranger_tui/screens/task.py +187 -0
- package/ranger-tui/ranger_tui/styles/ranger.tcss +254 -0
- package/ranger-tui/ranger_tui/theme.py +93 -0
- package/ranger-tui/ranger_tui/widgets/__init__.py +23 -0
- package/ranger-tui/ranger_tui/widgets/__pycache__/__init__.cpython-314.pyc +0 -0
- package/ranger-tui/ranger_tui/widgets/__pycache__/accessible.cpython-314.pyc +0 -0
- package/ranger-tui/ranger_tui/widgets/__pycache__/logo.cpython-314.pyc +0 -0
- package/ranger-tui/ranger_tui/widgets/__pycache__/logo_assets.cpython-314.pyc +0 -0
- package/ranger-tui/ranger_tui/widgets/__pycache__/ranger_image.cpython-314.pyc +0 -0
- package/ranger-tui/ranger_tui/widgets/__pycache__/sidebar.cpython-314.pyc +0 -0
- package/ranger-tui/ranger_tui/widgets/__pycache__/topbar.cpython-314.pyc +0 -0
- package/ranger-tui/ranger_tui/widgets/accessible.py +176 -0
- package/ranger-tui/ranger_tui/widgets/agents_table.py +151 -0
- package/ranger-tui/ranger_tui/widgets/header.py +141 -0
- package/ranger-tui/ranger_tui/widgets/logo.py +258 -0
- package/ranger-tui/ranger_tui/widgets/logo_assets.py +62 -0
- package/ranger-tui/ranger_tui/widgets/metrics_panel.py +121 -0
- package/ranger-tui/ranger_tui/widgets/ranger_image.py +91 -0
- package/ranger-tui/ranger_tui/widgets/sessions_table.py +191 -0
- package/ranger-tui/ranger_tui/widgets/sidebar.py +91 -0
- package/ranger-tui/ranger_tui/widgets/tasks_table.py +189 -0
- package/ranger-tui/ranger_tui/widgets/topbar.py +168 -0
|
@@ -0,0 +1,858 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Data client for MongoDB and Redis connections.
|
|
3
|
+
Provides all read/write operations for sessions, tasks, agents, and artifacts.
|
|
4
|
+
"""
|
|
5
|
+
|
|
6
|
+
import asyncio
|
|
7
|
+
from datetime import datetime, UTC
|
|
8
|
+
from typing import Optional, Any
|
|
9
|
+
from motor.motor_asyncio import AsyncIOMotorClient, AsyncIOMotorDatabase
|
|
10
|
+
import redis.asyncio as aioredis
|
|
11
|
+
from nanoid import generate as nanoid
|
|
12
|
+
|
|
13
|
+
from ranger_tui.config import config
|
|
14
|
+
from .models import (
|
|
15
|
+
Session, Task, Agent, Artifact, SessionReport,
|
|
16
|
+
SessionMetadata, SessionStatistics, TaskStats,
|
|
17
|
+
TokenUsage, TaskInput, TaskOutput,
|
|
18
|
+
)
|
|
19
|
+
|
|
20
|
+
|
|
21
|
+
class DataClient:
    """
    Async data client for MongoDB and Redis.

    Provides all operations matching the 40 MCP tools: session, task, agent
    and artifact CRUD plus metrics and live Redis counters. All read helpers
    degrade gracefully (empty list / None / False) when not connected; the
    create helpers raise RuntimeError instead, since silently dropping a
    write would be worse than failing loudly.
    """

    def __init__(
        self,
        mongodb_url: Optional[str] = None,
        redis_url: Optional[str] = None,
    ):
        # Explicit URLs win; otherwise fall back to the shared app config.
        self._mongodb_url = mongodb_url or config.mongodb_url
        self._redis_url = redis_url or config.redis_url
        self._mongo_client: Optional[AsyncIOMotorClient] = None
        self._db: Optional[AsyncIOMotorDatabase] = None
        self._redis: Optional[aioredis.Redis] = None
        self._connected = False

    async def connect(self) -> bool:
        """Open MongoDB and Redis connections and verify both with a ping.

        Returns True on success. On any failure the client is left in a
        disconnected state and False is returned -- errors are reported,
        not raised, so callers can degrade gracefully.
        """
        try:
            # Short timeouts so a down database fails fast instead of hanging.
            self._mongo_client = AsyncIOMotorClient(
                self._mongodb_url,
                serverSelectionTimeoutMS=5000,
                connectTimeoutMS=5000,
            )
            self._db = self._mongo_client.anastops

            # Motor connects lazily; ping forces a real round-trip now.
            await self._db.command("ping")

            # decode_responses=True so Redis returns str, not bytes.
            self._redis = aioredis.from_url(
                self._redis_url,
                decode_responses=True,
            )
            await self._redis.ping()

            self._connected = True
            return True
        except Exception as e:
            self._connected = False
            print(f"Connection error: {e}")
            return False

    async def disconnect(self) -> None:
        """Close both connections and reset all handles to None."""
        if self._mongo_client:
            self._mongo_client.close()
            self._mongo_client = None
            self._db = None
        if self._redis:
            await self._redis.close()
            self._redis = None
        self._connected = False

    @property
    def is_connected(self) -> bool:
        """Whether the last connect() succeeded (not re-verified live)."""
        return self._connected

    async def health_check(self) -> dict[str, Any]:
        """Ping both stores and report per-store status and error text.

        Returns a dict with boolean ``mongodb``/``redis`` flags and
        ``mongodb_error``/``redis_error`` strings (None when healthy or
        when the store was never connected).
        """
        result = {
            "mongodb": False,
            "redis": False,
            "mongodb_error": None,
            "redis_error": None,
        }

        if self._db is not None:
            try:
                await self._db.command("ping")
                result["mongodb"] = True
            except Exception as e:
                result["mongodb_error"] = str(e)

        if self._redis is not None:
            try:
                await self._redis.ping()
                result["redis"] = True
            except Exception as e:
                result["redis_error"] = str(e)

        return result

    # ============ Session Operations ============

    async def get_sessions(
        self,
        status: Optional[str] = None,
        limit: int = 100,
    ) -> list[Session]:
        """Get sessions (newest-updated first), with per-status task counts.

        Task counts are fetched in a single aggregation and written into
        each session's metadata so the dashboard never issues N+1 queries.
        """
        if self._db is None:
            return []

        query: dict[str, Any] = {}
        if status:
            query["status"] = status

        cursor = self._db.sessions.find(query).sort("updated_at", -1).limit(limit)
        sessions = []

        async for doc in cursor:
            sessions.append(self._doc_to_session(doc))

        # One aggregation for all sessions, then fan the counts back out.
        if sessions:
            session_ids = [s.id for s in sessions]
            task_counts = await self._get_task_counts_by_session(session_ids)

            for session in sessions:
                counts = task_counts.get(session.id, {})
                session.metadata.tasks_total = counts.get("total", 0)
                session.metadata.tasks_completed = counts.get("completed", 0)
                session.metadata.tasks_failed = counts.get("failed", 0)
                session.metadata.tasks_running = counts.get("running", 0)
                session.metadata.tasks_pending = counts.get("pending", 0)
                session.metadata.tasks_queued = counts.get("queued", 0)

        return sessions

    async def _get_task_counts_by_session(
        self,
        session_ids: list[str],
    ) -> dict[str, dict[str, int]]:
        """Get task counts grouped by session and status.

        Result maps session_id -> {status: count, ..., "total": n}. The
        known statuses are pre-seeded with 0; any other status found in the
        data is still recorded and included in "total".
        """
        if self._db is None:
            return {}

        pipeline = [
            {"$match": {"session_id": {"$in": session_ids}}},
            {"$group": {
                "_id": {"session_id": "$session_id", "status": "$status"},
                "count": {"$sum": 1}
            }},
        ]

        result: dict[str, dict[str, int]] = {}
        async for doc in self._db.tasks.aggregate(pipeline):
            session_id = doc["_id"]["session_id"]
            status = doc["_id"]["status"]
            count = doc["count"]

            if session_id not in result:
                result[session_id] = {"total": 0, "completed": 0, "failed": 0, "running": 0, "pending": 0, "queued": 0}

            result[session_id][status] = count
            result[session_id]["total"] += count

        return result

    async def get_session(self, session_id: str) -> Optional[Session]:
        """Get a single session by ID, or None if absent/disconnected."""
        if self._db is None:
            return None

        doc = await self._db.sessions.find_one({"_id": session_id})
        if doc:
            return self._doc_to_session(doc)
        return None

    async def create_session(self, objective: str) -> Session:
        """Create a new active session (session_spawn).

        Raises RuntimeError when not connected.
        """
        if self._db is None:
            raise RuntimeError("Not connected to database")

        session_id = nanoid(12)
        now = datetime.now(UTC)

        doc = {
            "_id": session_id,
            "id": session_id,
            "user_id": "local",
            "parent_session_id": None,
            "fork_point": None,
            "fork_reason": None,
            "status": "active",
            "objective": objective,
            "created_at": now,
            "updated_at": now,
            "metadata": {
                "total_tokens": 0,
                "total_cost": 0,
                "agents_used": [],
                "files_affected": [],
                "tasks_completed": 0,
                "tasks_failed": 0,
            },
        }

        await self._db.sessions.insert_one(doc)
        return self._doc_to_session(doc)

    async def archive_session(self, session_id: str) -> bool:
        """Archive a session (session_archive). True iff a doc was modified."""
        if self._db is None:
            return False

        result = await self._db.sessions.update_one(
            {"_id": session_id},
            {"$set": {"status": "archived", "updated_at": datetime.now(UTC)}}
        )
        return result.modified_count > 0

    async def purge_session(self, session_id: str) -> bool:
        """Permanently delete a session and all related data (session_purge).

        Deletes tasks, agents, and artifacts belonging to the session as
        well. Best-effort: returns False on any database error.
        """
        if self._db is None:
            return False

        try:
            await self._db.sessions.delete_one({"_id": session_id})
            await self._db.tasks.delete_many({"session_id": session_id})
            await self._db.agents.delete_many({"session_id": session_id})
            await self._db.artifacts.delete_many({"session_id": session_id})
            return True
        except Exception:
            return False

    async def purge_sessions_by_status(self, status: str) -> dict[str, Any]:
        """Purge all sessions with a given status, plus their related data.

        Returns {"deleted_count": n, "session_ids": [...]}.
        """
        if self._db is None:
            return {"deleted_count": 0, "session_ids": []}

        # Collect IDs first so related collections can be purged by session.
        cursor = self._db.sessions.find({"status": status}, {"_id": 1})
        session_ids = [doc["_id"] async for doc in cursor]

        if not session_ids:
            return {"deleted_count": 0, "session_ids": []}

        await self._db.tasks.delete_many({"session_id": {"$in": session_ids}})
        await self._db.agents.delete_many({"session_id": {"$in": session_ids}})
        await self._db.artifacts.delete_many({"session_id": {"$in": session_ids}})

        result = await self._db.sessions.delete_many({"_id": {"$in": session_ids}})

        return {
            "deleted_count": result.deleted_count,
            "session_ids": session_ids,
        }

    async def fork_session(
        self,
        session_id: str,
        reason: Optional[str] = None,
    ) -> Optional[Session]:
        """Fork a session (session_fork).

        Creates a new active session that copies the parent's objective and
        metadata and records the parent link. Returns None when the parent
        does not exist or the client is disconnected.
        """
        if self._db is None:
            return None

        parent = await self.get_session(session_id)
        if not parent:
            return None

        fork_id = nanoid(12)
        now = datetime.now(UTC)

        doc = {
            "_id": fork_id,
            "id": fork_id,
            "user_id": parent.user_id,
            "parent_session_id": session_id,
            "fork_point": 0,
            "fork_reason": reason,
            "status": "active",
            "objective": parent.objective,
            "created_at": now,
            "updated_at": now,
            "metadata": parent.metadata.model_dump(),
        }

        await self._db.sessions.insert_one(doc)
        return self._doc_to_session(doc)

    async def get_session_report(self, session_id: str) -> Optional[SessionReport]:
        """Get comprehensive session report (session_report).

        Bundles the session with its tasks, agents, artifacts, and derived
        statistics (per-status task counts, token totals, cost). Note the
        underlying list fetches are capped at their default limit of 100.
        """
        session = await self.get_session(session_id)
        if not session:
            return None

        tasks = await self.get_tasks(session_id)
        agents = await self.get_agents(session_id)
        artifacts = await self.get_artifacts(session_id)

        task_stats = TaskStats(
            total=len(tasks),
            pending=sum(1 for t in tasks if t.status == "pending"),
            queued=sum(1 for t in tasks if t.status == "queued"),
            running=sum(1 for t in tasks if t.status == "running"),
            completed=sum(1 for t in tasks if t.status == "completed"),
            failed=sum(1 for t in tasks if t.status == "failed"),
            cancelled=sum(1 for t in tasks if t.status == "cancelled"),
        )

        total_tokens = sum(t.token_usage.total_tokens for t in tasks)
        total_cost = sum(t.token_usage.cost for t in tasks)

        statistics = SessionStatistics(
            tasks=task_stats,
            total_tokens=total_tokens,
            total_cost_usd=total_cost,
            agents_count=len(agents),
            artifacts_count=len(artifacts),
        )

        return SessionReport(
            session=session,
            statistics=statistics,
            tasks=tasks,
            agents=agents,
            artifacts=artifacts,
        )

    # ============ Task Operations ============

    async def get_tasks(
        self,
        session_id: Optional[str] = None,
        status: Optional[str] = None,
        limit: int = 100,
    ) -> list[Task]:
        """Get tasks (newest first), optionally filtered by session/status."""
        if self._db is None:
            return []

        query: dict[str, Any] = {}
        if session_id:
            query["session_id"] = session_id
        if status:
            query["status"] = status

        cursor = self._db.tasks.find(query).sort("created_at", -1).limit(limit)
        tasks = []

        async for doc in cursor:
            tasks.append(self._doc_to_task(doc))

        return tasks

    async def get_task(self, task_id: str) -> Optional[Task]:
        """Get a single task by ID, or None if absent/disconnected."""
        if self._db is None:
            return None

        doc = await self._db.tasks.find_one({"_id": task_id})
        if doc:
            return self._doc_to_task(doc)
        return None

    async def create_task(
        self,
        session_id: str,
        task_type: str,
        description: str,
        prompt: str,
        provider: str = "claude",
        model: str = "claude-sonnet",
    ) -> Task:
        """Create a new pending task (task_create).

        Raises RuntimeError when not connected.
        """
        if self._db is None:
            raise RuntimeError("Not connected to database")

        task_id = nanoid(12)
        now = datetime.now(UTC)

        doc = {
            "_id": task_id,
            "id": task_id,
            "session_id": session_id,
            "agent_id": None,
            "type": task_type,
            "status": "pending",
            "description": description,
            "input": {"prompt": prompt, "context_files": []},
            "output": None,
            "error": None,
            "complexity_score": 0,
            "routing_tier": 3,
            "provider": provider,
            "model": model,
            "token_usage": {
                "prompt_tokens": 0,
                "completion_tokens": 0,
                "total_tokens": 0,
                "cost": 0,
            },
            "created_at": now,
            "started_at": None,
            "completed_at": None,
            "dependencies": [],
            "priority": 5,
            "retry_count": 0,
            "max_retries": 3,
        }

        await self._db.tasks.insert_one(doc)
        return self._doc_to_task(doc)

    async def cancel_task(self, task_id: str) -> bool:
        """Cancel a task (task_cancel).

        Only pending/queued/running tasks can be cancelled; the status
        filter in the query makes the operation race-safe.
        """
        if self._db is None:
            return False

        result = await self._db.tasks.update_one(
            {"_id": task_id, "status": {"$in": ["pending", "queued", "running"]}},
            {"$set": {"status": "cancelled", "completed_at": datetime.now(UTC)}}
        )
        return result.modified_count > 0

    async def retry_task(self, task_id: str) -> bool:
        """Retry a failed task (task_retry).

        Resets the task to pending, clears error/timestamps, and bumps
        retry_count. Only applies to tasks currently in "failed" state.
        """
        if self._db is None:
            return False

        result = await self._db.tasks.update_one(
            {"_id": task_id, "status": "failed"},
            {
                "$set": {
                    "status": "pending",
                    "error": None,
                    "started_at": None,
                    "completed_at": None,
                },
                "$inc": {"retry_count": 1},
            }
        )
        return result.modified_count > 0

    async def queue_task(self, task_id: str) -> bool:
        """Queue a pending task for execution (task_queue)."""
        if self._db is None:
            return False

        result = await self._db.tasks.update_one(
            {"_id": task_id, "status": "pending"},
            {"$set": {"status": "queued"}}
        )
        return result.modified_count > 0

    async def complete_task(
        self,
        task_id: str,
        content: str,
        artifacts: Optional[list[str]] = None,
    ) -> bool:
        """Mark a task as completed and attach its output (task_complete)."""
        if self._db is None:
            return False

        result = await self._db.tasks.update_one(
            {"_id": task_id},
            {
                "$set": {
                    "status": "completed",
                    "completed_at": datetime.now(UTC),
                    "output": {
                        "content": content,
                        "artifacts": artifacts or [],
                        "files_modified": [],
                        "metadata": {},
                    },
                }
            }
        )
        return result.modified_count > 0

    # ============ Agent Operations ============

    async def get_agents(
        self,
        session_id: Optional[str] = None,
        status: Optional[str] = None,
        limit: int = 100,
    ) -> list[Agent]:
        """Get agents (newest first), optionally filtered by session/status."""
        if self._db is None:
            return []

        query: dict[str, Any] = {}
        if session_id:
            query["session_id"] = session_id
        if status:
            query["status"] = status

        cursor = self._db.agents.find(query).sort("created_at", -1).limit(limit)
        agents = []

        async for doc in cursor:
            agents.append(self._doc_to_agent(doc))

        return agents

    async def get_agent(self, agent_id: str) -> Optional[Agent]:
        """Get a single agent by ID, or None if absent/disconnected."""
        if self._db is None:
            return None

        doc = await self._db.agents.find_one({"_id": agent_id})
        if doc:
            return self._doc_to_agent(doc)
        return None

    async def create_agent(
        self,
        session_id: str,
        role: str,
        name: Optional[str] = None,
        provider: str = "claude",
        model: str = "claude-sonnet",
    ) -> Agent:
        """Create a new idle agent (agent_create).

        A default name is derived from the agent ID when none is given.
        Raises RuntimeError when not connected.
        """
        if self._db is None:
            raise RuntimeError("Not connected to database")

        agent_id = nanoid(12)
        now = datetime.now(UTC)

        doc = {
            "_id": agent_id,
            "id": agent_id,
            "session_id": session_id,
            "role": role,
            "name": name or f"agent-{agent_id[:4]}",
            "status": "idle",
            "provider": provider,
            "model": model,
            "current_task_id": None,
            "tasks_completed": 0,
            "tasks_failed": 0,
            "tokens_used": 0,
            "created_at": now,
            "last_activity_at": now,
            "config": {},
        }

        await self._db.agents.insert_one(doc)
        return self._doc_to_agent(doc)

    async def deploy_agent(self, agent_id: str, task_id: str) -> bool:
        """Deploy an agent to a task (agent_deploy): mark it working."""
        if self._db is None:
            return False

        result = await self._db.agents.update_one(
            {"_id": agent_id},
            {
                "$set": {
                    "current_task_id": task_id,
                    "status": "working",
                    "last_activity_at": datetime.now(UTC),
                }
            }
        )
        return result.modified_count > 0

    async def retire_agent(self, agent_id: str) -> bool:
        """Retire an agent (agent_retire): mark completed, clear its task."""
        if self._db is None:
            return False

        result = await self._db.agents.update_one(
            {"_id": agent_id},
            {
                "$set": {
                    "status": "completed",
                    "current_task_id": None,
                    "last_activity_at": datetime.now(UTC),
                }
            }
        )
        return result.modified_count > 0

    # ============ Artifact Operations ============

    async def get_artifacts(
        self,
        session_id: Optional[str] = None,
        task_id: Optional[str] = None,
        limit: int = 100,
    ) -> list[Artifact]:
        """Get artifacts (newest first), optionally filtered by session/task."""
        if self._db is None:
            return []

        query: dict[str, Any] = {}
        if session_id:
            query["session_id"] = session_id
        if task_id:
            query["task_id"] = task_id

        cursor = self._db.artifacts.find(query).sort("created_at", -1).limit(limit)
        artifacts = []

        async for doc in cursor:
            artifacts.append(self._doc_to_artifact(doc))

        return artifacts

    async def save_artifact(
        self,
        session_id: str,
        artifact_type: str,
        name: str,
        content: str,
    ) -> Artifact:
        """Save an artifact (artifact_save).

        The summary is the first 200 chars; token_count is the rough
        len/4 heuristic. Raises RuntimeError when not connected.
        """
        if self._db is None:
            raise RuntimeError("Not connected to database")

        artifact_id = nanoid(12)
        now = datetime.now(UTC)

        doc = {
            "_id": artifact_id,
            "id": artifact_id,
            "session_id": session_id,
            "task_id": None,
            "type": artifact_type,
            "name": name,
            "extension": "",
            "content": content,
            "content_hash": "",
            "size_bytes": len(content),
            "summary": content[:200],
            "token_count": len(content) // 4,
            "relevance_score": 50,
            "metadata": {},
            "created_at": now,
            "updated_at": now,
        }

        await self._db.artifacts.insert_one(doc)
        return self._doc_to_artifact(doc)

    # ============ Metrics Operations ============

    async def get_session_cost(self, session_id: str) -> dict[str, Any]:
        """Get session cost breakdown (session_cost).

        NOTE(review): sums over get_tasks' default limit of 100, so very
        large sessions may be under-reported -- matches existing behavior.
        """
        tasks = await self.get_tasks(session_id)
        total_cost = sum(t.token_usage.cost for t in tasks)

        return {
            "session_id": session_id,
            "total_cost_usd": total_cost,
            "currency": "USD",
        }

    async def get_session_metrics(self, session_id: str) -> dict[str, Any]:
        """Get session token metrics (session_metrics)."""
        tasks = await self.get_tasks(session_id)

        input_tokens = sum(t.token_usage.prompt_tokens for t in tasks)
        output_tokens = sum(t.token_usage.completion_tokens for t in tasks)
        total_tokens = sum(t.token_usage.total_tokens for t in tasks)

        return {
            "session_id": session_id,
            "request_count": len(tasks),
            "token_usage": {
                "input": input_tokens,
                "output": output_tokens,
                "total": total_tokens,
            },
        }

    # ============ Redis Operations ============

    async def _count_redis_status(self, pattern: str, target: str) -> int:
        """Count status keys matching *pattern* whose value equals *target*.

        Uses SCAN (scan_iter) instead of KEYS so the Redis server is never
        blocked by a full keyspace sweep. Best-effort: returns 0 on any
        error or when Redis is not connected.
        """
        if self._redis is None:
            return 0

        try:
            count = 0
            async for key in self._redis.scan_iter(match=pattern):
                if await self._redis.get(key) == target:
                    count += 1
            return count
        except Exception:
            return 0

    async def get_active_session_count(self) -> int:
        """Get count of active sessions from Redis."""
        return await self._count_redis_status("session:*:status", "active")

    async def get_running_task_count(self) -> int:
        """Get count of running tasks from Redis."""
        return await self._count_redis_status("task:*:status", "running")

    # ============ Document Converters ============

    def _doc_to_session(self, doc: dict[str, Any]) -> Session:
        """Convert MongoDB document to Session model, defaulting missing fields."""
        metadata_doc = doc.get("metadata", {})
        metadata = SessionMetadata(
            total_tokens=metadata_doc.get("total_tokens", 0),
            total_cost=metadata_doc.get("total_cost", 0),
            agents_used=metadata_doc.get("agents_used", []),
            files_affected=metadata_doc.get("files_affected", []),
            tasks_completed=metadata_doc.get("tasks_completed", 0),
            tasks_failed=metadata_doc.get("tasks_failed", 0),
        )

        return Session(
            id=doc.get("id", doc.get("_id")),
            user_id=doc.get("user_id", "local"),
            parent_session_id=doc.get("parent_session_id"),
            fork_point=doc.get("fork_point"),
            fork_reason=doc.get("fork_reason"),
            status=doc.get("status", "active"),
            objective=doc.get("objective", ""),
            created_at=doc.get("created_at", datetime.now(UTC)),
            updated_at=doc.get("updated_at", datetime.now(UTC)),
            metadata=metadata,
        )

    def _doc_to_task(self, doc: dict[str, Any]) -> Task:
        """Convert MongoDB document to Task model, defaulting missing fields."""
        input_doc = doc.get("input", {})
        task_input = TaskInput(
            prompt=input_doc.get("prompt", ""),
            context_files=input_doc.get("context_files", []),
            agent=input_doc.get("agent"),
            skills=input_doc.get("skills", []),
        )

        # Output is optional: only present once the task has produced one.
        output_doc = doc.get("output")
        task_output = None
        if output_doc:
            task_output = TaskOutput(
                content=output_doc.get("content", ""),
                artifacts=output_doc.get("artifacts", []),
                files_modified=output_doc.get("files_modified", []),
                metadata=output_doc.get("metadata", {}),
            )

        token_doc = doc.get("token_usage", {})
        token_usage = TokenUsage(
            prompt_tokens=token_doc.get("prompt_tokens", 0),
            completion_tokens=token_doc.get("completion_tokens", 0),
            total_tokens=token_doc.get("total_tokens", 0),
            cost=token_doc.get("cost", 0),
        )

        return Task(
            id=doc.get("id", doc.get("_id")),
            session_id=doc.get("session_id", ""),
            agent_id=doc.get("agent_id"),
            type=doc.get("type", "other"),
            status=doc.get("status", "pending"),
            description=doc.get("description", ""),
            input=task_input,
            output=task_output,
            error=doc.get("error"),
            complexity_score=doc.get("complexity_score", 0),
            routing_tier=doc.get("routing_tier", 3),
            provider=doc.get("provider", "claude"),
            model=doc.get("model", "claude-sonnet"),
            token_usage=token_usage,
            created_at=doc.get("created_at", datetime.now(UTC)),
            started_at=doc.get("started_at"),
            completed_at=doc.get("completed_at"),
            dependencies=doc.get("dependencies", []),
            priority=doc.get("priority", 5),
            retry_count=doc.get("retry_count", 0),
            max_retries=doc.get("max_retries", 3),
            logs=doc.get("logs"),
        )

    def _doc_to_agent(self, doc: dict[str, Any]) -> Agent:
        """Convert MongoDB document to Agent model, defaulting missing fields."""
        return Agent(
            id=doc.get("id", doc.get("_id")),
            session_id=doc.get("session_id", ""),
            role=doc.get("role", "implementer"),
            name=doc.get("name", ""),
            status=doc.get("status", "idle"),
            provider=doc.get("provider", "claude"),
            model=doc.get("model", "claude-sonnet"),
            current_task_id=doc.get("current_task_id"),
            tasks_completed=doc.get("tasks_completed", 0),
            tasks_failed=doc.get("tasks_failed", 0),
            tokens_used=doc.get("tokens_used", 0),
            created_at=doc.get("created_at", datetime.now(UTC)),
            last_activity_at=doc.get("last_activity_at", datetime.now(UTC)),
            config=doc.get("config", {}),
        )

    def _doc_to_artifact(self, doc: dict[str, Any]) -> Artifact:
        """Convert MongoDB document to Artifact model, defaulting missing fields."""
        return Artifact(
            id=doc.get("id", doc.get("_id")),
            session_id=doc.get("session_id", ""),
            task_id=doc.get("task_id"),
            type=doc.get("type", "other"),
            name=doc.get("name", ""),
            extension=doc.get("extension", ""),
            content=doc.get("content", ""),
            content_hash=doc.get("content_hash", ""),
            size_bytes=doc.get("size_bytes", 0),
            summary=doc.get("summary", ""),
            token_count=doc.get("token_count", 0),
            relevance_score=doc.get("relevance_score", 50),
            metadata=doc.get("metadata", {}),
            created_at=doc.get("created_at", datetime.now(UTC)),
            updated_at=doc.get("updated_at", datetime.now(UTC)),
        )
|
|
847
|
+
|
|
848
|
+
|
|
849
|
+
# Lazily-created process-wide client; access it only via get_client().
_client_instance: Optional[DataClient] = None


def get_client() -> DataClient:
    """Return the shared DataClient, constructing it on first call."""
    global _client_instance
    if _client_instance is None:
        _client_instance = DataClient()
    return _client_instance
|