dooers-workers 0.2.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- dooers/__init__.py +73 -0
- dooers/broadcast.py +180 -0
- dooers/config.py +22 -0
- dooers/features/__init__.py +0 -0
- dooers/features/analytics/__init__.py +12 -0
- dooers/features/analytics/collector.py +219 -0
- dooers/features/analytics/models.py +50 -0
- dooers/features/analytics/worker_analytics.py +100 -0
- dooers/features/settings/__init__.py +12 -0
- dooers/features/settings/broadcaster.py +97 -0
- dooers/features/settings/models.py +72 -0
- dooers/features/settings/worker_settings.py +85 -0
- dooers/handlers/__init__.py +16 -0
- dooers/handlers/memory.py +105 -0
- dooers/handlers/request.py +12 -0
- dooers/handlers/response.py +66 -0
- dooers/handlers/router.py +957 -0
- dooers/migrations/__init__.py +3 -0
- dooers/migrations/schemas.py +126 -0
- dooers/persistence/__init__.py +9 -0
- dooers/persistence/base.py +42 -0
- dooers/persistence/postgres.py +459 -0
- dooers/persistence/sqlite.py +433 -0
- dooers/protocol/__init__.py +108 -0
- dooers/protocol/frames.py +298 -0
- dooers/protocol/models.py +72 -0
- dooers/protocol/parser.py +19 -0
- dooers/registry.py +101 -0
- dooers/server.py +162 -0
- dooers/settings.py +3 -0
- dooers_workers-0.2.0.dist-info/METADATA +228 -0
- dooers_workers-0.2.0.dist-info/RECORD +33 -0
- dooers_workers-0.2.0.dist-info/WHEEL +4 -0
|
@@ -0,0 +1,126 @@
|
|
|
1
|
+
from typing import Literal
|
|
2
|
+
|
|
3
|
+
|
|
4
|
+
def get_migration_sql(
    database_type: Literal["postgres", "sqlite"],
    table_prefix: str = "worker_",
) -> str:
    """Build the idempotent DDL script that creates the worker tables.

    Args:
        database_type: Target SQL dialect, either ``"postgres"`` or ``"sqlite"``.
        table_prefix: Prefix applied to every table and index name, so several
            deployments can coexist in one database.

    Returns:
        A multi-statement SQL script. Every ``CREATE`` uses ``IF NOT EXISTS``,
        so the script can be re-applied safely.
    """
    threads_table = f"{table_prefix}threads"
    events_table = f"{table_prefix}events"
    runs_table = f"{table_prefix}runs"
    settings_table = f"{table_prefix}settings"

    if database_type == "postgres":
        # BUG FIX: "values" is a reserved keyword in PostgreSQL; it must be
        # double-quoted or the CREATE TABLE fails with a syntax error. The
        # quoted identifier folds to the same lowercase name, so readers of
        # the column are unaffected.
        return f"""
        CREATE TABLE IF NOT EXISTS {threads_table} (
            id TEXT PRIMARY KEY,
            worker_id TEXT NOT NULL,
            user_id TEXT,
            title TEXT,
            created_at TIMESTAMPTZ NOT NULL,
            updated_at TIMESTAMPTZ NOT NULL,
            last_event_at TIMESTAMPTZ NOT NULL
        );

        CREATE TABLE IF NOT EXISTS {events_table} (
            id TEXT PRIMARY KEY,
            thread_id TEXT NOT NULL REFERENCES {threads_table}(id),
            run_id TEXT,
            type TEXT NOT NULL,
            actor TEXT NOT NULL,
            user_id TEXT,
            user_name TEXT,
            user_email TEXT,
            content JSONB,
            data JSONB,
            created_at TIMESTAMPTZ NOT NULL
        );

        CREATE TABLE IF NOT EXISTS {runs_table} (
            id TEXT PRIMARY KEY,
            thread_id TEXT NOT NULL REFERENCES {threads_table}(id),
            agent_id TEXT,
            status TEXT NOT NULL,
            started_at TIMESTAMPTZ NOT NULL,
            ended_at TIMESTAMPTZ,
            error TEXT
        );

        CREATE TABLE IF NOT EXISTS {settings_table} (
            worker_id TEXT PRIMARY KEY,
            "values" JSONB NOT NULL DEFAULT '{{}}',
            created_at TIMESTAMPTZ DEFAULT NOW(),
            updated_at TIMESTAMPTZ DEFAULT NOW()
        );

        CREATE INDEX IF NOT EXISTS idx_{table_prefix}threads_worker_id
            ON {threads_table}(worker_id);
        CREATE INDEX IF NOT EXISTS idx_{table_prefix}threads_user_id
            ON {threads_table}(user_id);
        CREATE INDEX IF NOT EXISTS idx_{table_prefix}events_thread_id
            ON {events_table}(thread_id);
        CREATE INDEX IF NOT EXISTS idx_{table_prefix}events_user_id
            ON {events_table}(user_id);
        CREATE INDEX IF NOT EXISTS idx_{table_prefix}runs_thread_id
            ON {runs_table}(thread_id);
        CREATE INDEX IF NOT EXISTS idx_{table_prefix}settings_worker
            ON {settings_table}(worker_id);
        """

    # SQLite variant: TEXT timestamps (ISO-8601 strings) and TEXT-encoded
    # JSON blobs instead of TIMESTAMPTZ/JSONB. VALUES is a keyword in SQLite
    # as well, so the column is double-quoted here too.
    return f"""
    CREATE TABLE IF NOT EXISTS {threads_table} (
        id TEXT PRIMARY KEY,
        worker_id TEXT NOT NULL,
        user_id TEXT,
        title TEXT,
        created_at TEXT NOT NULL,
        updated_at TEXT NOT NULL,
        last_event_at TEXT NOT NULL
    );

    CREATE TABLE IF NOT EXISTS {events_table} (
        id TEXT PRIMARY KEY,
        thread_id TEXT NOT NULL,
        run_id TEXT,
        type TEXT NOT NULL,
        actor TEXT NOT NULL,
        user_id TEXT,
        user_name TEXT,
        user_email TEXT,
        content TEXT,
        data TEXT,
        created_at TEXT NOT NULL,
        FOREIGN KEY (thread_id) REFERENCES {threads_table}(id)
    );

    CREATE TABLE IF NOT EXISTS {runs_table} (
        id TEXT PRIMARY KEY,
        thread_id TEXT NOT NULL,
        agent_id TEXT,
        status TEXT NOT NULL,
        started_at TEXT NOT NULL,
        ended_at TEXT,
        error TEXT,
        FOREIGN KEY (thread_id) REFERENCES {threads_table}(id)
    );

    CREATE TABLE IF NOT EXISTS {settings_table} (
        worker_id TEXT PRIMARY KEY,
        "values" TEXT NOT NULL DEFAULT '{{}}',
        created_at TEXT DEFAULT (datetime('now')),
        updated_at TEXT DEFAULT (datetime('now'))
    );

    CREATE INDEX IF NOT EXISTS idx_{table_prefix}threads_worker_id
        ON {threads_table}(worker_id);
    CREATE INDEX IF NOT EXISTS idx_{table_prefix}threads_user_id
        ON {threads_table}(user_id);
    CREATE INDEX IF NOT EXISTS idx_{table_prefix}events_thread_id
        ON {events_table}(thread_id);
    CREATE INDEX IF NOT EXISTS idx_{table_prefix}events_user_id
        ON {events_table}(user_id);
    CREATE INDEX IF NOT EXISTS idx_{table_prefix}runs_thread_id
        ON {runs_table}(thread_id);
    CREATE INDEX IF NOT EXISTS idx_{table_prefix}settings_worker
        ON {settings_table}(worker_id);
    """
|
@@ -0,0 +1,42 @@
|
|
|
1
|
+
from datetime import datetime
|
|
2
|
+
from typing import Any, Protocol
|
|
3
|
+
|
|
4
|
+
from dooers.protocol.models import Run, Thread, ThreadEvent
|
|
5
|
+
|
|
6
|
+
|
|
7
|
+
class Persistence(Protocol):
    """Structural interface every storage backend must satisfy.

    Implementations (e.g. Postgres- and SQLite-backed classes elsewhere in
    this package) are matched structurally via ``typing.Protocol`` — they do
    not need to inherit from this class.
    """

    # --- Lifecycle ---
    async def connect(self) -> None: ...
    async def disconnect(self) -> None: ...
    async def migrate(self) -> None: ...

    # --- Threads ---
    async def create_thread(self, thread: Thread) -> None: ...
    async def get_thread(self, thread_id: str) -> Thread | None: ...
    async def update_thread(self, thread: Thread) -> None: ...
    # Paged listing, optionally filtered by user_id; `cursor` is an opaque
    # pagination token (may be unused by an implementation).
    async def list_threads(
        self,
        worker_id: str,
        user_id: str | None,
        cursor: str | None,
        limit: int,
    ) -> list[Thread]: ...

    # --- Events ---
    async def create_event(self, event: ThreadEvent) -> None: ...
    # Returns events after `after_event_id` (exclusive) when provided.
    async def get_events(
        self,
        thread_id: str,
        after_event_id: str | None,
        limit: int,
    ) -> list[ThreadEvent]: ...

    # --- Runs ---
    async def create_run(self, run: Run) -> None: ...
    async def update_run(self, run: Run) -> None: ...

    # Settings methods
    async def get_settings(self, worker_id: str) -> dict[str, Any]:
        """Get all stored values for a worker. Returns empty dict if none."""
        ...

    async def update_setting(self, worker_id: str, field_id: str, value: Any) -> datetime:
        """Update a single field value. Returns updated_at timestamp."""
        ...

    async def set_settings(self, worker_id: str, values: dict[str, Any]) -> datetime:
        """Replace all settings values. Returns updated_at timestamp."""
        ...
|
@@ -0,0 +1,459 @@
|
|
|
1
|
+
import json
|
|
2
|
+
from datetime import UTC, datetime
|
|
3
|
+
from typing import Any
|
|
4
|
+
|
|
5
|
+
import asyncpg
|
|
6
|
+
|
|
7
|
+
from dooers.protocol.models import DocumentPart, ImagePart, Run, TextPart, Thread, ThreadEvent
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
class PostgresPersistence:
|
|
11
|
+
def __init__(self, database_url: str, table_prefix: str = "worker_"):
|
|
12
|
+
self._database_url = database_url
|
|
13
|
+
self._prefix = table_prefix
|
|
14
|
+
self._pool: asyncpg.Pool | None = None
|
|
15
|
+
|
|
16
|
+
async def connect(self) -> None:
|
|
17
|
+
self._pool = await asyncpg.create_pool(self._database_url)
|
|
18
|
+
|
|
19
|
+
async def disconnect(self) -> None:
|
|
20
|
+
if self._pool:
|
|
21
|
+
await self._pool.close()
|
|
22
|
+
|
|
23
|
+
async def migrate(self) -> None:
|
|
24
|
+
if not self._pool:
|
|
25
|
+
raise RuntimeError("Not connected")
|
|
26
|
+
|
|
27
|
+
threads_table = f"{self._prefix}threads"
|
|
28
|
+
events_table = f"{self._prefix}events"
|
|
29
|
+
runs_table = f"{self._prefix}runs"
|
|
30
|
+
|
|
31
|
+
async with self._pool.acquire() as conn:
|
|
32
|
+
await conn.execute(f"""
|
|
33
|
+
CREATE TABLE IF NOT EXISTS {threads_table} (
|
|
34
|
+
id TEXT PRIMARY KEY,
|
|
35
|
+
worker_id TEXT NOT NULL,
|
|
36
|
+
user_id TEXT,
|
|
37
|
+
title TEXT,
|
|
38
|
+
created_at TIMESTAMPTZ NOT NULL,
|
|
39
|
+
updated_at TIMESTAMPTZ NOT NULL,
|
|
40
|
+
last_event_at TIMESTAMPTZ NOT NULL
|
|
41
|
+
)
|
|
42
|
+
""")
|
|
43
|
+
|
|
44
|
+
await conn.execute(f"""
|
|
45
|
+
CREATE TABLE IF NOT EXISTS {events_table} (
|
|
46
|
+
id TEXT PRIMARY KEY,
|
|
47
|
+
thread_id TEXT NOT NULL REFERENCES {threads_table}(id),
|
|
48
|
+
run_id TEXT,
|
|
49
|
+
type TEXT NOT NULL,
|
|
50
|
+
actor TEXT NOT NULL,
|
|
51
|
+
user_id TEXT,
|
|
52
|
+
user_name TEXT,
|
|
53
|
+
user_email TEXT,
|
|
54
|
+
content JSONB,
|
|
55
|
+
data JSONB,
|
|
56
|
+
created_at TIMESTAMPTZ NOT NULL
|
|
57
|
+
)
|
|
58
|
+
""")
|
|
59
|
+
|
|
60
|
+
await conn.execute(f"""
|
|
61
|
+
CREATE TABLE IF NOT EXISTS {runs_table} (
|
|
62
|
+
id TEXT PRIMARY KEY,
|
|
63
|
+
thread_id TEXT NOT NULL REFERENCES {threads_table}(id),
|
|
64
|
+
agent_id TEXT,
|
|
65
|
+
status TEXT NOT NULL,
|
|
66
|
+
started_at TIMESTAMPTZ NOT NULL,
|
|
67
|
+
ended_at TIMESTAMPTZ,
|
|
68
|
+
error TEXT
|
|
69
|
+
)
|
|
70
|
+
""")
|
|
71
|
+
|
|
72
|
+
await conn.execute(f"""
|
|
73
|
+
CREATE INDEX IF NOT EXISTS idx_{self._prefix}threads_worker_id
|
|
74
|
+
ON {threads_table}(worker_id)
|
|
75
|
+
""")
|
|
76
|
+
|
|
77
|
+
await conn.execute(f"""
|
|
78
|
+
CREATE INDEX IF NOT EXISTS idx_{self._prefix}threads_user_id
|
|
79
|
+
ON {threads_table}(user_id)
|
|
80
|
+
""")
|
|
81
|
+
|
|
82
|
+
await conn.execute(f"""
|
|
83
|
+
CREATE INDEX IF NOT EXISTS idx_{self._prefix}events_thread_id
|
|
84
|
+
ON {events_table}(thread_id)
|
|
85
|
+
""")
|
|
86
|
+
|
|
87
|
+
await conn.execute(f"""
|
|
88
|
+
CREATE INDEX IF NOT EXISTS idx_{self._prefix}events_user_id
|
|
89
|
+
ON {events_table}(user_id)
|
|
90
|
+
""")
|
|
91
|
+
|
|
92
|
+
await conn.execute(f"""
|
|
93
|
+
CREATE INDEX IF NOT EXISTS idx_{self._prefix}runs_thread_id
|
|
94
|
+
ON {runs_table}(thread_id)
|
|
95
|
+
""")
|
|
96
|
+
|
|
97
|
+
settings_table = f"{self._prefix}settings"
|
|
98
|
+
await conn.execute(f"""
|
|
99
|
+
CREATE TABLE IF NOT EXISTS {settings_table} (
|
|
100
|
+
worker_id TEXT PRIMARY KEY,
|
|
101
|
+
values JSONB NOT NULL DEFAULT '{{}}',
|
|
102
|
+
created_at TIMESTAMPTZ DEFAULT NOW(),
|
|
103
|
+
updated_at TIMESTAMPTZ DEFAULT NOW()
|
|
104
|
+
)
|
|
105
|
+
""")
|
|
106
|
+
|
|
107
|
+
await conn.execute(f"""
|
|
108
|
+
CREATE INDEX IF NOT EXISTS idx_{self._prefix}settings_worker
|
|
109
|
+
ON {settings_table}(worker_id)
|
|
110
|
+
""")
|
|
111
|
+
|
|
112
|
+
async def create_thread(self, thread: Thread) -> None:
|
|
113
|
+
if not self._pool:
|
|
114
|
+
raise RuntimeError("Not connected")
|
|
115
|
+
|
|
116
|
+
table = f"{self._prefix}threads"
|
|
117
|
+
async with self._pool.acquire() as conn:
|
|
118
|
+
await conn.execute(
|
|
119
|
+
f"""
|
|
120
|
+
INSERT INTO {table} (id, worker_id, user_id, title, created_at, updated_at, last_event_at)
|
|
121
|
+
VALUES ($1, $2, $3, $4, $5, $6, $7)
|
|
122
|
+
""",
|
|
123
|
+
thread.id,
|
|
124
|
+
thread.worker_id,
|
|
125
|
+
thread.user_id,
|
|
126
|
+
thread.title,
|
|
127
|
+
thread.created_at,
|
|
128
|
+
thread.updated_at,
|
|
129
|
+
thread.last_event_at,
|
|
130
|
+
)
|
|
131
|
+
|
|
132
|
+
async def get_thread(self, thread_id: str) -> Thread | None:
|
|
133
|
+
if not self._pool:
|
|
134
|
+
raise RuntimeError("Not connected")
|
|
135
|
+
|
|
136
|
+
table = f"{self._prefix}threads"
|
|
137
|
+
async with self._pool.acquire() as conn:
|
|
138
|
+
row = await conn.fetchrow(
|
|
139
|
+
f"SELECT * FROM {table} WHERE id = $1",
|
|
140
|
+
thread_id,
|
|
141
|
+
)
|
|
142
|
+
|
|
143
|
+
if not row:
|
|
144
|
+
return None
|
|
145
|
+
|
|
146
|
+
return Thread(
|
|
147
|
+
id=row["id"],
|
|
148
|
+
worker_id=row["worker_id"],
|
|
149
|
+
user_id=row["user_id"],
|
|
150
|
+
title=row["title"],
|
|
151
|
+
created_at=row["created_at"],
|
|
152
|
+
updated_at=row["updated_at"],
|
|
153
|
+
last_event_at=row["last_event_at"],
|
|
154
|
+
)
|
|
155
|
+
|
|
156
|
+
async def update_thread(self, thread: Thread) -> None:
|
|
157
|
+
if not self._pool:
|
|
158
|
+
raise RuntimeError("Not connected")
|
|
159
|
+
|
|
160
|
+
table = f"{self._prefix}threads"
|
|
161
|
+
async with self._pool.acquire() as conn:
|
|
162
|
+
await conn.execute(
|
|
163
|
+
f"""
|
|
164
|
+
UPDATE {table}
|
|
165
|
+
SET user_id = $1, title = $2, updated_at = $3, last_event_at = $4
|
|
166
|
+
WHERE id = $5
|
|
167
|
+
""",
|
|
168
|
+
thread.user_id,
|
|
169
|
+
thread.title,
|
|
170
|
+
thread.updated_at,
|
|
171
|
+
thread.last_event_at,
|
|
172
|
+
thread.id,
|
|
173
|
+
)
|
|
174
|
+
|
|
175
|
+
async def list_threads(
|
|
176
|
+
self,
|
|
177
|
+
worker_id: str,
|
|
178
|
+
user_id: str | None,
|
|
179
|
+
cursor: str | None,
|
|
180
|
+
limit: int,
|
|
181
|
+
) -> list[Thread]:
|
|
182
|
+
if not self._pool:
|
|
183
|
+
raise RuntimeError("Not connected")
|
|
184
|
+
|
|
185
|
+
table = f"{self._prefix}threads"
|
|
186
|
+
async with self._pool.acquire() as conn:
|
|
187
|
+
if user_id:
|
|
188
|
+
rows = await conn.fetch(
|
|
189
|
+
f"""
|
|
190
|
+
SELECT * FROM {table}
|
|
191
|
+
WHERE worker_id = $1 AND user_id = $2
|
|
192
|
+
ORDER BY last_event_at DESC
|
|
193
|
+
LIMIT $3
|
|
194
|
+
""",
|
|
195
|
+
worker_id,
|
|
196
|
+
user_id,
|
|
197
|
+
limit,
|
|
198
|
+
)
|
|
199
|
+
else:
|
|
200
|
+
rows = await conn.fetch(
|
|
201
|
+
f"""
|
|
202
|
+
SELECT * FROM {table}
|
|
203
|
+
WHERE worker_id = $1
|
|
204
|
+
ORDER BY last_event_at DESC
|
|
205
|
+
LIMIT $2
|
|
206
|
+
""",
|
|
207
|
+
worker_id,
|
|
208
|
+
limit,
|
|
209
|
+
)
|
|
210
|
+
|
|
211
|
+
return [
|
|
212
|
+
Thread(
|
|
213
|
+
id=row["id"],
|
|
214
|
+
worker_id=row["worker_id"],
|
|
215
|
+
user_id=row["user_id"],
|
|
216
|
+
title=row["title"],
|
|
217
|
+
created_at=row["created_at"],
|
|
218
|
+
updated_at=row["updated_at"],
|
|
219
|
+
last_event_at=row["last_event_at"],
|
|
220
|
+
)
|
|
221
|
+
for row in rows
|
|
222
|
+
]
|
|
223
|
+
|
|
224
|
+
async def create_event(self, event: ThreadEvent) -> None:
|
|
225
|
+
if not self._pool:
|
|
226
|
+
raise RuntimeError("Not connected")
|
|
227
|
+
|
|
228
|
+
table = f"{self._prefix}events"
|
|
229
|
+
content_json = None
|
|
230
|
+
if event.content:
|
|
231
|
+
content_json = json.dumps([self._serialize_content_part(p) for p in event.content])
|
|
232
|
+
|
|
233
|
+
data_json = json.dumps(event.data) if event.data else None
|
|
234
|
+
|
|
235
|
+
async with self._pool.acquire() as conn:
|
|
236
|
+
await conn.execute(
|
|
237
|
+
f"""
|
|
238
|
+
INSERT INTO {table} (id, thread_id, run_id, type, actor, user_id, user_name, user_email, content, data, created_at)
|
|
239
|
+
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11)
|
|
240
|
+
""",
|
|
241
|
+
event.id,
|
|
242
|
+
event.thread_id,
|
|
243
|
+
event.run_id,
|
|
244
|
+
event.type,
|
|
245
|
+
event.actor,
|
|
246
|
+
event.user_id,
|
|
247
|
+
event.user_name,
|
|
248
|
+
event.user_email,
|
|
249
|
+
content_json,
|
|
250
|
+
data_json,
|
|
251
|
+
event.created_at,
|
|
252
|
+
)
|
|
253
|
+
|
|
254
|
+
async def get_events(
|
|
255
|
+
self,
|
|
256
|
+
thread_id: str,
|
|
257
|
+
after_event_id: str | None,
|
|
258
|
+
limit: int,
|
|
259
|
+
) -> list[ThreadEvent]:
|
|
260
|
+
if not self._pool:
|
|
261
|
+
raise RuntimeError("Not connected")
|
|
262
|
+
|
|
263
|
+
table = f"{self._prefix}events"
|
|
264
|
+
async with self._pool.acquire() as conn:
|
|
265
|
+
if after_event_id:
|
|
266
|
+
ref_row = await conn.fetchrow(
|
|
267
|
+
f"SELECT created_at FROM {table} WHERE id = $1",
|
|
268
|
+
after_event_id,
|
|
269
|
+
)
|
|
270
|
+
if ref_row:
|
|
271
|
+
rows = await conn.fetch(
|
|
272
|
+
f"""
|
|
273
|
+
SELECT * FROM {table}
|
|
274
|
+
WHERE thread_id = $1 AND created_at > $2
|
|
275
|
+
ORDER BY created_at ASC
|
|
276
|
+
LIMIT $3
|
|
277
|
+
""",
|
|
278
|
+
thread_id,
|
|
279
|
+
ref_row["created_at"],
|
|
280
|
+
limit,
|
|
281
|
+
)
|
|
282
|
+
else:
|
|
283
|
+
rows = await conn.fetch(
|
|
284
|
+
f"""
|
|
285
|
+
SELECT * FROM {table}
|
|
286
|
+
WHERE thread_id = $1
|
|
287
|
+
ORDER BY created_at ASC
|
|
288
|
+
LIMIT $2
|
|
289
|
+
""",
|
|
290
|
+
thread_id,
|
|
291
|
+
limit,
|
|
292
|
+
)
|
|
293
|
+
else:
|
|
294
|
+
rows = await conn.fetch(
|
|
295
|
+
f"""
|
|
296
|
+
SELECT * FROM {table}
|
|
297
|
+
WHERE thread_id = $1
|
|
298
|
+
ORDER BY created_at ASC
|
|
299
|
+
LIMIT $2
|
|
300
|
+
""",
|
|
301
|
+
thread_id,
|
|
302
|
+
limit,
|
|
303
|
+
)
|
|
304
|
+
|
|
305
|
+
events = []
|
|
306
|
+
for row in rows:
|
|
307
|
+
content = None
|
|
308
|
+
if row["content"]:
|
|
309
|
+
content_data = json.loads(row["content"])
|
|
310
|
+
content = [self._deserialize_content_part(p) for p in content_data]
|
|
311
|
+
|
|
312
|
+
data = json.loads(row["data"]) if row["data"] else None
|
|
313
|
+
|
|
314
|
+
events.append(
|
|
315
|
+
ThreadEvent(
|
|
316
|
+
id=row["id"],
|
|
317
|
+
thread_id=row["thread_id"],
|
|
318
|
+
run_id=row["run_id"],
|
|
319
|
+
type=row["type"],
|
|
320
|
+
actor=row["actor"],
|
|
321
|
+
user_id=row["user_id"],
|
|
322
|
+
user_name=row["user_name"],
|
|
323
|
+
user_email=row["user_email"],
|
|
324
|
+
content=content,
|
|
325
|
+
data=data,
|
|
326
|
+
created_at=row["created_at"],
|
|
327
|
+
)
|
|
328
|
+
)
|
|
329
|
+
|
|
330
|
+
return events
|
|
331
|
+
|
|
332
|
+
async def create_run(self, run: Run) -> None:
|
|
333
|
+
if not self._pool:
|
|
334
|
+
raise RuntimeError("Not connected")
|
|
335
|
+
|
|
336
|
+
table = f"{self._prefix}runs"
|
|
337
|
+
async with self._pool.acquire() as conn:
|
|
338
|
+
await conn.execute(
|
|
339
|
+
f"""
|
|
340
|
+
INSERT INTO {table} (id, thread_id, agent_id, status, started_at, ended_at, error)
|
|
341
|
+
VALUES ($1, $2, $3, $4, $5, $6, $7)
|
|
342
|
+
""",
|
|
343
|
+
run.id,
|
|
344
|
+
run.thread_id,
|
|
345
|
+
run.agent_id,
|
|
346
|
+
run.status,
|
|
347
|
+
run.started_at,
|
|
348
|
+
run.ended_at,
|
|
349
|
+
run.error,
|
|
350
|
+
)
|
|
351
|
+
|
|
352
|
+
async def update_run(self, run: Run) -> None:
|
|
353
|
+
if not self._pool:
|
|
354
|
+
raise RuntimeError("Not connected")
|
|
355
|
+
|
|
356
|
+
table = f"{self._prefix}runs"
|
|
357
|
+
async with self._pool.acquire() as conn:
|
|
358
|
+
await conn.execute(
|
|
359
|
+
f"""
|
|
360
|
+
UPDATE {table}
|
|
361
|
+
SET agent_id = $1, status = $2, ended_at = $3, error = $4
|
|
362
|
+
WHERE id = $5
|
|
363
|
+
""",
|
|
364
|
+
run.agent_id,
|
|
365
|
+
run.status,
|
|
366
|
+
run.ended_at,
|
|
367
|
+
run.error,
|
|
368
|
+
run.id,
|
|
369
|
+
)
|
|
370
|
+
|
|
371
|
+
def _serialize_content_part(self, part) -> dict:
|
|
372
|
+
if hasattr(part, "model_dump"):
|
|
373
|
+
return part.model_dump()
|
|
374
|
+
return dict(part)
|
|
375
|
+
|
|
376
|
+
def _deserialize_content_part(self, data: dict):
|
|
377
|
+
part_type = data.get("type")
|
|
378
|
+
if part_type == "text":
|
|
379
|
+
return TextPart(**data)
|
|
380
|
+
elif part_type == "image":
|
|
381
|
+
return ImagePart(**data)
|
|
382
|
+
elif part_type == "document":
|
|
383
|
+
return DocumentPart(**data)
|
|
384
|
+
return data
|
|
385
|
+
|
|
386
|
+
async def get_settings(self, worker_id: str) -> dict[str, Any]:
|
|
387
|
+
"""Get all stored values for a worker. Returns empty dict if none."""
|
|
388
|
+
if not self._pool:
|
|
389
|
+
raise RuntimeError("Not connected")
|
|
390
|
+
|
|
391
|
+
table = f"{self._prefix}settings"
|
|
392
|
+
async with self._pool.acquire() as conn:
|
|
393
|
+
row = await conn.fetchrow(
|
|
394
|
+
f"SELECT values FROM {table} WHERE worker_id = $1",
|
|
395
|
+
worker_id,
|
|
396
|
+
)
|
|
397
|
+
|
|
398
|
+
if not row:
|
|
399
|
+
return {}
|
|
400
|
+
|
|
401
|
+
values = row["values"]
|
|
402
|
+
# asyncpg returns JSONB as dict directly
|
|
403
|
+
if isinstance(values, str):
|
|
404
|
+
return json.loads(values)
|
|
405
|
+
return values
|
|
406
|
+
|
|
407
|
+
async def update_setting(self, worker_id: str, field_id: str, value: Any) -> datetime:
|
|
408
|
+
"""Update a single field value. Returns updated_at timestamp."""
|
|
409
|
+
if not self._pool:
|
|
410
|
+
raise RuntimeError("Not connected")
|
|
411
|
+
|
|
412
|
+
table = f"{self._prefix}settings"
|
|
413
|
+
now = datetime.now(UTC)
|
|
414
|
+
|
|
415
|
+
# Get existing values
|
|
416
|
+
current_values = await self.get_settings(worker_id)
|
|
417
|
+
current_values[field_id] = value
|
|
418
|
+
values_json = json.dumps(current_values)
|
|
419
|
+
|
|
420
|
+
async with self._pool.acquire() as conn:
|
|
421
|
+
await conn.execute(
|
|
422
|
+
f"""
|
|
423
|
+
INSERT INTO {table} (worker_id, values, created_at, updated_at)
|
|
424
|
+
VALUES ($1, $2, $3, $4)
|
|
425
|
+
ON CONFLICT(worker_id) DO UPDATE SET
|
|
426
|
+
values = EXCLUDED.values,
|
|
427
|
+
updated_at = EXCLUDED.updated_at
|
|
428
|
+
""",
|
|
429
|
+
worker_id,
|
|
430
|
+
values_json,
|
|
431
|
+
now,
|
|
432
|
+
now,
|
|
433
|
+
)
|
|
434
|
+
return now
|
|
435
|
+
|
|
436
|
+
async def set_settings(self, worker_id: str, values: dict[str, Any]) -> datetime:
|
|
437
|
+
"""Replace all settings values. Returns updated_at timestamp."""
|
|
438
|
+
if not self._pool:
|
|
439
|
+
raise RuntimeError("Not connected")
|
|
440
|
+
|
|
441
|
+
table = f"{self._prefix}settings"
|
|
442
|
+
now = datetime.now(UTC)
|
|
443
|
+
values_json = json.dumps(values)
|
|
444
|
+
|
|
445
|
+
async with self._pool.acquire() as conn:
|
|
446
|
+
await conn.execute(
|
|
447
|
+
f"""
|
|
448
|
+
INSERT INTO {table} (worker_id, values, created_at, updated_at)
|
|
449
|
+
VALUES ($1, $2, $3, $4)
|
|
450
|
+
ON CONFLICT(worker_id) DO UPDATE SET
|
|
451
|
+
values = EXCLUDED.values,
|
|
452
|
+
updated_at = EXCLUDED.updated_at
|
|
453
|
+
""",
|
|
454
|
+
worker_id,
|
|
455
|
+
values_json,
|
|
456
|
+
now,
|
|
457
|
+
now,
|
|
458
|
+
)
|
|
459
|
+
return now
|