dooers-workers 0.2.0 (dooers_workers-0.2.0-py3-none-any.whl)
This diff shows the contents of a publicly released package version as it appears in its public registry.
- dooers/__init__.py +73 -0
- dooers/broadcast.py +180 -0
- dooers/config.py +22 -0
- dooers/features/__init__.py +0 -0
- dooers/features/analytics/__init__.py +12 -0
- dooers/features/analytics/collector.py +219 -0
- dooers/features/analytics/models.py +50 -0
- dooers/features/analytics/worker_analytics.py +100 -0
- dooers/features/settings/__init__.py +12 -0
- dooers/features/settings/broadcaster.py +97 -0
- dooers/features/settings/models.py +72 -0
- dooers/features/settings/worker_settings.py +85 -0
- dooers/handlers/__init__.py +16 -0
- dooers/handlers/memory.py +105 -0
- dooers/handlers/request.py +12 -0
- dooers/handlers/response.py +66 -0
- dooers/handlers/router.py +957 -0
- dooers/migrations/__init__.py +3 -0
- dooers/migrations/schemas.py +126 -0
- dooers/persistence/__init__.py +9 -0
- dooers/persistence/base.py +42 -0
- dooers/persistence/postgres.py +459 -0
- dooers/persistence/sqlite.py +433 -0
- dooers/protocol/__init__.py +108 -0
- dooers/protocol/frames.py +298 -0
- dooers/protocol/models.py +72 -0
- dooers/protocol/parser.py +19 -0
- dooers/registry.py +101 -0
- dooers/server.py +162 -0
- dooers/settings.py +3 -0
- dooers_workers-0.2.0.dist-info/METADATA +228 -0
- dooers_workers-0.2.0.dist-info/RECORD +33 -0
- dooers_workers-0.2.0.dist-info/WHEEL +4 -0
dooers/persistence/sqlite.py
@@ -0,0 +1,433 @@
+import json
+from datetime import UTC, datetime
+from typing import Any
+
+import aiosqlite
+
+from dooers.protocol.models import DocumentPart, ImagePart, Run, TextPart, Thread, ThreadEvent
+
+
+class SqlitePersistence:
+    def __init__(self, database_url: str, table_prefix: str = "worker_"):
+        self._database_url = database_url.replace("sqlite:///", "")
+        self._prefix = table_prefix
+        self._conn: aiosqlite.Connection | None = None
+
+    async def connect(self) -> None:
+        self._conn = await aiosqlite.connect(self._database_url)
+        self._conn.row_factory = aiosqlite.Row
+
+    async def disconnect(self) -> None:
+        if self._conn:
+            await self._conn.close()
+
+    async def migrate(self) -> None:
+        if not self._conn:
+            raise RuntimeError("Not connected")
+
+        threads_table = f"{self._prefix}threads"
+        events_table = f"{self._prefix}events"
+        runs_table = f"{self._prefix}runs"
+
+        await self._conn.executescript(f"""
+            CREATE TABLE IF NOT EXISTS {threads_table} (
+                id TEXT PRIMARY KEY,
+                worker_id TEXT NOT NULL,
+                user_id TEXT,
+                title TEXT,
+                created_at TEXT NOT NULL,
+                updated_at TEXT NOT NULL,
+                last_event_at TEXT NOT NULL
+            );
+
+            CREATE TABLE IF NOT EXISTS {events_table} (
+                id TEXT PRIMARY KEY,
+                thread_id TEXT NOT NULL,
+                run_id TEXT,
+                type TEXT NOT NULL,
+                actor TEXT NOT NULL,
+                user_id TEXT,
+                user_name TEXT,
+                user_email TEXT,
+                content TEXT,
+                data TEXT,
+                created_at TEXT NOT NULL,
+                FOREIGN KEY (thread_id) REFERENCES {threads_table}(id)
+            );
+
+            CREATE TABLE IF NOT EXISTS {runs_table} (
+                id TEXT PRIMARY KEY,
+                thread_id TEXT NOT NULL,
+                agent_id TEXT,
+                status TEXT NOT NULL,
+                started_at TEXT NOT NULL,
+                ended_at TEXT,
+                error TEXT,
+                FOREIGN KEY (thread_id) REFERENCES {threads_table}(id)
+            );
+
+            CREATE INDEX IF NOT EXISTS idx_{self._prefix}threads_worker_id
+                ON {threads_table}(worker_id);
+            CREATE INDEX IF NOT EXISTS idx_{self._prefix}threads_user_id
+                ON {threads_table}(user_id);
+            CREATE INDEX IF NOT EXISTS idx_{self._prefix}events_thread_id
+                ON {events_table}(thread_id);
+            CREATE INDEX IF NOT EXISTS idx_{self._prefix}events_user_id
+                ON {events_table}(user_id);
+            CREATE INDEX IF NOT EXISTS idx_{self._prefix}runs_thread_id
+                ON {runs_table}(thread_id);
+
+            -- "values" must be quoted: VALUES is a reserved word in SQLite
+            CREATE TABLE IF NOT EXISTS {self._prefix}settings (
+                worker_id TEXT PRIMARY KEY,
+                "values" TEXT NOT NULL DEFAULT '{{}}',
+                created_at TEXT DEFAULT (datetime('now')),
+                updated_at TEXT DEFAULT (datetime('now'))
+            );
+            CREATE INDEX IF NOT EXISTS idx_{self._prefix}settings_worker
+                ON {self._prefix}settings(worker_id);
+        """)
+        await self._conn.commit()
+
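As a point of reference, a minimal bring-up sketch using only the methods shown above (illustrative, not part of the package; note that `__init__` strips the `sqlite:///` URL prefix, so a bare file path works too):

    import asyncio

    async def main() -> None:
        persistence = SqlitePersistence("sqlite:///worker.db")
        await persistence.connect()     # open the aiosqlite connection
        await persistence.migrate()     # create tables and indexes if missing
        ...
        await persistence.disconnect()

    asyncio.run(main())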
+    async def create_thread(self, thread: Thread) -> None:
+        if not self._conn:
+            raise RuntimeError("Not connected")
+
+        table = f"{self._prefix}threads"
+        await self._conn.execute(
+            f"""
+            INSERT INTO {table} (id, worker_id, user_id, title, created_at, updated_at, last_event_at)
+            VALUES (?, ?, ?, ?, ?, ?, ?)
+            """,
+            (
+                thread.id,
+                thread.worker_id,
+                thread.user_id,
+                thread.title,
+                thread.created_at.isoformat(),
+                thread.updated_at.isoformat(),
+                thread.last_event_at.isoformat(),
+            ),
+        )
+        await self._conn.commit()
+
+    async def get_thread(self, thread_id: str) -> Thread | None:
+        if not self._conn:
+            raise RuntimeError("Not connected")
+
+        table = f"{self._prefix}threads"
+        cursor = await self._conn.execute(
+            f"SELECT * FROM {table} WHERE id = ?",
+            (thread_id,),
+        )
+        row = await cursor.fetchone()
+
+        if not row:
+            return None
+
+        return Thread(
+            id=row["id"],
+            worker_id=row["worker_id"],
+            user_id=row["user_id"],
+            title=row["title"],
+            created_at=datetime.fromisoformat(row["created_at"]),
+            updated_at=datetime.fromisoformat(row["updated_at"]),
+            last_event_at=datetime.fromisoformat(row["last_event_at"]),
+        )
+
+    async def update_thread(self, thread: Thread) -> None:
+        if not self._conn:
+            raise RuntimeError("Not connected")
+
+        table = f"{self._prefix}threads"
+        await self._conn.execute(
+            f"""
+            UPDATE {table}
+            SET user_id = ?, title = ?, updated_at = ?, last_event_at = ?
+            WHERE id = ?
+            """,
+            (
+                thread.user_id,
+                thread.title,
+                thread.updated_at.isoformat(),
+                thread.last_event_at.isoformat(),
+                thread.id,
+            ),
+        )
+        await self._conn.commit()
+
+    async def list_threads(
+        self,
+        worker_id: str,
+        user_id: str | None,
+        cursor: str | None,  # declared but not yet used for pagination
+        limit: int,
+    ) -> list[Thread]:
+        if not self._conn:
+            raise RuntimeError("Not connected")
+
+        table = f"{self._prefix}threads"
+
+        if user_id:
+            query = f"""
+                SELECT * FROM {table}
+                WHERE worker_id = ? AND user_id = ?
+                ORDER BY last_event_at DESC
+                LIMIT ?
+            """
+            cursor_result = await self._conn.execute(query, (worker_id, user_id, limit))
+        else:
+            query = f"""
+                SELECT * FROM {table}
+                WHERE worker_id = ?
+                ORDER BY last_event_at DESC
+                LIMIT ?
+            """
+            cursor_result = await self._conn.execute(query, (worker_id, limit))
+
+        rows = await cursor_result.fetchall()
+
+        return [
+            Thread(
+                id=row["id"],
+                worker_id=row["worker_id"],
+                user_id=row["user_id"],
+                title=row["title"],
+                created_at=datetime.fromisoformat(row["created_at"]),
+                updated_at=datetime.fromisoformat(row["updated_at"]),
+                last_event_at=datetime.fromisoformat(row["last_event_at"]),
+            )
+            for row in rows
+        ]
+
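Taken together, the thread methods above give a full round trip. A hedged sketch, continuing the bring-up example and assuming `Thread` accepts the same keyword arguments `get_thread` constructs it with:

    from datetime import UTC, datetime
    from dooers.protocol.models import Thread

    now = datetime.now(UTC)
    thread = Thread(id="t-1", worker_id="w-1", user_id="u-1", title="Demo",
                    created_at=now, updated_at=now, last_event_at=now)
    await persistence.create_thread(thread)
    fetched = await persistence.get_thread("t-1")   # Thread | None
    recent = await persistence.list_threads("w-1", "u-1", cursor=None, limit=20)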
+    async def create_event(self, event: ThreadEvent) -> None:
+        if not self._conn:
+            raise RuntimeError("Not connected")
+
+        table = f"{self._prefix}events"
+        content_json = None
+        if event.content:
+            content_json = json.dumps([self._serialize_content_part(p) for p in event.content])
+
+        data_json = json.dumps(event.data) if event.data else None
+
+        await self._conn.execute(
+            f"""
+            INSERT INTO {table} (id, thread_id, run_id, type, actor, user_id, user_name, user_email, content, data, created_at)
+            VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
+            """,
+            (
+                event.id,
+                event.thread_id,
+                event.run_id,
+                event.type,
+                event.actor,
+                event.user_id,
+                event.user_name,
+                event.user_email,
+                content_json,
+                data_json,
+                event.created_at.isoformat(),
+            ),
+        )
+        await self._conn.commit()
+
+    async def get_events(
+        self,
+        thread_id: str,
+        after_event_id: str | None,
+        limit: int,
+    ) -> list[ThreadEvent]:
+        if not self._conn:
+            raise RuntimeError("Not connected")
+
+        table = f"{self._prefix}events"
+
+        # Resolve the cursor event's timestamp; fall back to an unfiltered
+        # scan when no cursor is given or the referenced event is missing.
+        ref_row = None
+        if after_event_id:
+            cursor_result = await self._conn.execute(
+                f"SELECT created_at FROM {table} WHERE id = ?",
+                (after_event_id,),
+            )
+            ref_row = await cursor_result.fetchone()
+
+        if ref_row:
+            query = f"""
+                SELECT * FROM {table}
+                WHERE thread_id = ? AND created_at > ?
+                ORDER BY created_at ASC
+                LIMIT ?
+            """
+            cursor_result = await self._conn.execute(
+                query,
+                (thread_id, ref_row["created_at"], limit),
+            )
+        else:
+            query = f"""
+                SELECT * FROM {table}
+                WHERE thread_id = ?
+                ORDER BY created_at ASC
+                LIMIT ?
+            """
+            cursor_result = await self._conn.execute(query, (thread_id, limit))
+
+        rows = await cursor_result.fetchall()
+
+        events = []
+        for row in rows:
+            content = None
+            if row["content"]:
+                content_data = json.loads(row["content"])
+                content = [self._deserialize_content_part(p) for p in content_data]
+
+            data = json.loads(row["data"]) if row["data"] else None
+
+            events.append(
+                ThreadEvent(
+                    id=row["id"],
+                    thread_id=row["thread_id"],
+                    run_id=row["run_id"],
+                    type=row["type"],
+                    actor=row["actor"],
+                    user_id=row["user_id"],
+                    user_name=row["user_name"],
+                    user_email=row["user_email"],
+                    content=content,
+                    data=data,
+                    created_at=datetime.fromisoformat(row["created_at"]),
+                )
+            )
+
+        return events
+
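`get_events` pages by timestamp: the row for `after_event_id` is looked up first, and its `created_at` becomes the exclusive lower bound. A sketch of draining a thread in batches (`handle` is a hypothetical consumer):

    page = await persistence.get_events("t-1", after_event_id=None, limit=100)
    while page:
        handle(page)                       # hypothetical consumer
        last_id = page[-1].id
        page = await persistence.get_events("t-1", after_event_id=last_id, limit=100)

Because the filter is a strict `created_at > ?` over ISO-8601 text, any event sharing the cursor event's exact timestamp is skipped; a tiebreaker on id would make the cursor exact.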
+    async def create_run(self, run: Run) -> None:
+        if not self._conn:
+            raise RuntimeError("Not connected")
+
+        table = f"{self._prefix}runs"
+        await self._conn.execute(
+            f"""
+            INSERT INTO {table} (id, thread_id, agent_id, status, started_at, ended_at, error)
+            VALUES (?, ?, ?, ?, ?, ?, ?)
+            """,
+            (
+                run.id,
+                run.thread_id,
+                run.agent_id,
+                run.status,
+                run.started_at.isoformat(),
+                run.ended_at.isoformat() if run.ended_at else None,
+                run.error,
+            ),
+        )
+        await self._conn.commit()
+
+    async def update_run(self, run: Run) -> None:
+        if not self._conn:
+            raise RuntimeError("Not connected")
+
+        table = f"{self._prefix}runs"
+        await self._conn.execute(
+            f"""
+            UPDATE {table}
+            SET agent_id = ?, status = ?, ended_at = ?, error = ?
+            WHERE id = ?
+            """,
+            (
+                run.agent_id,
+                run.status,
+                run.ended_at.isoformat() if run.ended_at else None,
+                run.error,
+                run.id,
+            ),
+        )
+        await self._conn.commit()
+
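A run row is written once at start and updated in place afterwards; only `agent_id`, `status`, `ended_at`, and `error` are mutable. A hedged sketch, assuming `Run` takes the fields the INSERT binds and that status strings such as "running" and "completed" are valid members of the `RunStatus` type exported by the models module:

    from datetime import UTC, datetime
    from dooers.protocol.models import Run

    run = Run(id="r-1", thread_id="t-1", agent_id="agent-a", status="running",
              started_at=datetime.now(UTC), ended_at=None, error=None)
    await persistence.create_run(run)

    run.status = "completed"             # assumed status literal
    run.ended_at = datetime.now(UTC)
    await persistence.update_run(run)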
+    def _serialize_content_part(self, part) -> dict:
+        if hasattr(part, "model_dump"):
+            return part.model_dump()
+        return dict(part)
+
+    def _deserialize_content_part(self, data: dict):
+        part_type = data.get("type")
+        if part_type == "text":
+            return TextPart(**data)
+        elif part_type == "image":
+            return ImagePart(**data)
+        elif part_type == "document":
+            return DocumentPart(**data)
+        return data
+
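These two helpers keep storage format-tolerant: anything with `model_dump` (a Pydantic model) serializes itself, plain dicts pass through, and an unrecognized `type` deserializes back to the raw dict instead of raising. A round-trip sketch; the `text` field on `TextPart` is an assumption, not confirmed by this diff:

    part = TextPart(type="text", text="hello")               # 'text' field assumed
    blob = persistence._serialize_content_part(part)          # plain dict
    restored = persistence._deserialize_content_part(blob)    # TextPart again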
+    async def get_settings(self, worker_id: str) -> dict[str, Any]:
+        """Get all stored values for a worker. Returns empty dict if none."""
+        if not self._conn:
+            raise RuntimeError("Not connected")
+
+        table = f"{self._prefix}settings"
+        cursor = await self._conn.execute(
+            f'SELECT "values" FROM {table} WHERE worker_id = ?',
+            (worker_id,),
+        )
+        row = await cursor.fetchone()
+
+        if not row:
+            return {}
+
+        return json.loads(row["values"])
+
+    async def update_setting(self, worker_id: str, field_id: str, value: Any) -> datetime:
+        """Update a single field value. Returns updated_at timestamp."""
+        if not self._conn:
+            raise RuntimeError("Not connected")
+
+        table = f"{self._prefix}settings"
+        now = datetime.now(UTC)
+        now_str = now.isoformat()
+
+        # Get existing values, apply the single-field change, write back
+        current_values = await self.get_settings(worker_id)
+        current_values[field_id] = value
+        values_json = json.dumps(current_values)
+
+        # Upsert ("values" is quoted because VALUES is a reserved word)
+        await self._conn.execute(
+            f"""
+            INSERT INTO {table} (worker_id, "values", created_at, updated_at)
+            VALUES (?, ?, ?, ?)
+            ON CONFLICT(worker_id) DO UPDATE SET
+                "values" = excluded."values",
+                updated_at = excluded.updated_at
+            """,
+            (worker_id, values_json, now_str, now_str),
+        )
+        await self._conn.commit()
+        return now
+
+    async def set_settings(self, worker_id: str, values: dict[str, Any]) -> datetime:
+        """Replace all settings values. Returns updated_at timestamp."""
+        if not self._conn:
+            raise RuntimeError("Not connected")
+
+        table = f"{self._prefix}settings"
+        now = datetime.now(UTC)
+        now_str = now.isoformat()
+        values_json = json.dumps(values)
+
+        # Upsert
+        await self._conn.execute(
+            f"""
+            INSERT INTO {table} (worker_id, "values", created_at, updated_at)
+            VALUES (?, ?, ?, ?)
+            ON CONFLICT(worker_id) DO UPDATE SET
+                "values" = excluded."values",
+                updated_at = excluded.updated_at
+            """,
+            (worker_id, values_json, now_str, now_str),
+        )
+        await self._conn.commit()
+        return now
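Settings for a worker live in a single JSON document, so `update_setting` is a read-modify-write of the whole blob rather than a field-level update; two concurrent writers can silently drop each other's changes. A minimal usage sketch:

    ts = await persistence.update_setting("w-1", "theme", "dark")   # returns updated_at
    current = await persistence.get_settings("w-1")                  # {"theme": "dark"}
    ts = await persistence.set_settings("w-1", {})                   # replace everything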
dooers/protocol/__init__.py
@@ -0,0 +1,108 @@
+from dooers.protocol.frames import (
+    # Payload types
+    AnalyticsSubscribePayload,
+    AnalyticsUnsubscribePayload,
+    # Analytics C2S frames
+    C2S_AnalyticsSubscribe,
+    C2S_AnalyticsUnsubscribe,
+    # Existing C2S frames
+    C2S_Connect,
+    C2S_EventCreate,
+    C2S_Feedback,
+    # Settings C2S frames
+    C2S_SettingsPatch,
+    C2S_SettingsSubscribe,
+    C2S_SettingsUnsubscribe,
+    C2S_ThreadList,
+    C2S_ThreadSubscribe,
+    C2S_ThreadUnsubscribe,
+    FeedbackAckPayload,
+    FeedbackPayload,
+    # Existing S2C frames
+    S2C_Ack,
+    # Analytics S2C frames
+    S2C_AnalyticsEvent,
+    S2C_EventAppend,
+    S2C_FeedbackAck,
+    S2C_RunUpsert,
+    # Settings S2C frames
+    S2C_SettingsPatch,
+    S2C_SettingsSnapshot,
+    S2C_ThreadListResult,
+    S2C_ThreadSnapshot,
+    S2C_ThreadUpsert,
+    SettingsPatchBroadcastPayload,
+    SettingsPatchPayload,
+    SettingsSnapshotPayload,
+    SettingsSubscribePayload,
+    SettingsUnsubscribePayload,
+    WSFrame,
+)
+from dooers.protocol.models import (
+    Actor,
+    ContentPart,
+    DocumentPart,
+    EventType,
+    ImagePart,
+    Run,
+    RunStatus,
+    TextPart,
+    Thread,
+    ThreadEvent,
+)
+from dooers.protocol.parser import parse_frame, serialize_frame
+
+__all__ = [
+    # Models
+    "ContentPart",
+    "TextPart",
+    "ImagePart",
+    "DocumentPart",
+    "Thread",
+    "ThreadEvent",
+    "Run",
+    "RunStatus",
+    "Actor",
+    "EventType",
+    "WSFrame",
+    # Existing C2S frames
+    "C2S_Connect",
+    "C2S_ThreadList",
+    "C2S_ThreadSubscribe",
+    "C2S_ThreadUnsubscribe",
+    "C2S_EventCreate",
+    # Analytics C2S frames
+    "C2S_AnalyticsSubscribe",
+    "C2S_AnalyticsUnsubscribe",
+    "C2S_Feedback",
+    # Settings C2S frames
+    "C2S_SettingsSubscribe",
+    "C2S_SettingsUnsubscribe",
+    "C2S_SettingsPatch",
+    # Existing S2C frames
+    "S2C_Ack",
+    "S2C_ThreadListResult",
+    "S2C_ThreadSnapshot",
+    "S2C_EventAppend",
+    "S2C_ThreadUpsert",
+    "S2C_RunUpsert",
+    # Analytics S2C frames
+    "S2C_AnalyticsEvent",
+    "S2C_FeedbackAck",
+    # Settings S2C frames
+    "S2C_SettingsSnapshot",
+    "S2C_SettingsPatch",
+    # Payload types
+    "AnalyticsSubscribePayload",
+    "AnalyticsUnsubscribePayload",
+    "FeedbackPayload",
+    "FeedbackAckPayload",
+    "SettingsSubscribePayload",
+    "SettingsUnsubscribePayload",
+    "SettingsPatchPayload",
+    "SettingsSnapshotPayload",
+    "SettingsPatchBroadcastPayload",
+    # Parser
+    "parse_frame",
+    "serialize_frame",
+]
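The net effect of this module is a flat public namespace: consumers can import models, frame classes, and the parser straight from `dooers.protocol` rather than from its submodules. A sketch; the parser signatures are assumed from their names and are not confirmed by this diff:

    from dooers.protocol import Thread, WSFrame, parse_frame, serialize_frame

    frame = parse_frame(raw_message)     # assumed: wire payload -> WSFrame
    wire = serialize_frame(frame)        # assumed: WSFrame -> wire payload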