@agentunion/kite 1.0.6 → 1.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (112)
  1. package/cli.js +127 -25
  2. package/core/event_hub/entry.py +384 -61
  3. package/core/event_hub/hub.py +8 -0
  4. package/core/event_hub/module.md +0 -1
  5. package/core/event_hub/server.py +169 -38
  6. package/core/kite_log.py +241 -0
  7. package/core/launcher/entry.py +1306 -425
  8. package/core/launcher/module_scanner.py +10 -9
  9. package/core/launcher/process_manager.py +555 -121
  10. package/core/registry/entry.py +335 -30
  11. package/core/registry/server.py +339 -256
  12. package/core/registry/store.py +13 -2
  13. package/extensions/agents/__init__.py +1 -0
  14. package/extensions/agents/assistant/__init__.py +1 -0
  15. package/extensions/agents/assistant/entry.py +380 -0
  16. package/extensions/agents/assistant/module.md +22 -0
  17. package/extensions/agents/assistant/server.py +236 -0
  18. package/extensions/channels/__init__.py +1 -0
  19. package/extensions/channels/acp_channel/__init__.py +1 -0
  20. package/extensions/channels/acp_channel/entry.py +380 -0
  21. package/extensions/channels/acp_channel/module.md +22 -0
  22. package/extensions/channels/acp_channel/server.py +236 -0
  23. package/{core → extensions}/event_hub_bench/entry.py +664 -371
  24. package/{core → extensions}/event_hub_bench/module.md +4 -2
  25. package/extensions/services/backup/__init__.py +1 -0
  26. package/extensions/services/backup/entry.py +380 -0
  27. package/extensions/services/backup/module.md +22 -0
  28. package/extensions/services/backup/server.py +244 -0
  29. package/extensions/services/model_service/__init__.py +1 -0
  30. package/extensions/services/model_service/entry.py +380 -0
  31. package/extensions/services/model_service/module.md +22 -0
  32. package/extensions/services/model_service/server.py +236 -0
  33. package/extensions/services/watchdog/entry.py +460 -143
  34. package/extensions/services/watchdog/module.md +3 -0
  35. package/extensions/services/watchdog/monitor.py +128 -13
  36. package/extensions/services/watchdog/server.py +75 -13
  37. package/extensions/services/web/__init__.py +1 -0
  38. package/extensions/services/web/config.yaml +149 -0
  39. package/extensions/services/web/entry.py +487 -0
  40. package/extensions/services/web/module.md +24 -0
  41. package/extensions/services/web/routes/__init__.py +1 -0
  42. package/extensions/services/web/routes/routes_call.py +189 -0
  43. package/extensions/services/web/routes/routes_config.py +512 -0
  44. package/extensions/services/web/routes/routes_contacts.py +98 -0
  45. package/extensions/services/web/routes/routes_devlog.py +99 -0
  46. package/extensions/services/web/routes/routes_phone.py +81 -0
  47. package/extensions/services/web/routes/routes_sms.py +48 -0
  48. package/extensions/services/web/routes/routes_stats.py +17 -0
  49. package/extensions/services/web/routes/routes_voicechat.py +554 -0
  50. package/extensions/services/web/routes/schemas.py +216 -0
  51. package/extensions/services/web/server.py +332 -0
  52. package/extensions/services/web/static/css/style.css +1064 -0
  53. package/extensions/services/web/static/index.html +1445 -0
  54. package/extensions/services/web/static/js/app.js +4671 -0
  55. package/extensions/services/web/vendor/__init__.py +1 -0
  56. package/extensions/services/web/vendor/bluetooth/audio.py +348 -0
  57. package/extensions/services/web/vendor/bluetooth/contacts.py +251 -0
  58. package/extensions/services/web/vendor/bluetooth/manager.py +395 -0
  59. package/extensions/services/web/vendor/bluetooth/sms.py +290 -0
  60. package/extensions/services/web/vendor/bluetooth/telephony.py +274 -0
  61. package/extensions/services/web/vendor/config.py +139 -0
  62. package/extensions/services/web/vendor/conversation/__init__.py +0 -0
  63. package/extensions/services/web/vendor/conversation/asr.py +936 -0
  64. package/extensions/services/web/vendor/conversation/engine.py +548 -0
  65. package/extensions/services/web/vendor/conversation/llm.py +534 -0
  66. package/extensions/services/web/vendor/conversation/mcp_tools.py +190 -0
  67. package/extensions/services/web/vendor/conversation/tts.py +322 -0
  68. package/extensions/services/web/vendor/conversation/vad.py +138 -0
  69. package/extensions/services/web/vendor/storage/__init__.py +1 -0
  70. package/extensions/services/web/vendor/storage/identity.py +312 -0
  71. package/extensions/services/web/vendor/storage/store.py +507 -0
  72. package/extensions/services/web/vendor/task/__init__.py +0 -0
  73. package/extensions/services/web/vendor/task/manager.py +864 -0
  74. package/extensions/services/web/vendor/task/models.py +45 -0
  75. package/extensions/services/web/vendor/task/webhook.py +263 -0
  76. package/extensions/services/web/vendor/tools/__init__.py +0 -0
  77. package/extensions/services/web/vendor/tools/registry.py +321 -0
  78. package/main.py +344 -4
  79. package/package.json +11 -2
  80. package/core/__pycache__/__init__.cpython-313.pyc +0 -0
  81. package/core/__pycache__/data_dir.cpython-313.pyc +0 -0
  82. package/core/data_dir.py +0 -62
  83. package/core/event_hub/__pycache__/__init__.cpython-313.pyc +0 -0
  84. package/core/event_hub/__pycache__/bench.cpython-313.pyc +0 -0
  85. package/core/event_hub/__pycache__/bench_perf.cpython-313.pyc +0 -0
  86. package/core/event_hub/__pycache__/dedup.cpython-313.pyc +0 -0
  87. package/core/event_hub/__pycache__/entry.cpython-313.pyc +0 -0
  88. package/core/event_hub/__pycache__/hub.cpython-313.pyc +0 -0
  89. package/core/event_hub/__pycache__/router.cpython-313.pyc +0 -0
  90. package/core/event_hub/__pycache__/server.cpython-313.pyc +0 -0
  91. package/core/event_hub/bench_results/2026-02-28_13-26-48.json +0 -51
  92. package/core/event_hub/bench_results/2026-02-28_13-44-45.json +0 -51
  93. package/core/event_hub/bench_results/2026-02-28_13-45-39.json +0 -51
  94. package/core/launcher/__pycache__/__init__.cpython-313.pyc +0 -0
  95. package/core/launcher/__pycache__/entry.cpython-313.pyc +0 -0
  96. package/core/launcher/__pycache__/module_scanner.cpython-313.pyc +0 -0
  97. package/core/launcher/__pycache__/process_manager.cpython-313.pyc +0 -0
  98. package/core/launcher/data/log/lifecycle.jsonl +0 -1158
  99. package/core/launcher/data/token.txt +0 -1
  100. package/core/registry/__pycache__/__init__.cpython-313.pyc +0 -0
  101. package/core/registry/__pycache__/entry.cpython-313.pyc +0 -0
  102. package/core/registry/__pycache__/server.cpython-313.pyc +0 -0
  103. package/core/registry/__pycache__/store.cpython-313.pyc +0 -0
  104. package/core/registry/data/port.txt +0 -1
  105. package/core/registry/data/port_484.txt +0 -1
  106. package/extensions/__pycache__/__init__.cpython-313.pyc +0 -0
  107. package/extensions/services/__pycache__/__init__.cpython-313.pyc +0 -0
  108. package/extensions/services/watchdog/__pycache__/__init__.cpython-313.pyc +0 -0
  109. package/extensions/services/watchdog/__pycache__/entry.cpython-313.pyc +0 -0
  110. package/extensions/services/watchdog/__pycache__/monitor.cpython-313.pyc +0 -0
  111. package/extensions/services/watchdog/__pycache__/server.cpython-313.pyc +0 -0
  112. /package/{core/event_hub/bench_results/.gitkeep → extensions/services/web/vendor/bluetooth/__init__.py} +0 -0
@@ -0,0 +1,507 @@
1
+ """File-based storage engine using JSONL and Markdown."""
2
+
3
+ from __future__ import annotations
4
+
5
+ import json
6
+ import uuid
7
+ from collections import defaultdict
8
+ from datetime import datetime, timedelta, timezone
9
+ from pathlib import Path
10
+ from typing import Any
11
+
12
+ import aiofiles
13
+ import aiofiles.os
14
+
15
+ from .. import config as cfg
16
+
17
+
18
def _data_path(*parts: str) -> Path:
    """Resolve *parts* as a path under the configured data directory."""
    base = cfg.data_dir()
    return base.joinpath(*parts)
20
+
21
+
22
+ def _now_iso() -> str:
23
+ return datetime.now(timezone.utc).isoformat()
24
+
25
+
26
+ # ---------------------------------------------------------------------------
27
+ # Generic JSONL helpers
28
+ # ---------------------------------------------------------------------------
29
+
30
async def append_jsonl(filepath: Path, record: dict[str, Any]) -> None:
    """Append *record* as one JSON line to *filepath*, creating parent dirs."""
    filepath.parent.mkdir(parents=True, exist_ok=True)
    line = json.dumps(record, ensure_ascii=False)
    async with aiofiles.open(filepath, "a", encoding="utf-8") as f:
        await f.write(line + "\n")
35
+
36
+
37
async def read_jsonl(filepath: Path) -> list[dict[str, Any]]:
    """Parse every non-empty line of *filepath* into a dict.

    Missing file yields an empty list; malformed JSON lines are skipped so
    one corrupt line cannot poison the whole read.
    """
    if not filepath.exists():
        return []
    out: list[dict[str, Any]] = []
    async with aiofiles.open(filepath, "r", encoding="utf-8") as f:
        async for raw in f:
            stripped = raw.strip()
            if not stripped:
                continue
            try:
                out.append(json.loads(stripped))
            except json.JSONDecodeError:
                # tolerate corrupt lines rather than failing the whole read
                continue
    return out
51
+
52
+
53
async def read_jsonl_paginated(
    filepath: Path,
    page: int = 1,
    page_size: int = 20,
    filter_fn=None,
) -> tuple[list[dict[str, Any]], int]:
    """Read JSONL with optional filter and pagination.

    Records are returned newest-first (file order reversed).  *page* is
    1-based; values below 1 are clamped to 1 — previously page 0 or a
    negative page produced a negative slice start and silently returned
    the wrong window of records.

    Returns (page_records, total_matching).
    """
    all_records = await read_jsonl(filepath)
    if filter_fn:
        all_records = [r for r in all_records if filter_fn(r)]
    # newest first
    all_records.reverse()
    total = len(all_records)
    page = max(page, 1)
    start = (page - 1) * page_size
    end = start + page_size
    return all_records[start:end], total
72
+
73
+
74
async def update_jsonl_record(
    filepath: Path, match_key: str, match_value: Any, updates: dict[str, Any]
) -> bool:
    """Update first record in JSONL where record[match_key] == match_value.

    The rewrite goes through a temporary file followed by an atomic
    ``os.replace`` so a crash mid-write cannot truncate the original data
    (the previous in-place ``open(..., "w")`` rewrite could lose the file).

    Returns True when a matching record was found and updated.
    """
    if not filepath.exists():
        return False
    records = await read_jsonl(filepath)
    found = False
    for rec in records:
        if rec.get(match_key) == match_value:
            rec.update(updates)
            found = True
            break
    if found:
        tmp = filepath.with_suffix(filepath.suffix + ".tmp")
        async with aiofiles.open(tmp, "w", encoding="utf-8") as f:
            for rec in records:
                await f.write(json.dumps(rec, ensure_ascii=False) + "\n")
        # atomic swap: readers see either the old or the new file, never a
        # half-written one
        await aiofiles.os.replace(tmp, filepath)
    return found
92
+
93
+
94
async def delete_jsonl_record(filepath: Path, match_key: str, match_value: Any) -> bool:
    """Delete first record where record[match_key] == match_value.

    Rewrites via a temporary file plus atomic ``os.replace`` so a crash
    mid-write cannot truncate the data file (the previous in-place rewrite
    could).  Returns True when a record was removed.
    """
    if not filepath.exists():
        return False
    records = await read_jsonl(filepath)
    new_records = [r for r in records if r.get(match_key) != match_value]
    if len(new_records) == len(records):
        return False
    tmp = filepath.with_suffix(filepath.suffix + ".tmp")
    async with aiofiles.open(tmp, "w", encoding="utf-8") as f:
        for rec in new_records:
            await f.write(json.dumps(rec, ensure_ascii=False) + "\n")
    # atomic swap protects against partial writes
    await aiofiles.os.replace(tmp, filepath)
    return True
106
+
107
+
108
+ # ---------------------------------------------------------------------------
109
+ # Markdown helpers
110
+ # ---------------------------------------------------------------------------
111
+
112
async def write_md(filepath: Path, content: str) -> None:
    """Write *content* to *filepath*, creating parent directories as needed."""
    parent = filepath.parent
    parent.mkdir(parents=True, exist_ok=True)
    async with aiofiles.open(filepath, "w", encoding="utf-8") as f:
        await f.write(content)
116
+
117
+
118
async def read_md(filepath: Path) -> str | None:
    """Return the file's full text, or None when it does not exist."""
    if not filepath.exists():
        return None
    async with aiofiles.open(filepath, "r", encoding="utf-8") as f:
        content = await f.read()
    return content
123
+
124
+
125
+ # ---------------------------------------------------------------------------
126
+ # Domain-specific storage
127
+ # ---------------------------------------------------------------------------
128
+
129
+ # --- Contacts ---
130
+
131
def contacts_path() -> Path:
    """Location of the contacts JSONL store."""
    path = _data_path("contacts", "contacts.jsonl")
    return path
133
+
134
+
135
async def list_contacts(page: int = 1, page_size: int = 50, query: str | None = None):
    """Paginated contacts; *query* matches name/phone/company/notes/tags."""
    if not query:
        matcher = None
    else:
        q = query.lower()

        def matcher(rec):
            fields = (
                rec.get("name", ""),
                rec.get("phone", ""),
                rec.get("company", ""),
                rec.get("notes", ""),
            )
            if any(q in field.lower() for field in fields):
                return True
            return any(q in tag.lower() for tag in rec.get("tags", []))

    return await read_jsonl_paginated(contacts_path(), page, page_size, matcher)
148
+
149
+
150
async def get_contact(contact_id: str) -> dict[str, Any] | None:
    """Return the contact whose ``id`` equals *contact_id*, or None."""
    records = await read_jsonl(contacts_path())
    return next((rec for rec in records if rec.get("id") == contact_id), None)
156
+
157
+
158
async def find_contact_by_phone(phone: str) -> dict[str, Any] | None:
    """Return the first contact with an exactly matching phone number, or None."""
    records = await read_jsonl(contacts_path())
    return next((rec for rec in records if rec.get("phone") == phone), None)
164
+
165
+
166
async def search_contacts(query: str) -> list[dict[str, Any]]:
    """Return all contacts matching *query* (case-insensitive substring).

    Matches name, phone, company, notes and tags — tags were previously
    searched by the list_contacts() filter but not here, so the two search
    paths disagreed on the same query.
    """
    records = await read_jsonl(contacts_path())
    q = query.lower()
    return [
        r for r in records
        if q in r.get("name", "").lower()
        or q in r.get("phone", "").lower()
        or q in r.get("company", "").lower()
        or q in r.get("notes", "").lower()
        or any(q in t.lower() for t in r.get("tags", []))
    ]
176
+
177
+
178
async def add_contact(contact: dict[str, Any]) -> dict[str, Any]:
    """Persist a new contact, filling id and bookkeeping defaults in place."""
    if "id" not in contact:
        contact["id"] = str(uuid.uuid4())
    defaults = {
        "created_at": _now_iso(),
        "updated_at": _now_iso(),
        "source": "manual",
        "tags": [],
    }
    for key, value in defaults.items():
        contact.setdefault(key, value)
    await append_jsonl(contacts_path(), contact)
    return contact
187
+
188
+
189
async def update_contact(contact_id: str, updates: dict[str, Any]) -> bool:
    """Apply *updates* to the matching contact, stamping ``updated_at``."""
    updates["updated_at"] = _now_iso()
    path = contacts_path()
    return await update_jsonl_record(path, "id", contact_id, updates)
192
+
193
+
194
async def delete_contact(contact_id: str) -> bool:
    """Remove the contact with *contact_id*; True when something was deleted."""
    path = contacts_path()
    return await delete_jsonl_record(path, "id", contact_id)
196
+
197
+
198
+ # --- SMS ---
199
+
200
def sms_path() -> Path:
    """Location of the SMS messages JSONL store."""
    path = _data_path("sms", "messages.jsonl")
    return path
202
+
203
+
204
async def save_sms(record: dict[str, Any]) -> dict[str, Any]:
    """Persist an SMS record, assigning id and timestamp when absent."""
    if "id" not in record:
        record["id"] = str(uuid.uuid4())
    record.setdefault("timestamp", _now_iso())
    path = sms_path()
    await append_jsonl(path, record)
    return record
210
+
211
+
212
async def list_sms(
    page: int = 1, page_size: int = 20, phone_number: str | None = None
):
    """Paginated SMS listing, optionally restricted to one phone number."""
    def _match(rec):
        # no (truthy) phone filter means every record matches
        return not phone_number or rec.get("phone_number") == phone_number

    return await read_jsonl_paginated(sms_path(), page, page_size, _match)
220
+
221
+
222
+ # --- Tasks ---
223
+
224
def tasks_path() -> Path:
    """Location of the tasks JSONL store."""
    path = _data_path("tasks", "tasks.jsonl")
    return path
226
+
227
+
228
async def save_task(record: dict[str, Any]) -> None:
    """Append a raw task record to the tasks JSONL file."""
    path = tasks_path()
    await append_jsonl(path, record)
230
+
231
+
232
async def update_task(task_id: str, updates: dict[str, Any]) -> None:
    """Apply *updates* to the task matching ``task_id``."""
    path = tasks_path()
    await update_jsonl_record(path, "task_id", task_id, updates)
234
+
235
+
236
async def get_task(task_id: str) -> dict[str, Any] | None:
    """Return the task record with ``task_id``, or None."""
    records = await read_jsonl(tasks_path())
    return next((rec for rec in records if rec.get("task_id") == task_id), None)
242
+
243
+
244
async def list_tasks(
    page: int = 1, page_size: int = 20, **filters
) -> tuple[list[dict[str, Any]], int]:
    """Paginated tasks; non-None keyword filters must match record fields exactly."""
    def _match(rec):
        return all(
            rec.get(key) == value
            for key, value in filters.items()
            if value is not None
        )

    return await read_jsonl_paginated(tasks_path(), page, page_size, _match)
253
+
254
+
255
+ # --- Dev Log ---
256
+
257
def devlog_path() -> Path:
    """Location of the dev-log JSONL store."""
    path = _data_path("devlog", "records.jsonl")
    return path
259
+
260
+
261
async def add_devlog(record: dict[str, Any]) -> dict[str, Any]:
    """Persist a dev-log record, assigning id/created_at/status defaults."""
    if "id" not in record:
        record["id"] = str(uuid.uuid4())
    record.setdefault("created_at", _now_iso())
    # new entries start out pending unless the caller says otherwise
    record.setdefault("status", "pending")
    await append_jsonl(devlog_path(), record)
    return record
268
+
269
+
270
async def list_devlog(
    page: int = 1, page_size: int = 20, status: str | None = None
) -> tuple[list[dict[str, Any]], int]:
    """Paginated dev-log listing, optionally restricted to one status."""
    def _match(rec):
        # no (truthy) status filter means every record matches
        return not status or rec.get("status") == status

    return await read_jsonl_paginated(devlog_path(), page, page_size, _match)
278
+
279
+
280
async def get_devlog(record_id: str) -> dict[str, Any] | None:
    """Return the dev-log record with *record_id*, or None."""
    records = await read_jsonl(devlog_path())
    return next((rec for rec in records if rec.get("id") == record_id), None)
286
+
287
+
288
async def update_devlog(record_id: str, updates: dict[str, Any]) -> bool:
    """Apply *updates* to a dev-log record, maintaining completion stamps."""
    updates["updated_at"] = _now_iso()
    status = updates.get("status")
    if status == "done":
        # entering done: stamp completion unless the caller supplied one
        updates.setdefault("completed_at", _now_iso())
    elif status:
        # any other (truthy) status change clears the completion stamp
        updates["completed_at"] = None
    return await update_jsonl_record(devlog_path(), "id", record_id, updates)
296
+
297
+
298
async def delete_devlog(record_id: str) -> bool:
    """Remove the dev-log record with *record_id*; True when deleted."""
    path = devlog_path()
    return await delete_jsonl_record(path, "id", record_id)
300
+
301
+
302
def _devlog_archive_dir() -> Path:
    """Directory holding archived dev-log JSONL files."""
    directory = _data_path("devlog", "archive")
    return directory
304
+
305
+
306
async def archive_devlog(cutoff_iso: str | None = None) -> dict[str, Any]:
    """Archive done records completed before the given cutoff time.

    If *cutoff_iso* is provided (ISO-8601 string), use it as the cutoff.
    Otherwise default to the end of the day before yesterday (UTC).

    For records without ``completed_at``, fall back to ``updated_at`` then
    ``created_at`` so that old done records without an explicit completion
    timestamp can still be archived.

    Archived records are appended to per-day ``archive/YYYYMMDD.jsonl``
    files (append mode makes repeated archiving safe) and removed from
    ``records.jsonl``; the cutoff is then recorded via _save_archive_meta.

    Returns {archived_count, files, skipped_no_time, total_done, cutoff}.
    NOTE(review): the invalid-cutoff early return omits the skipped_no_time /
    total_done / cutoff keys — callers should not assume a fixed schema.
    """
    records = await read_jsonl(devlog_path())
    if not records:
        # Nothing on disk — report an empty run without touching files.
        return {"archived_count": 0, "files": [], "skipped_no_time": 0,
                "total_done": 0, "cutoff": None, "message": "records.jsonl 为空"}

    # Determine cutoff
    if cutoff_iso:
        try:
            # Accept a trailing "Z" by mapping it to an explicit UTC offset.
            cutoff = datetime.fromisoformat(cutoff_iso.replace("Z", "+00:00"))
        except (ValueError, TypeError):
            return {"archived_count": 0, "files": [],
                    "message": f"无效的时间格式: {cutoff_iso}"}
    else:
        now = datetime.now(timezone.utc)
        # Default cutoff: 23:59:59.999999 two days ago — i.e. the very end
        # of the day before yesterday, in UTC.
        cutoff = (now - timedelta(days=2)).replace(
            hour=23, minute=59, second=59, microsecond=999999
        )

    to_archive: list[dict[str, Any]] = []
    to_keep: list[dict[str, Any]] = []
    skipped_no_time = 0  # done records with no usable / unparseable timestamp
    total_done = 0

    for rec in records:
        if rec.get("status") == "done":
            total_done += 1
            # Try completed_at → updated_at → created_at
            time_str = (rec.get("completed_at")
                        or rec.get("updated_at")
                        or rec.get("created_at"))
            if not time_str:
                skipped_no_time += 1
                to_keep.append(rec)
                continue
            try:
                t = datetime.fromisoformat(time_str.replace("Z", "+00:00"))
                if t <= cutoff:
                    # Ensure completed_at is set for archived records
                    if not rec.get("completed_at"):
                        rec["completed_at"] = time_str
                    to_archive.append(rec)
                    continue
            except (ValueError, TypeError):
                # Unparseable timestamp: count it, then fall through to keep.
                skipped_no_time += 1
        # Non-done records, too-recent done records, and parse failures all
        # reach this line and are kept in records.jsonl.
        to_keep.append(rec)

    if not to_archive:
        return {
            "archived_count": 0,
            "files": [],
            "skipped_no_time": skipped_no_time,
            "total_done": total_done,
            "cutoff": cutoff.isoformat(),
            "message": f"没有符合条件的记录(共 {total_done} 条已完成,截止时间 {cutoff.isoformat()})",
        }

    # Group by date (YYYYMMDD based on completed_at)
    grouped: dict[str, list[dict[str, Any]]] = defaultdict(list)
    for rec in to_archive:
        try:
            completed = datetime.fromisoformat(
                rec["completed_at"].replace("Z", "+00:00")
            )
            date_str = completed.strftime("%Y%m%d")
        except (ValueError, TypeError):
            # completed_at was copied from an unparsed fallback field — file
            # such records under a catch-all bucket.
            date_str = "unknown"
        grouped[date_str].append(rec)

    archive_dir = _devlog_archive_dir()
    archive_dir.mkdir(parents=True, exist_ok=True)

    written_files: list[str] = []
    for date_str, recs in sorted(grouped.items()):
        filepath = archive_dir / f"{date_str}.jsonl"
        # Append to existing archive file (in case of repeated archiving)
        async with aiofiles.open(filepath, "a", encoding="utf-8") as f:
            for rec in recs:
                await f.write(json.dumps(rec, ensure_ascii=False) + "\n")
        written_files.append(f"{date_str}.jsonl")

    # Rewrite records.jsonl with only kept records
    async with aiofiles.open(devlog_path(), "w", encoding="utf-8") as f:
        for rec in to_keep:
            await f.write(json.dumps(rec, ensure_ascii=False) + "\n")

    await _save_archive_meta(cutoff.isoformat())

    return {"archived_count": len(to_archive), "files": written_files,
            "cutoff": cutoff.isoformat(), "total_done": total_done}
407
+
408
+
409
def _devlog_archive_meta_path() -> Path:
    """Location of the archive metadata file (last cutoff)."""
    path = _data_path("devlog", "archive", "meta.json")
    return path
411
+
412
+
413
async def _save_archive_meta(cutoff_iso: str) -> None:
    """Persist the most recent archive cutoff time to meta.json."""
    meta_path = _devlog_archive_meta_path()
    meta_path.parent.mkdir(parents=True, exist_ok=True)
    payload = json.dumps({"last_cutoff": cutoff_iso}, ensure_ascii=False)
    async with aiofiles.open(meta_path, "w", encoding="utf-8") as f:
        await f.write(payload)
419
+
420
+
421
async def get_last_archive_cutoff() -> str | None:
    """Return the last archive cutoff ISO string, or None if never archived."""
    meta_path = _devlog_archive_meta_path()
    if not meta_path.exists():
        return None
    try:
        async with aiofiles.open(meta_path, "r", encoding="utf-8") as f:
            raw = await f.read()
        data = json.loads(raw)
    except (json.JSONDecodeError, OSError):
        # unreadable or corrupt meta file is treated as "never archived"
        return None
    return data.get("last_cutoff")
432
+
433
+
434
async def list_devlog_archives() -> list[str]:
    """List archive date strings (without extension), sorted newest first."""
    directory = _devlog_archive_dir()
    if not directory.exists():
        return []
    stems = [entry.stem for entry in directory.iterdir() if entry.suffix == ".jsonl"]
    stems.sort(reverse=True)
    return stems
444
+
445
+
446
async def read_devlog_archive(date_str: str) -> list[dict[str, Any]]:
    """Read all records from the archive file for *date_str* (YYYYMMDD)."""
    archive_file = _devlog_archive_dir() / f"{date_str}.jsonl"
    return await read_jsonl(archive_file)
450
+
451
+
452
+ # --- Stats ---
453
+
454
async def get_stats() -> dict[str, Any]:
    """Aggregate call, SMS and contact statistics from the JSONL stores.

    Only task records carrying a "direction" field are counted as calls.
    "Today" and "this week" are computed in UTC; the week starts Monday.

    Fixes: the function re-imported ``timedelta`` locally although the
    module already imports it at the top, shadowing that import; the three
    separate passes over ``calls`` (today/week, by-result, by-direction)
    are merged into a single loop.
    """
    all_tasks = await read_jsonl(tasks_path())
    # Only count tasks that became actual calls (have a direction field)
    calls = [t for t in all_tasks if t.get("direction") is not None]
    sms_records = await read_jsonl(sms_path())
    contacts_list = await read_jsonl(contacts_path())

    now = datetime.now(timezone.utc)
    today_start = now.replace(hour=0, minute=0, second=0, microsecond=0)
    week_start = today_start - timedelta(days=today_start.weekday())

    total_duration = sum(c.get("duration_seconds", 0) for c in calls)

    calls_today = 0
    calls_this_week = 0
    calls_by_result: dict[str, int] = {}
    calls_by_direction: dict[str, int] = {}
    for c in calls:
        started = c.get("started_at", "")
        if started:
            try:
                t = datetime.fromisoformat(started.replace("Z", "+00:00"))
            except (ValueError, TypeError):
                # unparseable start time: excluded from today/week counts only
                pass
            else:
                if t >= today_start:
                    calls_today += 1
                if t >= week_start:
                    calls_this_week += 1
        result = c.get("result", "unknown")
        calls_by_result[result] = calls_by_result.get(result, 0) + 1
        direction = c.get("direction", "unknown")
        calls_by_direction[direction] = calls_by_direction.get(direction, 0) + 1

    sms_sent = sum(1 for s in sms_records if s.get("direction") == "outgoing")
    sms_received = sum(1 for s in sms_records if s.get("direction") == "incoming")

    return {
        "total_calls": len(calls),
        "total_duration_seconds": total_duration,
        # max(..., 1) guards the zero-call division
        "avg_duration_seconds": round(total_duration / max(len(calls), 1), 1),
        "calls_today": calls_today,
        "calls_this_week": calls_this_week,
        "calls_by_result": calls_by_result,
        "calls_by_direction": calls_by_direction,
        "total_sms_sent": sms_sent,
        "total_sms_received": sms_received,
        "total_contacts": len(contacts_list),
    }