duh-cli 0.2.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (83)
  1. duh/__init__.py +6 -0
  2. duh/__main__.py +5 -0
  3. duh/adapters/__init__.py +6 -0
  4. duh/adapters/anthropic.py +324 -0
  5. duh/adapters/approvers.py +88 -0
  6. duh/adapters/file_store.py +151 -0
  7. duh/adapters/mcp_executor.py +314 -0
  8. duh/adapters/memory_store.py +323 -0
  9. duh/adapters/native_executor.py +147 -0
  10. duh/adapters/ollama.py +359 -0
  11. duh/adapters/openai.py +309 -0
  12. duh/adapters/renderers.py +165 -0
  13. duh/adapters/simple_compactor.py +362 -0
  14. duh/adapters/structured_logging.py +169 -0
  15. duh/agents.py +219 -0
  16. duh/cli/__init__.py +5 -0
  17. duh/cli/doctor.py +144 -0
  18. duh/cli/main.py +63 -0
  19. duh/cli/ndjson.py +37 -0
  20. duh/cli/parser.py +81 -0
  21. duh/cli/repl.py +1178 -0
  22. duh/cli/runner.py +474 -0
  23. duh/cli/sdk_runner.py +321 -0
  24. duh/config.py +306 -0
  25. duh/hooks.py +382 -0
  26. duh/kernel/__init__.py +27 -0
  27. duh/kernel/backoff.py +167 -0
  28. duh/kernel/deps.py +56 -0
  29. duh/kernel/engine.py +344 -0
  30. duh/kernel/file_tracker.py +153 -0
  31. duh/kernel/git_context.py +142 -0
  32. duh/kernel/health_check.py +150 -0
  33. duh/kernel/job_queue.py +132 -0
  34. duh/kernel/loop.py +225 -0
  35. duh/kernel/memory.py +118 -0
  36. duh/kernel/messages.py +112 -0
  37. duh/kernel/plan_mode.py +214 -0
  38. duh/kernel/skill.py +314 -0
  39. duh/kernel/tasks.py +104 -0
  40. duh/kernel/templates.py +205 -0
  41. duh/kernel/tokens.py +162 -0
  42. duh/kernel/tool.py +126 -0
  43. duh/kernel/undo.py +103 -0
  44. duh/plugins.py +313 -0
  45. duh/ports/__init__.py +21 -0
  46. duh/ports/approver.py +26 -0
  47. duh/ports/context.py +26 -0
  48. duh/ports/executor.py +30 -0
  49. duh/ports/memory.py +57 -0
  50. duh/ports/provider.py +48 -0
  51. duh/ports/renderer.py +68 -0
  52. duh/ports/store.py +26 -0
  53. duh/tools/__init__.py +43 -0
  54. duh/tools/agent_tool.py +38 -0
  55. duh/tools/bash.py +247 -0
  56. duh/tools/bash_security.py +306 -0
  57. duh/tools/db_tool.py +283 -0
  58. duh/tools/docker_tool.py +223 -0
  59. duh/tools/edit.py +144 -0
  60. duh/tools/github_tool.py +255 -0
  61. duh/tools/glob_tool.py +68 -0
  62. duh/tools/grep.py +101 -0
  63. duh/tools/http_tool.py +195 -0
  64. duh/tools/lsp_tool.py +428 -0
  65. duh/tools/mcp_tool.py +76 -0
  66. duh/tools/memory_tool.py +150 -0
  67. duh/tools/multi_edit.py +173 -0
  68. duh/tools/notebook_edit.py +243 -0
  69. duh/tools/read.py +180 -0
  70. duh/tools/registry.py +205 -0
  71. duh/tools/skill_tool.py +103 -0
  72. duh/tools/task_tool.py +140 -0
  73. duh/tools/test_impact.py +237 -0
  74. duh/tools/tool_search.py +206 -0
  75. duh/tools/web_fetch.py +142 -0
  76. duh/tools/web_search.py +124 -0
  77. duh/tools/worktree.py +237 -0
  78. duh/tools/write.py +82 -0
  79. duh_cli-0.2.0.dist-info/METADATA +249 -0
  80. duh_cli-0.2.0.dist-info/RECORD +83 -0
  81. duh_cli-0.2.0.dist-info/WHEEL +4 -0
  82. duh_cli-0.2.0.dist-info/entry_points.txt +2 -0
  83. duh_cli-0.2.0.dist-info/licenses/LICENSE +15 -0
duh/__init__.py ADDED
@@ -0,0 +1,6 @@
1
"""D.U.H. — Duh is a Universal Harness.

Because connecting AI to your codebase should be obvious.
"""

# Canonical version string for the package; matches the distribution
# metadata (duh-cli 0.2.0). Bump here together with the build metadata.
__version__ = "0.2.0"
duh/__main__.py ADDED
@@ -0,0 +1,5 @@
1
"""Allow running duh as `python -m duh`."""

from duh.cli.main import main

# Guard the entry point: without it, merely importing ``duh.__main__``
# (e.g. from tooling, docs generators, or tests) would execute the CLI and
# exit the interpreter. Only ``python -m duh`` should trigger it.
if __name__ == "__main__":
    raise SystemExit(main())
@@ -0,0 +1,6 @@
1
+ """Adapters — concrete implementations of ports.
2
+
3
+ Each adapter wraps an external SDK/service and translates it into
4
+ D.U.H.'s uniform interface. The kernel never imports these directly;
5
+ they're injected via Deps.
6
+ """
@@ -0,0 +1,324 @@
1
+ """Anthropic adapter — wraps the anthropic Python SDK into D.U.H. events.
2
+
3
+ This adapter translates between:
4
+ - D.U.H. Messages → Anthropic API format (role/content dicts)
5
+ - Anthropic streaming events → D.U.H. uniform events
6
+ - Anthropic tool schemas → D.U.H. tool format
7
+
8
+ Usage:
9
+ from duh.adapters.anthropic import AnthropicProvider
10
+ provider = AnthropicProvider(api_key="sk-ant-...")
11
+ deps = Deps(call_model=provider.stream)
12
+ """
13
+
14
+ from __future__ import annotations
15
+
16
+ import asyncio
17
+ import os
18
+ from typing import Any, AsyncGenerator
19
+
20
+ import httpx
21
+
22
+ from duh.kernel.backoff import with_backoff
23
+ from duh.kernel.messages import Message
24
+
25
+
26
class AnthropicProvider:
    """Wraps the Anthropic Python SDK to produce D.U.H. uniform events.

    Implements the ModelProvider port contract: ``stream`` is an async
    generator of plain-dict events (``text_delta``, ``thinking_delta``,
    ``content_block_*``, and a final ``assistant`` event carrying a
    :class:`Message`).
    """

    def __init__(
        self,
        api_key: str | None = None,
        model: str = "claude-sonnet-4-6",
        max_retries: int = 2,
        timeout: float = 600.0,
        base_url: str | None = None,
    ) -> None:
        # Imported lazily so the package stays importable when the
        # `anthropic` SDK is not installed and this adapter is unused.
        import anthropic

        self._default_model = model
        self._client = anthropic.AsyncAnthropic(
            # Fall back to the conventional env var when no key is passed.
            api_key=api_key or os.environ.get("ANTHROPIC_API_KEY", ""),
            max_retries=max_retries,
            timeout=timeout,
            # Only forward base_url when set; the SDK rejects base_url=None.
            **({"base_url": base_url} if base_url else {}),
        )

    async def stream(
        self,
        *,
        messages: list[Any],
        system_prompt: str | list[str] = "",
        model: str = "",
        tools: list[Any] | None = None,
        thinking: dict[str, Any] | None = None,
        max_tokens: int | None = None,
        tool_choice: str | dict[str, Any] | None = None,
        **kwargs: Any,
    ) -> AsyncGenerator[dict[str, Any], None]:
        """Stream model responses, yielding D.U.H. uniform events.

        Args:
            messages: D.U.H. Messages, dicts, or strings (see
                ``_to_api_messages`` for coercion rules).
            system_prompt: string or list of strings joined with blank lines.
            model: overrides the provider's default model when non-empty.
            tools: tool definitions (dicts or objects with ``name`` /
                ``input_schema``).
            thinking: ``{"type": "adaptive"|"enabled"|..., "budget_tokens": int}``.
            max_tokens: response token cap; defaults per model family.
            tool_choice: ``"auto"``/``"any"``/``"none"``, a tool name, or a
                raw API tool_choice dict.
            **kwargs: accepted for port compatibility; not forwarded.

        Yields:
            Uniform event dicts; the terminal event is
            ``{"type": "assistant", "message": Message(...)}`` (with
            ``metadata["is_error"]`` set if the API call ultimately failed).
        """
        resolved_model = model or self._default_model
        resolved_max_tokens = max_tokens or _default_max_tokens(resolved_model)

        # Build API params
        api_messages = _to_api_messages(messages)
        params: dict[str, Any] = {
            "model": resolved_model,
            "max_tokens": resolved_max_tokens,
            "messages": api_messages,
        }

        # System prompt (omitted entirely when empty)
        system_text = _build_system_text(system_prompt)
        if system_text:
            params["system"] = system_text

        # Tools
        if tools:
            params["tools"] = _to_api_tools(tools)

        # Thinking: prefer adaptive on model families that support it;
        # otherwise fall back to an explicit token budget when requested.
        if thinking:
            thinking_type = thinking.get("type", "disabled")
            if thinking_type in ("adaptive", "enabled"):
                supports_adaptive = any(
                    tag in resolved_model
                    for tag in ("opus-4-6", "sonnet-4-6")
                )
                if supports_adaptive:
                    params["thinking"] = {"type": "adaptive"}
                elif thinking_type == "enabled":
                    # NOTE(review): default budget of max_tokens - 1 may fall
                    # below the API's minimum thinking budget — confirm.
                    budget = thinking.get("budget_tokens", resolved_max_tokens - 1)
                    params["thinking"] = {"type": "enabled", "budget_tokens": budget}

        # Tool choice — Anthropic supports natively
        if tool_choice and tools:
            if isinstance(tool_choice, dict):
                params["tool_choice"] = tool_choice
            elif tool_choice == "none":
                # Don't send tools at all — simplest way to prevent tool use
                del params["tools"]
            elif tool_choice == "auto":
                params["tool_choice"] = {"type": "auto"}
            elif tool_choice == "any":
                params["tool_choice"] = {"type": "any"}
            else:
                # Assume it's a tool name — force that specific tool
                params["tool_choice"] = {"type": "tool", "name": tool_choice}

        # Stream with exponential backoff for transient errors
        content_blocks: list[Any] = []
        accumulated_text: list[str] = []
        usage: dict[str, int] = {}

        async def _do_stream() -> AsyncGenerator[dict[str, Any], None]:
            nonlocal content_blocks, accumulated_text, usage
            # Reset accumulators on each retry attempt
            content_blocks = []
            accumulated_text = []
            usage = {}

            async with self._client.messages.stream(**params) as stream:
                try:
                    async for event in stream:
                        event_type = getattr(event, "type", "")

                        if event_type == "content_block_start":
                            block = getattr(event, "content_block", None)
                            if block:
                                content_blocks.append(block)
                            yield {
                                "type": "content_block_start",
                                "index": getattr(event, "index", len(content_blocks) - 1),
                                "content_block": _block_to_dict(block) if block else {},
                            }

                        elif event_type == "content_block_delta":
                            delta = getattr(event, "delta", None)
                            if delta:
                                delta_type = getattr(delta, "type", "")
                                if delta_type == "text_delta":
                                    # Accumulate so partial output survives a
                                    # mid-stream connection failure.
                                    text = getattr(delta, "text", "")
                                    accumulated_text.append(text)
                                    yield {"type": "text_delta", "text": text}
                                elif delta_type == "thinking_delta":
                                    yield {"type": "thinking_delta", "text": getattr(delta, "thinking", "")}
                                elif delta_type == "input_json_delta":
                                    yield {"type": "input_json_delta", "partial_json": getattr(delta, "partial_json", "")}
                                elif delta_type == "signature_delta":
                                    pass  # Ignore signature deltas

                        elif event_type == "content_block_stop":
                            yield {
                                "type": "content_block_stop",
                                "index": getattr(event, "index", 0),
                            }

                        elif event_type == "message_start":
                            # Initial usage snapshot (input tokens known here).
                            msg = getattr(event, "message", None)
                            if msg:
                                msg_usage = getattr(msg, "usage", None)
                                if msg_usage:
                                    usage = {
                                        "input_tokens": getattr(msg_usage, "input_tokens", 0),
                                        "output_tokens": getattr(msg_usage, "output_tokens", 0),
                                    }

                        elif event_type == "message_delta":
                            # Output-token count is finalized via deltas.
                            delta_usage = getattr(event, "usage", None)
                            if delta_usage:
                                usage["output_tokens"] = getattr(delta_usage, "output_tokens", 0)

                except (ConnectionError, httpx.ReadError, asyncio.TimeoutError) as mid_err:
                    # Mid-stream error — yield partial content if we have any
                    partial_text = "".join(accumulated_text)
                    if partial_text:
                        yield {
                            "type": "assistant",
                            "message": Message(
                                role="assistant",
                                content=[{"type": "text", "text": partial_text}],
                                metadata={
                                    "partial": True,
                                    "model": resolved_model,
                                    "stop_reason": "error",
                                    "usage": usage,
                                },
                            ),
                        }
                    yield {"type": "error", "error": f"Stream interrupted: {mid_err}"}
                    return

                # Build final assistant message
                final = await stream.get_final_message()
                content = _normalize_content(list(final.content)) if final else []

                assistant_msg = Message(
                    role="assistant",
                    content=content,
                    id=getattr(final, "id", ""),
                    metadata={
                        "model": getattr(final, "model", resolved_model),
                        "stop_reason": getattr(final, "stop_reason", "end_turn"),
                        "usage": usage,
                    },
                )
                yield {"type": "assistant", "message": assistant_msg}

        # NOTE(review): if with_backoff retries _do_stream after events were
        # already yielded to the consumer, those early events would be
        # duplicated (accumulators reset, but the consumer already saw them).
        # Confirm with_backoff only retries failures before the first yield.
        try:
            async for event in with_backoff(_do_stream):
                yield event
        except Exception as e:
            error_text = str(e)
            # Yield error as an assistant message with error content
            yield {
                "type": "assistant",
                "message": Message(
                    role="assistant",
                    content=[{"type": "text", "text": f"API Error: {error_text}"}],
                    metadata={"is_error": True, "error": error_text},
                ),
            }
225
+
226
+
227
+ # ---------------------------------------------------------------------------
228
+ # Translation helpers
229
+ # ---------------------------------------------------------------------------
230
+
231
def _to_api_messages(messages: list[Any]) -> list[dict[str, Any]]:
    """Translate D.U.H. Messages → Anthropic API format.

    Message instances with list content are converted block-by-block
    (dicts are sanitized, dataclasses dumped, anything else stringified);
    non-list content and unknown inputs are coerced to plain strings.
    """
    out: list[dict[str, Any]] = []
    for item in messages:
        if isinstance(item, Message):
            body = item.content
            if not isinstance(body, list):
                out.append({"role": item.role, "content": str(body)})
                continue
            blocks: list[dict[str, Any]] = []
            for part in body:
                if isinstance(part, dict):
                    # Strip to API-allowed fields per block type
                    blocks.append(_sanitize_block(part))
                elif hasattr(part, "__dataclass_fields__"):
                    from dataclasses import asdict
                    blocks.append(_sanitize_block(asdict(part)))
                else:
                    blocks.append({"type": "text", "text": str(part)})
            out.append({"role": item.role, "content": blocks})
        elif isinstance(item, dict):
            out.append({"role": item.get("role", "user"), "content": item.get("content", "")})
        else:
            # Bare strings (or anything else) become a user turn.
            out.append({"role": "user", "content": str(item)})
    return out
257
+
258
+
259
+ def _sanitize_block(block: dict[str, Any]) -> dict[str, Any]:
260
+ """Strip non-API fields from content blocks."""
261
+ ALLOWED = {
262
+ "text": {"type", "text"},
263
+ "tool_use": {"type", "id", "name", "input"},
264
+ "tool_result": {"type", "tool_use_id", "content", "is_error"},
265
+ "thinking": {"type", "thinking", "signature"},
266
+ }
267
+ bt = block.get("type", "")
268
+ allowed = ALLOWED.get(bt)
269
+ if allowed:
270
+ return {k: v for k, v in block.items() if k in allowed}
271
+ return block
272
+
273
+
274
+ def _to_api_tools(tools: list[Any]) -> list[dict[str, Any]]:
275
+ """Translate D.U.H. Tool objects → Anthropic API tool schemas."""
276
+ result = []
277
+ for tool in tools:
278
+ if isinstance(tool, dict):
279
+ result.append(tool)
280
+ elif hasattr(tool, "name") and hasattr(tool, "input_schema"):
281
+ desc = getattr(tool, "description", "")
282
+ if callable(desc):
283
+ desc = desc()
284
+ result.append({
285
+ "name": tool.name,
286
+ "description": str(desc) if desc else "",
287
+ "input_schema": tool.input_schema,
288
+ })
289
+ return result
290
+
291
+
292
+ def _build_system_text(system_prompt: str | list[str]) -> str:
293
+ """Build system prompt text."""
294
+ if isinstance(system_prompt, list):
295
+ return "\n\n".join(p for p in system_prompt if p)
296
+ return system_prompt
297
+
298
+
299
+ def _default_max_tokens(model: str) -> int:
300
+ """Get default max tokens for a model."""
301
+ if "opus" in model:
302
+ return 16384
303
+ if "haiku" in model:
304
+ return 8192
305
+ return 16384 # sonnet default
306
+
307
+
308
+ def _block_to_dict(block: Any) -> dict[str, Any]:
309
+ """Convert an SDK content block object to a dict."""
310
+ if isinstance(block, dict):
311
+ return block
312
+ if hasattr(block, "model_dump"):
313
+ return block.model_dump()
314
+ d: dict[str, Any] = {"type": getattr(block, "type", "unknown")}
315
+ for attr in ("text", "thinking", "id", "name", "input", "signature"):
316
+ val = getattr(block, attr, None)
317
+ if val is not None:
318
+ d[attr] = val
319
+ return d
320
+
321
+
322
def _normalize_content(blocks: list[Any]) -> list[dict[str, Any]]:
    """Normalize SDK content blocks to dicts via ``_block_to_dict``."""
    return list(map(_block_to_dict, blocks))
@@ -0,0 +1,88 @@
1
+ """Approval gate adapters — permission checking implementations.
2
+
3
+ AutoApprover: allows everything (sandbox/bypass mode)
4
+ InteractiveApprover: asks the user y/n in the terminal
5
+ RuleApprover: deny rules from config (path restrictions, command blocklists)
6
+ """
7
+
8
+ from __future__ import annotations
9
+
10
+ import sys
11
+ from typing import Any
12
+
13
+
14
class AutoApprover:
    """Approval gate that permits every tool call.

    Intended for sandboxed or bypass-mode environments where prompting
    the user makes no sense.
    """

    async def check(self, tool_name: str, input: dict[str, Any]) -> dict[str, Any]:
        # Unconditional approval — no rules, no prompting.
        return {"allowed": True}
19
+
20
+
21
class InteractiveApprover:
    """Asks the user for permission before tool execution.

    Prompts on stderr (keeping stdout clean for piped output) and reads
    one line from stdin. An empty response (just Enter) resolves to
    *default_allow*; EOF or Ctrl-C denies.
    """

    def __init__(self, *, default_allow: bool = False):
        # Outcome when the user just presses Enter.
        # BUG FIX: this was previously stored but never consulted — an
        # empty response always allowed, regardless of default_allow.
        self._default_allow = default_allow

    async def check(self, tool_name: str, tool_input: dict[str, Any]) -> dict[str, Any]:
        """Prompt for a y/n decision; return {"allowed": bool, "reason": str?}."""
        import builtins

        # Compact one-line summary of the first few input fields.
        summary = ", ".join(f"{k}={v!r}" for k, v in list(tool_input.items())[:3])
        if len(summary) > 120:
            summary = summary[:117] + "..."

        # Show prompt; capitalize the default choice in the hint.
        hint = "[Y/n]" if self._default_allow else "[y/N]"
        sys.stderr.write(f"\n Tool: {tool_name}\n")
        if summary:
            sys.stderr.write(f" Input: {summary}\n")
        sys.stderr.write(f" Allow? {hint} ")
        sys.stderr.flush()

        try:
            response = builtins.input("").strip().lower()
        except (EOFError, KeyboardInterrupt):
            return {"allowed": False, "reason": "User cancelled"}

        if response == "":
            # Empty input falls back to the configured default.
            if self._default_allow:
                return {"allowed": True}
            return {"allowed": False, "reason": "User denied"}
        if response in ("y", "yes"):
            return {"allowed": True}
        return {"allowed": False, "reason": "User denied"}
50
+
51
+
52
+ class RuleApprover:
53
+ """Checks tool calls against configurable deny rules.
54
+
55
+ Rules can deny by tool name, by input patterns, or by path restrictions.
56
+ """
57
+
58
+ def __init__(
59
+ self,
60
+ *,
61
+ denied_tools: set[str] | None = None,
62
+ denied_commands: set[str] | None = None,
63
+ allowed_paths: list[str] | None = None,
64
+ ):
65
+ self._denied_tools = denied_tools or set()
66
+ self._denied_commands = denied_commands or set()
67
+ self._allowed_paths = allowed_paths
68
+
69
+ async def check(self, tool_name: str, input: dict[str, Any]) -> dict[str, Any]:
70
+ # Check denied tools
71
+ if tool_name in self._denied_tools:
72
+ return {"allowed": False, "reason": f"Tool '{tool_name}' is denied by policy"}
73
+
74
+ # Check denied commands (for Bash tool)
75
+ if tool_name == "Bash":
76
+ cmd = input.get("command", "")
77
+ for denied in self._denied_commands:
78
+ if denied in cmd:
79
+ return {"allowed": False, "reason": f"Command contains denied pattern: {denied}"}
80
+
81
+ # Check path restrictions
82
+ if self._allowed_paths is not None:
83
+ for key in ("path", "file_path"):
84
+ path = input.get(key)
85
+ if path and not any(path.startswith(p) for p in self._allowed_paths):
86
+ return {"allowed": False, "reason": f"Path '{path}' outside allowed directories"}
87
+
88
+ return {"allowed": True}
@@ -0,0 +1,151 @@
1
+ """FileStore adapter — JSONL-based session persistence.
2
+
3
+ Stores each session as a .jsonl file under ~/.config/duh/sessions/.
4
+ One JSON object per line = one message. Atomic writes via
5
+ temp-file-then-rename for thread safety.
6
+
7
+ store = FileStore()
8
+ await store.save("abc-123", messages)
9
+ history = await store.load("abc-123")
10
+ """
11
+
12
+ from __future__ import annotations
13
+
14
+ import json
15
+ import os
16
+ import tempfile
17
+ from dataclasses import asdict
18
+ from datetime import datetime, timezone
19
+ from pathlib import Path
20
+ from typing import Any
21
+
22
+ from duh.kernel.messages import Message
23
+
24
+
25
+ def _default_base_dir() -> Path:
26
+ return Path.home() / ".config" / "duh" / "sessions"
27
+
28
+
29
class FileStore:
    """JSONL file-backed SessionStore implementation.

    One ``<session_id>.jsonl`` file per session under ``base_dir``; one
    JSON object per non-empty line equals one message. Writes go through
    a temp-file-then-``os.replace`` so a crash mid-write cannot corrupt
    an existing session file.
    """

    def __init__(self, base_dir: Path | str | None = None):
        # Falls back to ~/.config/duh/sessions when no directory is given.
        self._base_dir = Path(base_dir) if base_dir else _default_base_dir()

    def _session_path(self, session_id: str) -> Path:
        # NOTE(review): session_id is interpolated into a filename unescaped;
        # a caller-supplied id containing path separators could escape
        # base_dir — confirm ids are validated upstream.
        return self._base_dir / f"{session_id}.jsonl"

    def _ensure_dir(self) -> None:
        # Idempotent; creates intermediate directories as needed.
        self._base_dir.mkdir(parents=True, exist_ok=True)

    # ------------------------------------------------------------------
    # SessionStore protocol
    # ------------------------------------------------------------------

    async def save(self, session_id: str, messages: list[Any]) -> None:
        """Append *new* messages to the session file.

        Messages are serialised with ``dataclasses.asdict`` when they are
        Message dataclass instances; plain dicts pass through as-is.
        Writes are atomic: we write to a temporary file in the same
        directory, then ``os.replace`` into the final path — so a crash
        mid-write never corrupts existing data.

        NOTE: assumes *messages* is the FULL history for the session — the
        delta to append is computed by slicing off the first
        ``existing_count`` entries; verify callers never pass a partial list.
        """
        self._ensure_dir()
        path = self._session_path(session_id)

        # Read existing lines so we only *append* the delta.
        existing_count = 0
        if path.exists():
            with open(path, "r", encoding="utf-8") as f:
                # Blank lines are ignored, matching load()/list_sessions().
                existing_count = sum(1 for line in f if line.strip())

        new_messages = messages[existing_count:]
        if not new_messages:
            return

        lines: list[str] = []
        for msg in new_messages:
            if isinstance(msg, Message):
                lines.append(json.dumps(asdict(msg), ensure_ascii=False))
            else:
                lines.append(json.dumps(msg, ensure_ascii=False))

        # Atomic write: copy existing content + new lines → temp → rename.
        # mkstemp in the SAME directory guarantees os.replace is a rename,
        # not a cross-device copy.
        fd, tmp_path = tempfile.mkstemp(
            dir=str(self._base_dir), suffix=".tmp",
        )
        try:
            with os.fdopen(fd, "w", encoding="utf-8") as tmp:
                # Copy existing content
                if path.exists():
                    with open(path, "r", encoding="utf-8") as orig:
                        tmp.write(orig.read())
                # Append new lines
                for line in lines:
                    tmp.write(line + "\n")
            os.replace(tmp_path, str(path))
        except BaseException:
            # Clean up temp file on any error (including KeyboardInterrupt),
            # then re-raise — never leave stray .tmp files behind.
            try:
                os.unlink(tmp_path)
            except OSError:
                pass
            raise

    async def load(self, session_id: str) -> list[dict[str, Any]] | None:
        """Load messages for a session, returning dicts (not Message objects).

        Returns ``None`` when the session file does not exist.
        """
        path = self._session_path(session_id)
        if not path.exists():
            return None

        messages: list[dict[str, Any]] = []
        with open(path, "r", encoding="utf-8") as f:
            for line in f:
                stripped = line.strip()
                if stripped:
                    messages.append(json.loads(stripped))
        return messages

    async def list_sessions(self) -> list[dict[str, Any]]:
        """Return metadata for every persisted session.

        Each entry contains:
        - ``session_id``
        - ``created`` — ISO-8601 timestamp (file ctime)
        - ``modified`` — ISO-8601 timestamp (file mtime)
        - ``message_count``
        """
        if not self._base_dir.exists():
            return []

        sessions: list[dict[str, Any]] = []
        # Sorted iteration gives a deterministic (lexicographic) ordering.
        for entry in sorted(self._base_dir.iterdir()):
            if entry.suffix != ".jsonl" or not entry.is_file():
                continue
            stat = entry.stat()
            # Counting lines requires opening every file; fine for the
            # expected handful of sessions.
            with open(entry, "r", encoding="utf-8") as f:
                count = sum(1 for line in f if line.strip())
            sessions.append({
                "session_id": entry.stem,
                # st_birthtime is the true creation time where available
                # (macOS/BSD); st_ctime is the closest portable fallback.
                "created": datetime.fromtimestamp(
                    stat.st_birthtime if hasattr(stat, "st_birthtime") else stat.st_ctime,
                    tz=timezone.utc,
                ).isoformat(),
                "modified": datetime.fromtimestamp(
                    stat.st_mtime, tz=timezone.utc,
                ).isoformat(),
                "message_count": count,
            })
        return sessions

    async def delete(self, session_id: str) -> bool:
        """Delete a session file. Returns True if it existed."""
        path = self._session_path(session_id)
        if path.exists():
            path.unlink()
            return True
        return False