daimon-sdk 0.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
daimon_sdk/__init__.py ADDED
@@ -0,0 +1,35 @@
1
+ from .client import DaimonClient
2
+ from .exceptions import (
3
+ DaimonConnectionError,
4
+ DaimonError,
5
+ DaimonProtocolError,
6
+ DaimonToolError,
7
+ )
8
+ from .models import (
9
+ BashResult,
10
+ EditResult,
11
+ ExecResult,
12
+ GlobResult,
13
+ GrepResult,
14
+ RuntimeContextResult,
15
+ SessionHandle,
16
+ WebFetchResult,
17
+ WriteResult,
18
+ )
19
+
20
# Public names re-exported at package level.  Kept strictly sorted so the
# list is easy to scan and produces minimal diffs when names are added.
__all__ = [
    "BashResult",
    "DaimonClient",
    "DaimonConnectionError",
    "DaimonError",
    "DaimonProtocolError",
    "DaimonToolError",
    "EditResult",
    "ExecResult",
    "GlobResult",
    "GrepResult",
    "RuntimeContextResult",
    "SessionHandle",
    "WebFetchResult",
    "WriteResult",
]
@@ -0,0 +1,136 @@
1
+ from __future__ import annotations
2
+
3
+ import json
4
+ from dataclasses import dataclass
5
+ from typing import Any
6
+
7
+ import httpx
8
+ from fastmcp import Client
9
+ from fastmcp.client.transports import StreamableHttpTransport
10
+
11
+ from .exceptions import DaimonConnectionError, DaimonProtocolError, DaimonToolError
12
+
13
+
14
@dataclass(slots=True)
class ToolCallEnvelope:
    """Decoded result of one MCP tool call.

    Bundles the structured payload together with the accompanying content
    blocks and the untouched fastmcp result object, so callers can fall back
    to the raw data when the typed view is insufficient.
    """

    # Name of the MCP tool that was invoked.
    tool_name: str
    # Structured JSON payload decoded from the tool response.
    payload: dict[str, Any]
    # Content blocks normalised to plain dicts (see _content_block_to_dict).
    content_blocks: list[dict[str, Any]]
    # The original fastmcp result object, kept verbatim for debugging.
    raw_result: Any
20
+
21
+
22
+ def _content_block_to_dict(block: Any) -> dict[str, Any]:
23
+ if isinstance(block, dict):
24
+ return dict(block)
25
+ data: dict[str, Any] = {}
26
+ for key in ("type", "text", "data", "mimeType", "mime_type", "annotations"):
27
+ value = getattr(block, key, None)
28
+ if value is not None:
29
+ data[key] = value
30
+ if not data and hasattr(block, "model_dump"):
31
+ dumped = block.model_dump()
32
+ if isinstance(dumped, dict):
33
+ data = dumped
34
+ if not data and hasattr(block, "__dict__"):
35
+ data = {
36
+ key: value
37
+ for key, value in vars(block).items()
38
+ if not key.startswith("_")
39
+ }
40
+ return data
41
+
42
+
43
def decode_tool_result(result: Any) -> tuple[dict[str, Any], list[dict[str, Any]]]:
    """Decode a fastmcp tool result into ``(payload, content_blocks)``.

    Decoding strategy, in priority order:

    1. ``result.structured_content`` when it is a dict.
    2. ``result.data`` — accepted as a dict, or parsed when it is a JSON string.
    3. The first content block's ``text``, parsed as JSON.

    Raises:
        DaimonProtocolError: when no strategy applies, when JSON parsing
            fails, or when parsed JSON is not an object (fix: previously a
            JSON array/scalar was returned as the "payload" and crashed later
            in ``call_tool`` with an AttributeError on ``payload.get``).
    """
    structured = getattr(result, "structured_content", None)
    if isinstance(structured, dict):
        content = [_content_block_to_dict(block) for block in getattr(result, "content", []) or []]
        return dict(structured), content
    data = getattr(result, "data", None)
    if data is not None:
        if isinstance(data, dict):
            payload = dict(data)
        elif isinstance(data, str):
            try:
                payload = json.loads(data)
            except json.JSONDecodeError as exc:
                raise DaimonProtocolError(f"tool response data was not valid JSON: {data}") from exc
            if not isinstance(payload, dict):
                # Valid JSON but not an object — callers require a dict payload.
                raise DaimonProtocolError(f"tool response data did not decode to a JSON object: {data}")
        else:
            raise DaimonProtocolError(f"unsupported tool response data type: {type(data)!r}")
        content = [_content_block_to_dict(block) for block in getattr(result, "content", []) or []]
        return payload, content
    content = getattr(result, "content", None) or []
    if content:
        text = getattr(content[0], "text", None)
        if isinstance(text, str):
            try:
                payload = json.loads(text)
            except json.JSONDecodeError as exc:
                raise DaimonProtocolError(f"tool response text was not valid JSON: {text}") from exc
            if not isinstance(payload, dict):
                raise DaimonProtocolError(f"tool response text did not decode to a JSON object: {text}")
            return payload, [_content_block_to_dict(block) for block in content]
    raise DaimonProtocolError(f"unable to decode tool result: {result!r}")
71
+
72
+
73
class FastMCPTransportAdapter:
    """Owns a single fastmcp ``Client`` over streamable HTTP.

    Lazily connects on first use, injects the ``X-Access-Token`` header when
    a token is configured, and maps transport failures and structured tool
    errors onto the SDK exception hierarchy.
    """

    def __init__(self, base_url: str, *, access_token: str | None, timeout_s: float) -> None:
        self.base_url = base_url
        self.access_token = access_token
        self.timeout_s = timeout_s
        # Populated by connect(); None while disconnected.
        self._client: Client | None = None

    @property
    def client(self) -> Client:
        """The connected fastmcp client; raises if connect() has not run."""
        if self._client is None:
            raise DaimonConnectionError("client is not connected")
        return self._client

    async def connect(self) -> None:
        """Open the transport.  No-op when already connected."""
        if self._client is not None:
            return
        headers = {"X-Access-Token": self.access_token} if self.access_token else None
        transport = StreamableHttpTransport(
            self.base_url,
            headers=headers,
            httpx_client_factory=self._httpx_client_factory,
        )
        client = Client(transport, timeout=self.timeout_s)
        try:
            # Drive the client's async context manager manually so the
            # connection lifetime matches this adapter's connect()/close().
            await client.__aenter__()
        except Exception as exc:  # pragma: no cover - fastmcp exception types vary
            raise DaimonConnectionError(str(exc)) from exc
        self._client = client

    async def close(self) -> None:
        """Tear down the transport.  No-op when already closed."""
        if self._client is None:
            return
        try:
            await self._client.__aexit__(None, None, None)
        finally:
            # Drop the reference even if __aexit__ raised, so a later
            # connect() can start fresh.
            self._client = None

    async def call_tool(
        self,
        tool_name: str,
        arguments: dict[str, Any],
        *,
        raise_on_error: bool = True,
    ) -> ToolCallEnvelope:
        """Invoke *tool_name* with *arguments* and decode the response.

        Connects lazily.  Transport failures become DaimonConnectionError;
        a string ``error`` field in the payload becomes DaimonToolError when
        *raise_on_error* is set.
        """
        await self.connect()
        try:
            # raise_on_error=False here: error mapping is done below from the
            # decoded payload, not from fastmcp's own error handling.
            result = await self.client.call_tool(tool_name, arguments, raise_on_error=False)
        except Exception as exc:  # pragma: no cover - transport-side failures vary
            raise DaimonConnectionError(str(exc)) from exc
        payload, content_blocks = decode_tool_result(result)
        if raise_on_error and isinstance(payload.get("error"), str):
            raise DaimonToolError(payload["error"], tool_name=tool_name, payload=payload)
        return ToolCallEnvelope(
            tool_name=tool_name,
            payload=payload,
            content_blocks=content_blocks,
            raw_result=result,
        )

    def _httpx_client_factory(self, **kwargs: Any) -> httpx.AsyncClient:
        """Build the httpx client fastmcp uses, applying SDK defaults.

        Headers supplied by fastmcp are passed through; timeout and redirect
        behaviour are only defaulted, so explicit caller settings win.
        """
        headers = dict(kwargs.pop("headers", {}) or {})
        kwargs.setdefault("timeout", self.timeout_s)
        kwargs.setdefault("follow_redirects", True)
        return httpx.AsyncClient(headers=headers, **kwargs)
daimon_sdk/client.py ADDED
@@ -0,0 +1,437 @@
1
+ from __future__ import annotations
2
+
3
+ from typing import Any
4
+
5
+ from ._transport import FastMCPTransportAdapter, ToolCallEnvelope
6
+ from .models import (
7
+ BashResult,
8
+ ContentBlock,
9
+ EditResult,
10
+ ExecResult,
11
+ GlobResult,
12
+ GrepResult,
13
+ ReadImageFile,
14
+ ReadPartsFile,
15
+ ReadResult,
16
+ ReadTextFile,
17
+ RuntimeContextResult,
18
+ SessionHandle,
19
+ WebFetchResult,
20
+ WriteResult,
21
+ )
22
+
23
+
24
def _content_blocks(envelope: ToolCallEnvelope) -> list[ContentBlock]:
    """Convert the envelope's raw content-block dicts into typed models."""
    return list(map(ContentBlock.from_dict, envelope.content_blocks))
26
+
27
+
28
+ def _int_or_none(value: Any) -> int | None:
29
+ if isinstance(value, bool):
30
+ return int(value)
31
+ if isinstance(value, int):
32
+ return value
33
+ if isinstance(value, str) and value.isdigit():
34
+ return int(value)
35
+ return None
36
+
37
+
38
+ class RawAPI:
39
+ def __init__(self, client: "DaimonClient") -> None:
40
+ self._client = client
41
+
42
+ async def call_tool(
43
+ self,
44
+ name: str,
45
+ arguments: dict[str, Any] | None = None,
46
+ *,
47
+ raise_on_error: bool = True,
48
+ ) -> dict[str, Any]:
49
+ envelope = await self._client._call_tool(
50
+ name,
51
+ arguments or {},
52
+ raise_on_error=raise_on_error,
53
+ )
54
+ return envelope.payload
55
+
56
+
57
class RuntimeAPI:
    """Access to the runtime-context information exposed by processd."""

    def __init__(self, client: "DaimonClient") -> None:
        self._client = client

    async def get_context(self) -> RuntimeContextResult:
        """Call ``GetRuntimeContext`` and wrap its raw payload."""
        envelope = await self._client._call_tool("GetRuntimeContext", {})
        payload = envelope.payload
        return RuntimeContextResult(payload=payload)
64
+
65
+
66
class FilesAPI:
    """File-oriented tools: Read, Edit, Write, Glob and Grep.

    Python-style keyword arguments are mapped onto the tools' wire argument
    names; response payloads use camelCase keys throughout.
    """

    def __init__(self, client: "DaimonClient") -> None:
        self._client = client

    async def read(
        self,
        file_path: str,
        *,
        offset: int | None = None,
        limit: int | None = None,
        pages: str | None = None,
    ) -> ReadResult:
        """Read a file via the ``Read`` tool.

        The payload's ``type`` discriminator ("text", "image" or "parts")
        selects which file model is built from the ``file`` sub-object.

        Raises:
            ValueError: for an unrecognised ``type`` value.
        """
        arguments: dict[str, Any] = {"file_path": file_path}
        if offset is not None:
            arguments["offset"] = offset
        if limit is not None:
            arguments["limit"] = limit
        if pages is not None:
            arguments["pages"] = pages
        envelope = await self._client._call_tool("Read", arguments)
        payload = envelope.payload
        kind = payload.get("type")
        file_data = payload.get("file") or {}
        if kind == "text":
            file_model = ReadTextFile(
                file_path=file_data["filePath"],
                content=file_data["content"],
                num_lines=file_data["numLines"],
                start_line=file_data["startLine"],
                total_lines=file_data["totalLines"],
            )
        elif kind == "image":
            file_model = ReadImageFile(
                file_path=file_data["filePath"],
                mime_type=file_data["mimeType"],
            )
        elif kind == "parts":
            file_model = ReadPartsFile(
                file_path=file_data["filePath"],
                count=file_data["count"],
                pages=file_data["pages"],
            )
        else:
            # NOTE(review): this raises a plain ValueError rather than a
            # DaimonError subclass — callers catching only the SDK hierarchy
            # will not see it; confirm whether that is intentional.
            raise ValueError(f"unsupported read result type: {kind!r}")
        return ReadResult(
            kind=kind,
            file=file_model,
            extra_content=_content_blocks(envelope),
            raw_payload=payload,
        )

    async def edit(
        self,
        file_path: str,
        *,
        old_string: str,
        new_string: str,
        replace_all: bool = False,
    ) -> EditResult:
        """Replace *old_string* with *new_string* in a file via ``Edit``."""
        envelope = await self._client._call_tool(
            "Edit",
            {
                "file_path": file_path,
                "old_string": old_string,
                "new_string": new_string,
                "replace_all": replace_all,
            },
        )
        payload = envelope.payload
        return EditResult(
            file_path=payload["filePath"],
            old_string=payload["oldString"],
            new_string=payload["newString"],
            original_file=payload["originalFile"],
            structured_patch=payload["structuredPatch"],
            user_modified=payload["userModified"],
            replace_all=payload["replaceAll"],
            git_diff=payload.get("gitDiff"),
            raw_payload=payload,
        )

    async def write(self, file_path: str, content: str) -> WriteResult:
        """Write *content* to *file_path* via the ``Write`` tool."""
        envelope = await self._client._call_tool(
            "Write",
            {"file_path": file_path, "content": content},
        )
        payload = envelope.payload
        return WriteResult(
            type=payload["type"],
            file_path=payload["filePath"],
            content=payload["content"],
            structured_patch=payload["structuredPatch"],
            original_file=payload.get("originalFile"),
            git_diff=payload.get("gitDiff"),
            raw_payload=payload,
        )

    async def glob(self, pattern: str, *, path: str | None = None) -> GlobResult:
        """Match files against *pattern* via the ``Glob`` tool."""
        arguments: dict[str, Any] = {"pattern": pattern}
        if path is not None:
            arguments["path"] = path
        envelope = await self._client._call_tool("Glob", arguments)
        payload = envelope.payload
        return GlobResult(
            search_path=payload["searchPath"],
            filenames=list(payload["filenames"]),
            num_files=payload["numFiles"],
            truncated=payload["truncated"],
            duration_ms=payload["durationMs"],
            raw_payload=payload,
        )

    async def grep(
        self,
        pattern: str,
        *,
        path: str | None = None,
        glob: str | None = None,
        output_mode: str | None = None,
        before: int | None = None,
        after: int | None = None,
        context_n: int | None = None,
        context: int | None = None,
        line_number: bool | None = None,
        ignore_case: bool | None = None,
        file_type: str | None = None,
        head_limit: int | None = None,
        offset: int | None = None,
        multiline: bool | None = None,
    ) -> GrepResult:
        """Search file contents via the ``Grep`` tool.

        Only non-None keyword arguments are sent.  Several Python-friendly
        names map onto dash-prefixed wire keys (e.g. ``before`` -> ``-B``,
        ``line_number`` -> ``-n``), mirroring grep/ripgrep flag spellings.
        """
        arguments: dict[str, Any] = {"pattern": pattern}
        # Mapping of keyword arguments to the tool's wire argument names.
        optional_values = {
            "path": path,
            "glob": glob,
            "output_mode": output_mode,
            "-B": before,
            "-A": after,
            "-C": context_n,
            "context": context,
            "-n": line_number,
            "-i": ignore_case,
            "type": file_type,
            "head_limit": head_limit,
            "offset": offset,
            "multiline": multiline,
        }
        for key, value in optional_values.items():
            if value is not None:
                arguments[key] = value
        envelope = await self._client._call_tool("Grep", arguments)
        payload = envelope.payload
        return GrepResult(
            mode=payload["mode"],
            filenames=list(payload["filenames"]),
            num_files=payload["numFiles"],
            content=payload.get("content"),
            num_lines=payload.get("numLines"),
            num_matches=payload.get("numMatches"),
            applied_limit=payload.get("appliedLimit"),
            applied_offset=payload.get("appliedOffset"),
            raw_payload=payload,
        )
228
+
229
+
230
class ExecAPI:
    """Command execution: one-shot ``Bash`` calls and interactive
    ``exec_command``/``write_stdin`` sessions."""

    def __init__(self, client: "DaimonClient") -> None:
        self._client = client

    async def bash(
        self,
        command: str,
        *,
        timeout_ms: int | None = None,
        description: str | None = None,
        run_in_background: bool = False,
        dangerously_disable_sandbox: bool = False,
    ) -> BashResult:
        """Run *command* via the ``Bash`` tool and return its typed result."""
        arguments: dict[str, Any] = {
            "command": command,
            "run_in_background": run_in_background,
            # This one wire key is camelCase, unlike the other Bash arguments.
            "dangerouslyDisableSandbox": dangerously_disable_sandbox,
        }
        if timeout_ms is not None:
            # Caller-facing name is timeout_ms; the wire key is "timeout".
            arguments["timeout"] = timeout_ms
        if description is not None:
            arguments["description"] = description
        envelope = await self._client._call_tool("Bash", arguments)
        payload = envelope.payload
        return BashResult(
            stdout=payload["stdout"],
            stderr=payload["stderr"],
            interrupted=payload["interrupted"],
            dangerously_disable_sandbox=payload["dangerouslyDisableSandbox"],
            persisted_output_path=payload.get("persistedOutputPath"),
            persisted_output_size=payload.get("persistedOutputSize"),
            background_task_id=payload.get("backgroundTaskId"),
            raw_payload=payload,
        )

    async def exec_command(
        self,
        cmd: str,
        *,
        workdir: str | None = None,
        shell: str | None = None,
        tty: bool = False,
        yield_time_ms: int | None = None,
        max_output_tokens: int | None = None,
        login: bool | None = None,
        sandbox_permissions: str | None = None,
        justification: str | None = None,
        prefix_rule: list[str] | None = None,
    ) -> ExecResult:
        """Run *cmd* via ``exec_command``; only non-None options are sent."""
        arguments: dict[str, Any] = {"cmd": cmd, "tty": tty}
        optional_values = {
            "workdir": workdir,
            "shell": shell,
            "yield_time_ms": yield_time_ms,
            "max_output_tokens": max_output_tokens,
            "login": login,
            "sandbox_permissions": sandbox_permissions,
            "justification": justification,
            "prefix_rule": prefix_rule,
        }
        for key, value in optional_values.items():
            if value is not None:
                arguments[key] = value
        envelope = await self._client._call_tool("exec_command", arguments)
        return self._exec_from_payload(envelope.payload)

    async def start_session(
        self,
        cmd: str,
        *,
        workdir: str | None = None,
        shell: str | None = None,
        tty: bool = False,
        yield_time_ms: int | None = None,
        max_output_tokens: int | None = None,
        login: bool | None = None,
        sandbox_permissions: str | None = None,
        justification: str | None = None,
        prefix_rule: list[str] | None = None,
    ) -> SessionHandle:
        """Run *cmd* and return a handle to the still-running session.

        Raises:
            RuntimeError: if the command finished (or failed to start) and no
                session id was returned.  NOTE(review): a plain RuntimeError,
                not a DaimonError subclass — confirm intended.
        """
        result = await self.exec_command(
            cmd,
            workdir=workdir,
            shell=shell,
            tty=tty,
            yield_time_ms=yield_time_ms,
            max_output_tokens=max_output_tokens,
            login=login,
            sandbox_permissions=sandbox_permissions,
            justification=justification,
            prefix_rule=prefix_rule,
        )
        if result.session_id is None:
            raise RuntimeError("processd did not return a running session")
        return SessionHandle(self._client, result.session_id)

    async def write_stdin(
        self,
        session_id: int,
        *,
        chars: str = "",
        yield_time_ms: int | None = None,
        max_output_tokens: int | None = None,
    ) -> ExecResult:
        """Send *chars* to a session's stdin and return the next output chunk.

        An empty *chars* acts as a pure poll (see SessionHandle.poll).
        """
        arguments: dict[str, Any] = {"session_id": session_id, "chars": chars}
        if yield_time_ms is not None:
            arguments["yield_time_ms"] = yield_time_ms
        if max_output_tokens is not None:
            arguments["max_output_tokens"] = max_output_tokens
        envelope = await self._client._call_tool("write_stdin", arguments)
        return self._exec_from_payload(envelope.payload)

    @staticmethod
    def _exec_from_payload(payload: dict[str, Any]) -> ExecResult:
        """Map an exec payload (snake_case keys, with defaults) onto ExecResult."""
        return ExecResult(
            output=payload.get("output", ""),
            wall_time_seconds=float(payload.get("wall_time_seconds", 0.0)),
            chunk_id=str(payload.get("chunk_id", "")),
            original_token_count=int(payload.get("original_token_count", 0)),
            # session_id/exit_code may arrive as ints or numeric strings.
            session_id=_int_or_none(payload.get("session_id")),
            exit_code=_int_or_none(payload.get("exit_code")),
            raw_payload=payload,
        )
353
+
354
+
355
class WebAPI:
    """HTTP fetching backed by the ``WebFetch`` tool."""

    def __init__(self, client: "DaimonClient") -> None:
        self._client = client

    async def fetch(
        self,
        url: str,
        *,
        timeout_ms: int | None = None,
        max_bytes: int | None = None,
        follow_same_host_redirects: bool | None = None,
    ) -> WebFetchResult:
        """Fetch *url* via processd; only non-None options are sent."""
        arguments: dict[str, Any] = {"url": url}
        for key, value in (
            ("timeout_ms", timeout_ms),
            ("max_bytes", max_bytes),
            ("follow_same_host_redirects", follow_same_host_redirects),
        ):
            if value is not None:
                arguments[key] = value
        envelope = await self._client._call_tool("WebFetch", arguments)
        payload = envelope.payload
        return WebFetchResult(
            url=payload["url"],
            status_code=payload["statusCode"],
            content_type=payload["contentType"],
            bytes=payload["bytes"],
            result_type=payload["resultType"],
            content=payload["content"],
            redirect_url=payload.get("redirectUrl"),
            persisted_path=payload.get("persistedPath"),
            persisted_size=payload.get("persistedSize"),
            duration_ms=payload["durationMs"],
            raw_payload=payload,
        )
389
+
390
+
391
class DaimonClient:
    """Async client for a processd MCP endpoint.

    Groups the raw MCP tool surface into typed sub-APIs (``raw``, ``runtime``,
    ``files``, ``exec``, ``web``) sharing a single fastmcp transport.  Usable
    either via explicit connect()/close() or as an async context manager.
    """

    def __init__(
        self,
        base_url: str,
        *,
        access_token: str | None = None,
        timeout_s: float = 30.0,
    ) -> None:
        self.base_url = base_url
        self.access_token = access_token
        self.timeout_s = timeout_s
        self._transport = FastMCPTransportAdapter(
            base_url,
            access_token=access_token,
            timeout_s=timeout_s,
        )
        # Grouped tool namespaces; each holds a back-reference to this client.
        self.raw = RawAPI(self)
        self.runtime = RuntimeAPI(self)
        self.files = FilesAPI(self)
        self.exec = ExecAPI(self)
        self.web = WebAPI(self)

    async def connect(self) -> "DaimonClient":
        """Open the underlying transport; returns ``self`` for chaining."""
        await self._transport.connect()
        return self

    async def close(self) -> None:
        """Close the underlying transport (safe to call when not connected)."""
        await self._transport.close()

    async def __aenter__(self) -> "DaimonClient":
        return await self.connect()

    async def __aexit__(self, exc_type: object, exc: object, tb: object) -> None:
        await self.close()

    async def _call_tool(
        self,
        tool_name: str,
        arguments: dict[str, Any],
        *,
        raise_on_error: bool = True,
    ) -> ToolCallEnvelope:
        """Internal dispatch used by all sub-APIs; delegates to the transport."""
        return await self._transport.call_tool(
            tool_name,
            arguments,
            raise_on_error=raise_on_error,
        )
@@ -0,0 +1,25 @@
1
+ from __future__ import annotations
2
+
3
+ from typing import Any
4
+
5
+
6
+ class DaimonError(Exception):
7
+ """Base SDK error."""
8
+
9
+
10
+ class DaimonConnectionError(DaimonError):
11
+ """Raised when the SDK cannot connect to the MCP endpoint."""
12
+
13
+
14
+ class DaimonProtocolError(DaimonError):
15
+ """Raised when the MCP response cannot be decoded into a processd payload."""
16
+
17
+
18
+ class DaimonToolError(DaimonError):
19
+ """Raised when processd returns a structured tool error."""
20
+
21
+ def __init__(self, message: str, *, tool_name: str, payload: dict[str, Any] | None = None) -> None:
22
+ super().__init__(message)
23
+ self.message = message
24
+ self.tool_name = tool_name
25
+ self.payload = payload or {}
daimon_sdk/models.py ADDED
@@ -0,0 +1,242 @@
1
+ from __future__ import annotations
2
+
3
+ import asyncio
4
+ import time
5
+ from dataclasses import dataclass, field
6
+ from typing import TYPE_CHECKING, Any
7
+
8
+ if TYPE_CHECKING:
9
+ from .client import DaimonClient
10
+
11
+
12
+ @dataclass(slots=True)
13
+ class ContentBlock:
14
+ type: str | None
15
+ text: str | None = None
16
+ mime_type: str | None = None
17
+ raw: dict[str, Any] = field(default_factory=dict)
18
+
19
+ @classmethod
20
+ def from_dict(cls, payload: dict[str, Any]) -> "ContentBlock":
21
+ return cls(
22
+ type=payload.get("type"),
23
+ text=payload.get("text"),
24
+ mime_type=payload.get("mimeType") or payload.get("mime_type"),
25
+ raw=payload,
26
+ )
27
+
28
+
29
@dataclass(slots=True)
class ReadTextFile:
    """``Read`` result body for ``type == "text"``."""

    file_path: str
    content: str
    # Line accounting as reported by the server (wire keys are camelCase).
    num_lines: int
    start_line: int
    total_lines: int


@dataclass(slots=True)
class ReadImageFile:
    """``Read`` result body for ``type == "image"``."""

    file_path: str
    mime_type: str


@dataclass(slots=True)
class ReadPartsFile:
    """``Read`` result body for ``type == "parts"``."""

    file_path: str
    count: int
    # Page selector string echoed by the server (format server-defined).
    pages: str


@dataclass(slots=True)
class ReadResult:
    """Typed wrapper over a ``Read`` tool response."""

    # Discriminator copied from the payload: "text", "image" or "parts".
    kind: str
    file: ReadTextFile | ReadImageFile | ReadPartsFile
    # Extra MCP content blocks that accompanied the structured payload.
    extra_content: list[ContentBlock]
    # Untouched payload dict for anything not modelled above.
    raw_payload: dict[str, Any]
57
+
58
+
59
@dataclass(slots=True)
class EditResult:
    """Typed wrapper over an ``Edit`` tool response."""

    file_path: str
    old_string: str
    new_string: str
    # Pre-edit file contents as reported by the server.
    original_file: str
    structured_patch: list[dict[str, Any]]
    user_modified: bool
    replace_all: bool
    # Optional git diff info; None when the server omitted "gitDiff".
    git_diff: dict[str, Any] | None
    raw_payload: dict[str, Any]


@dataclass(slots=True)
class WriteResult:
    """Typed wrapper over a ``Write`` tool response."""

    # Result discriminator copied verbatim from the payload's "type" key.
    type: str
    file_path: str
    content: str
    structured_patch: list[dict[str, Any]]
    # None when the server omitted "originalFile" (e.g. presumably a new
    # file — confirm against server behaviour).
    original_file: str | None
    git_diff: dict[str, Any] | None
    raw_payload: dict[str, Any]
81
+
82
+
83
@dataclass(slots=True)
class GlobResult:
    """Typed wrapper over a ``Glob`` tool response."""

    search_path: str
    filenames: list[str]
    num_files: int
    # True when the server limited the returned file list.
    truncated: bool
    duration_ms: int
    raw_payload: dict[str, Any]


@dataclass(slots=True)
class GrepResult:
    """Typed wrapper over a ``Grep`` tool response.

    Fields below ``num_files`` are optional: the server includes them only
    for certain output modes, so they default to None in the payload mapping.
    """

    mode: str
    filenames: list[str]
    num_files: int
    content: str | None
    num_lines: int | None
    num_matches: int | None
    applied_limit: int | None
    applied_offset: int | None
    raw_payload: dict[str, Any]
104
+
105
+
106
@dataclass(slots=True)
class ExecResult:
    """One output chunk returned by ``exec_command`` or ``write_stdin``."""

    output: str
    wall_time_seconds: float
    chunk_id: str
    # Token count reported under the payload key "original_token_count".
    original_token_count: int
    # Set while the command is still running as an interactive session.
    session_id: int | None
    # Set once the command has exited.
    exit_code: int | None
    raw_payload: dict[str, Any]

    @property
    def is_running(self) -> bool:
        """True while an interactive session exists and no exit code is set."""
        return self.session_id is not None and self.exit_code is None

    @property
    def has_exited(self) -> bool:
        """True once the server has reported an exit code."""
        return self.exit_code is not None
123
+
124
+
125
@dataclass(slots=True)
class BashResult:
    """Typed wrapper over a ``Bash`` tool response."""

    stdout: str
    stderr: str
    interrupted: bool
    # Echo of the dangerouslyDisableSandbox flag as reported by the server.
    dangerously_disable_sandbox: bool
    # Set when the server persisted (part of) the output to a file.
    persisted_output_path: str | None
    persisted_output_size: int | None
    # Set when the command was launched with run_in_background.
    background_task_id: str | None
    raw_payload: dict[str, Any]

    @property
    def is_background(self) -> bool:
        """True when the command was started as a background task."""
        return self.background_task_id is not None
139
+
140
+
141
@dataclass(slots=True)
class WebFetchResult:
    """Typed wrapper over a ``WebFetch`` tool response."""

    url: str
    status_code: int
    content_type: str
    # Response size; wire key is "bytes" (shadows the builtin by design).
    bytes: int
    # Server-side classification of the result, from "resultType".
    result_type: str
    content: str
    # Set when the server reported a redirect target.
    redirect_url: str | None
    # Set when the server persisted the body to a file.
    persisted_path: str | None
    persisted_size: int | None
    duration_ms: int
    raw_payload: dict[str, Any]
154
+
155
+
156
+ @dataclass(slots=True)
157
+ class RuntimeContextResult:
158
+ payload: dict[str, Any]
159
+
160
+ @property
161
+ def base_workdir(self) -> str | None:
162
+ return self.payload.get("baseWorkdir")
163
+
164
+ @property
165
+ def summary(self) -> str | None:
166
+ value = self.payload.get("summary")
167
+ return value if isinstance(value, str) else None
168
+
169
+ @property
170
+ def filesystem(self) -> dict[str, Any]:
171
+ value = self.payload.get("filesystem")
172
+ return value if isinstance(value, dict) else {}
173
+
174
+ @property
175
+ def network(self) -> dict[str, Any]:
176
+ value = self.payload.get("network")
177
+ return value if isinstance(value, dict) else {}
178
+
179
+
180
+ @dataclass(slots=True)
181
+ class SessionHandle:
182
+ _client: "DaimonClient"
183
+ session_id: int
184
+
185
+ async def write(
186
+ self,
187
+ chars: str = "",
188
+ *,
189
+ yield_time_ms: int | None = None,
190
+ max_output_tokens: int | None = None,
191
+ ) -> ExecResult:
192
+ return await self._client.exec.write_stdin(
193
+ self.session_id,
194
+ chars=chars,
195
+ yield_time_ms=yield_time_ms,
196
+ max_output_tokens=max_output_tokens,
197
+ )
198
+
199
+ async def poll(
200
+ self,
201
+ *,
202
+ yield_time_ms: int | None = None,
203
+ max_output_tokens: int | None = None,
204
+ ) -> ExecResult:
205
+ return await self.write(
206
+ "",
207
+ yield_time_ms=yield_time_ms,
208
+ max_output_tokens=max_output_tokens,
209
+ )
210
+
211
+ async def wait_for_exit(
212
+ self,
213
+ *,
214
+ timeout_s: float = 10.0,
215
+ yield_time_ms: int = 5_000,
216
+ poll_interval_s: float = 0.05,
217
+ max_output_tokens: int | None = None,
218
+ ) -> ExecResult:
219
+ deadline = time.monotonic() + timeout_s
220
+ last_result: ExecResult | None = None
221
+ while time.monotonic() < deadline:
222
+ last_result = await self.poll(
223
+ yield_time_ms=yield_time_ms,
224
+ max_output_tokens=max_output_tokens,
225
+ )
226
+ if last_result.has_exited:
227
+ return last_result
228
+ await asyncio.sleep(poll_interval_s)
229
+ raise TimeoutError(f"session {self.session_id} did not exit within {timeout_s} seconds")
230
+
231
+ async def close(
232
+ self,
233
+ *,
234
+ exit_payload: str = "__EXIT__\n",
235
+ yield_time_ms: int = 500,
236
+ max_output_tokens: int | None = None,
237
+ ) -> ExecResult:
238
+ return await self.write(
239
+ exit_payload,
240
+ yield_time_ms=yield_time_ms,
241
+ max_output_tokens=max_output_tokens,
242
+ )
@@ -0,0 +1,116 @@
1
+ Metadata-Version: 2.4
2
+ Name: daimon-sdk
3
+ Version: 0.1.0
4
+ Summary: Typed async Python SDK for daimon MCP services.
5
+ Author: processd contributors
6
+ License: MIT
7
+ Requires-Python: >=3.12
8
+ Requires-Dist: fastmcp<4,>=3.1.1
9
+ Requires-Dist: httpx<1,>=0.28
10
+ Provides-Extra: dev
11
+ Requires-Dist: pytest-asyncio<1,>=0.24; extra == 'dev'
12
+ Requires-Dist: pytest<9,>=8.3; extra == 'dev'
13
+ Description-Content-Type: text/markdown
14
+
15
+ # daimon-sdk
16
+
17
+ Typed async Python SDK for `processd-mcp`.
18
+
19
+ `daimon-sdk` wraps the raw MCP tool surface exposed by `processd-mcp` and presents it as grouped Python APIs such as `client.files.read()` and `client.exec.start_session()`. The SDK keeps `processd-standalone` as the contract source of truth and focuses on:
20
+
21
+ - connection and token wiring
22
+ - typed request/response handling
23
+ - structured tool error mapping
24
+ - interactive session helpers
25
+ - compatibility tests against a real `processd-mcp` binary
26
+
27
+ ## Install
28
+
29
+ ```bash
30
+ pip install daimon-sdk
31
+ ```
32
+
33
+ For local development:
34
+
35
+ ```bash
36
+ pip install -e ".[dev]"
37
+ ```
38
+
39
+ ## Quickstart
40
+
41
+ ```python
42
+ import asyncio
43
+
44
+ from daimon_sdk import DaimonClient
45
+
46
+
47
+ async def main() -> None:
48
+ async with DaimonClient("http://127.0.0.1:8080/mcp") as client:
49
+ runtime = await client.runtime.get_context()
50
+ print(runtime.base_workdir)
51
+
52
+ result = await client.files.glob("**/*.rs", path=runtime.base_workdir)
53
+ print(result.filenames[:5])
54
+
55
+ bash = await client.exec.bash("printf 'hello from processd\\n'")
56
+ print(bash.stdout)
57
+
58
+
59
+ asyncio.run(main())
60
+ ```
61
+
62
+ ## Raw MCP vs SDK
63
+
64
+ Raw MCP:
65
+
66
+ ```python
67
+ payload = await mcp_client.call_tool("Read", {"file_path": "/tmp/demo.txt"})
68
+ ```
69
+
70
+ SDK:
71
+
72
+ ```python
73
+ read = await client.files.read("/tmp/demo.txt")
74
+ print(read.file.content)
75
+ ```
76
+
77
+ ## API Overview
78
+
79
+ - `DaimonClient(base_url, access_token=None, timeout_s=30.0)`
80
+ - `await client.connect()` / `await client.close()`
81
+ - `async with DaimonClient(...) as client`
82
+ - `client.runtime.get_context()`
83
+ - `client.files.read() / write() / edit() / glob() / grep()`
84
+ - `client.exec.bash() / start_session()`
85
+ - `SessionHandle.write() / poll() / wait_for_exit() / close()`
86
+ - `client.web.fetch()`
87
+ - `client.raw.call_tool()`
88
+
89
+ ## Local Testing
90
+
91
+ The SDK compatibility tests expect a sibling checkout of `processd-standalone`:
92
+
93
+ ```text
94
+ e2b-project/
95
+ processd-standalone/
96
+ processd-sdk/
97
+ ```
98
+
99
+ Run tests with an environment that already has the dev dependencies installed:
100
+
101
+ ```bash
102
+ PYTHONPATH=src python -m pytest -q
103
+ ```
104
+
105
+ The E2E suite builds and launches `../processd-standalone/target/debug/processd-mcp`.
106
+
107
+ ## Release
108
+
109
+ Releases are published from GitHub Actions when a tag matching `v*` is pushed.
110
+
111
+ ```bash
112
+ git tag v0.1.0
113
+ git push origin v0.1.0
114
+ ```
115
+
116
+ The tag version must match `pyproject.toml`'s project version.
@@ -0,0 +1,8 @@
1
+ daimon_sdk/__init__.py,sha256=Roky3wQ-p6AEOd6lq_4riuzoXmKnTILXMrsu1oT4SM4,650
2
+ daimon_sdk/_transport.py,sha256=U9vSTGmiWSbF829Wsw3kFIjaM1Q0Kt5DNhCznjopQd8,5064
3
+ daimon_sdk/client.py,sha256=mf0Ris81hcnTcfRTVNKEx_0jRgtJBu8SvWyEOr-9_vM,14450
4
+ daimon_sdk/exceptions.py,sha256=YnRwjuXJywsHmRhQVRAiYJ_0Wa0OL1OGXcdM1lWsDbQ,695
5
+ daimon_sdk/models.py,sha256=d1Vm7sxokeurX2vaw2F2rEWnyA5ojFzanjli0cdn3wU,5786
6
+ daimon_sdk-0.1.0.dist-info/METADATA,sha256=dgZXZnLb-Tbb2sRNH09QcKD84YCfps4f83LNM1ynhj8,2744
7
+ daimon_sdk-0.1.0.dist-info/WHEEL,sha256=QccIxa26bgl1E6uMy58deGWi-0aeIkkangHcxk2kWfw,87
8
+ daimon_sdk-0.1.0.dist-info/RECORD,,
@@ -0,0 +1,4 @@
1
+ Wheel-Version: 1.0
2
+ Generator: hatchling 1.29.0
3
+ Root-Is-Purelib: true
4
+ Tag: py3-none-any