kader 0.1.5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,456 @@
1
+ """Protocol definition for pluggable memory backends.
2
+
3
+ This module defines the BackendProtocol that all backend implementations
4
+ must follow. Backends can store files in different locations (state, filesystem,
5
+ database, etc.) and provide a uniform interface for file operations.
6
+ """
7
+
8
+ import abc
9
+ import asyncio
10
+ from dataclasses import dataclass
11
+ from typing import Any, Literal, NotRequired
12
+
13
+ from typing_extensions import TypedDict
14
+
15
FileOperationError = Literal[
    "file_not_found",  # Download: file doesn't exist
    "permission_denied",  # Both: access denied
    "is_directory",  # Download: tried to download directory as file
    "invalid_path",  # Both: path syntax malformed (parent dir missing, invalid chars)
]
"""Standardized error codes for file upload/download operations.

These represent common, recoverable errors that an LLM can understand and potentially fix:

- file_not_found: The requested file doesn't exist (download)
- permission_denied: Access denied for the operation
- is_directory: Attempted to download a directory as a file
- invalid_path: Path syntax is malformed, contains invalid characters, or the
  parent directory is missing (upload)
"""
30
+
31
+
32
@dataclass
class FileDownloadResponse:
    """Result of a single file download operation.

    The response is designed to allow partial success in batch operations:
    each requested path gets its own response object, so one failed file
    does not abort the rest of the batch. The errors are standardized using
    FileOperationError literals for certain recoverable conditions for use
    cases that involve LLMs performing file operations.

    Attributes:
        path: The file path that was requested. Included for easy correlation
            when processing batch results, especially useful for error messages.
        content: File contents as bytes on success, None on failure.
        error: Standardized error code on failure, None on success.
            Uses FileOperationError literal for structured, LLM-actionable error reporting.

    Examples:
        >>> # Success
        >>> FileDownloadResponse(path="/app/config.json", content=b"{...}", error=None)
        >>> # Failure
        >>> FileDownloadResponse(path="/wrong/path.txt", content=None, error="file_not_found")
    """

    path: str  # requested path, echoed back for batch correlation
    content: bytes | None = None  # raw file bytes on success; None on failure
    error: FileOperationError | None = None  # None means the download succeeded
58
+
59
+
60
@dataclass
class FileUploadResponse:
    """Result of a single file upload operation.

    The response is designed to allow partial success in batch operations:
    each uploaded file gets its own response object, so one failed file does
    not abort the rest of the batch. The errors are standardized using
    FileOperationError literals for certain recoverable conditions for use
    cases that involve LLMs performing file operations.

    Attributes:
        path: The file path that was requested. Included for easy correlation
            when processing batch results and for clear error messages.
        error: Standardized error code on failure, None on success.
            Uses FileOperationError literal for structured, LLM-actionable error reporting.

    Examples:
        >>> # Success
        >>> FileUploadResponse(path="/app/data.txt", error=None)
        >>> # Failure
        >>> FileUploadResponse(path="/readonly/file.txt", error="permission_denied")
    """

    path: str  # requested destination path, echoed back for batch correlation
    error: FileOperationError | None = None  # None means the upload succeeded
84
+
85
+
86
class FileInfo(TypedDict):
    """Structured file listing info.

    Minimal contract used across backends. Only "path" is required.
    Other fields are best-effort and may be absent depending on backend.
    """

    path: str  # absolute file path (always present)
    is_dir: NotRequired[bool]  # True if the entry is a directory
    size: NotRequired[int]  # bytes (approx)
    modified_at: NotRequired[str]  # ISO timestamp if known
97
+
98
+
99
class GrepMatch(TypedDict):
    """Structured grep match entry, as returned by BackendProtocol.grep_raw."""

    path: str  # absolute path of the file containing the match
    line: int  # line number of the match (1-indexed)
    text: str  # full line content containing the match
105
+
106
+
107
@dataclass
class WriteResult:
    """Result from backend write operations.

    Attributes:
        error: Error message on failure, None on success.
        path: Absolute path of written file, None on failure.
        files_update: State update dict for checkpoint backends, None for external storage.
            Checkpoint backends populate this with {file_path: file_data} for LangGraph state.
            External backends set None (already persisted to disk/S3/database/etc).

    Examples:
        >>> # Checkpoint storage
        >>> WriteResult(path="/f.txt", files_update={"/f.txt": {...}})
        >>> # External storage
        >>> WriteResult(path="/f.txt", files_update=None)
        >>> # Error
        >>> WriteResult(error="File exists")
    """

    error: str | None = None  # human-readable failure message; None means success
    path: str | None = None  # absolute path of the written file on success
    files_update: dict[str, Any] | None = None  # {file_path: file_data} for checkpoint backends
130
+
131
+
132
@dataclass
class EditResult:
    """Result from backend edit operations.

    Attributes:
        error: Error message on failure, None on success.
        path: Absolute path of edited file, None on failure.
        files_update: State update dict for checkpoint backends, None for external storage.
            Checkpoint backends populate this with {file_path: file_data} for LangGraph state.
            External backends set None (already persisted to disk/S3/database/etc).
        occurrences: Number of replacements made, None on failure.

    Examples:
        >>> # Checkpoint storage
        >>> EditResult(path="/f.txt", files_update={"/f.txt": {...}}, occurrences=1)
        >>> # External storage
        >>> EditResult(path="/f.txt", files_update=None, occurrences=2)
        >>> # Error
        >>> EditResult(error="File not found")
    """

    error: str | None = None  # human-readable failure message; None means success
    path: str | None = None  # absolute path of the edited file on success
    files_update: dict[str, Any] | None = None  # {file_path: file_data} for checkpoint backends
    occurrences: int | None = None  # number of replacements performed; None on failure
157
+
158
+
159
class BackendProtocol(abc.ABC):
    """Protocol for pluggable memory backends (single, unified).

    Backends can store files in different locations (state, filesystem, database, etc.)
    and provide a uniform interface for file operations.

    All file data is represented as dicts with the following structure:
    {
        "content": list[str],  # Lines of text content
        "created_at": str,  # ISO format timestamp
        "modified_at": str,  # ISO format timestamp
    }

    NOTE(review): despite deriving from abc.ABC, none of the synchronous
    methods below are decorated with @abc.abstractmethod; their bodies are
    docstrings only, so a subclass that omits an override gets an implicit
    ``return None`` instead of an instantiation-time error — confirm this is
    intended. Each async ``a``-prefixed variant has a concrete default
    implementation that runs the corresponding sync method in a worker
    thread via ``asyncio.to_thread``.
    """

    def ls_info(self, path: str) -> list["FileInfo"]:
        """List all files in a directory with metadata.

        Args:
            path: Absolute path to the directory to list. Must start with '/'.

        Returns:
            List of FileInfo dicts containing file metadata:

            - `path` (required): Absolute file path
            - `is_dir` (optional): True if directory
            - `size` (optional): File size in bytes
            - `modified_at` (optional): ISO 8601 timestamp
        """

    async def als_info(self, path: str) -> list["FileInfo"]:
        """Async version of ls_info."""
        # Default implementation: offload the (potentially blocking) sync call.
        return await asyncio.to_thread(self.ls_info, path)

    def read(
        self,
        file_path: str,
        offset: int = 0,
        limit: int = 2000,
    ) -> str:
        """Read file content with line numbers.

        Args:
            file_path: Absolute path to the file to read. Must start with '/'.
            offset: Line number to start reading from (0-indexed). Default: 0.
            limit: Maximum number of lines to read. Default: 2000.

        Returns:
            String containing file content formatted with line numbers (cat -n format),
            starting at line 1. Lines longer than 2000 characters are truncated.

            Returns an error string if the file doesn't exist or can't be read.

        !!! note
            - Use pagination (offset/limit) for large files to avoid context overflow
            - First scan: `read(path, limit=100)` to see file structure
            - Read more: `read(path, offset=100, limit=200)` for next section
            - ALWAYS read a file before editing it
            - If file exists but is empty, you'll receive a system reminder warning
        """

    async def aread(
        self,
        file_path: str,
        offset: int = 0,
        limit: int = 2000,
    ) -> str:
        """Async version of read."""
        return await asyncio.to_thread(self.read, file_path, offset, limit)

    def grep_raw(
        self,
        pattern: str,
        path: str | None = None,
        glob: str | None = None,
    ) -> list["GrepMatch"] | str:
        """Search for a literal text pattern in files.

        Args:
            pattern: Literal string to search for (NOT regex).
                Performs exact substring matching within file content.
                Example: "TODO" matches any line containing "TODO"

            path: Optional directory path to search in.
                If None, searches in current working directory.
                Example: "/workspace/src"

            glob: Optional glob pattern to filter which FILES to search.
                Filters by filename/path, not content.
                Supports standard glob wildcards:
                - `*` matches any characters in filename
                - `**` matches any directories recursively
                - `?` matches single character
                - `[abc]` matches one character from set

                Examples:
                - "*.py" - only search Python files
                - "**/*.txt" - search all .txt files recursively
                - "src/**/*.js" - search JS files under src/
                - "test[0-9].txt" - search test0.txt, test1.txt, etc.

        Returns:
            On success: list[GrepMatch] with structured results containing:
            - path: Absolute file path
            - line: Line number (1-indexed)
            - text: Full line content containing the match

            On error: str with error message (e.g., invalid path, permission denied)
        """

    async def agrep_raw(
        self,
        pattern: str,
        path: str | None = None,
        glob: str | None = None,
    ) -> list["GrepMatch"] | str:
        """Async version of grep_raw."""
        return await asyncio.to_thread(self.grep_raw, pattern, path, glob)

    def glob_info(self, pattern: str, path: str = "/") -> list["FileInfo"]:
        """Find files matching a glob pattern.

        Args:
            pattern: Glob pattern with wildcards to match file paths.
                Supports standard glob syntax:
                - `*` matches any characters within a filename/directory
                - `**` matches any directories recursively
                - `?` matches a single character
                - `[abc]` matches one character from set

            path: Base directory to search from. Default: "/" (root).
                The pattern is applied relative to this path.

        Returns:
            list of FileInfo
        """

    async def aglob_info(self, pattern: str, path: str = "/") -> list["FileInfo"]:
        """Async version of glob_info."""
        return await asyncio.to_thread(self.glob_info, pattern, path)

    def write(
        self,
        file_path: str,
        content: str,
    ) -> WriteResult:
        """Write content to a new file in the filesystem, error if file exists.

        Args:
            file_path: Absolute path where the file should be created.
                Must start with '/'.
            content: String content to write to the file.

        Returns:
            WriteResult
        """

    async def awrite(
        self,
        file_path: str,
        content: str,
    ) -> WriteResult:
        """Async version of write."""
        return await asyncio.to_thread(self.write, file_path, content)

    def edit(
        self,
        file_path: str,
        old_string: str,
        new_string: str,
        replace_all: bool = False,
    ) -> EditResult:
        """Perform exact string replacements in an existing file.

        Args:
            file_path: Absolute path to the file to edit. Must start with '/'.
            old_string: Exact string to search for and replace.
                Must match exactly including whitespace and indentation.
            new_string: String to replace old_string with.
                Must be different from old_string.
            replace_all: If True, replace all occurrences. If False (default),
                old_string must be unique in the file or the edit fails.

        Returns:
            EditResult
        """

    async def aedit(
        self,
        file_path: str,
        old_string: str,
        new_string: str,
        replace_all: bool = False,
    ) -> EditResult:
        """Async version of edit."""
        return await asyncio.to_thread(
            self.edit, file_path, old_string, new_string, replace_all
        )

    def upload_files(self, files: list[tuple[str, bytes]]) -> list[FileUploadResponse]:
        """Upload multiple files to the sandbox.

        This API is designed to allow developers to use it either directly or
        by exposing it to LLMs via custom tools.

        Args:
            files: List of (path, content) tuples to upload.

        Returns:
            List of FileUploadResponse objects, one per input file.
            Response order matches input order (response[i] for files[i]).
            Check the error field to determine success/failure per file.

        Examples:
            ```python
            responses = sandbox.upload_files(
                [
                    ("/app/config.json", b"{...}"),
                    ("/app/data.txt", b"content"),
                ]
            )
            ```
        """

    async def aupload_files(
        self, files: list[tuple[str, bytes]]
    ) -> list[FileUploadResponse]:
        """Async version of upload_files."""
        return await asyncio.to_thread(self.upload_files, files)

    def download_files(self, paths: list[str]) -> list[FileDownloadResponse]:
        """Download multiple files from the sandbox.

        This API is designed to allow developers to use it either directly or
        by exposing it to LLMs via custom tools.

        Args:
            paths: List of file paths to download.

        Returns:
            List of FileDownloadResponse objects, one per input path.
            Response order matches input order (response[i] for paths[i]).
            Check the error field to determine success/failure per file.
        """

    async def adownload_files(self, paths: list[str]) -> list[FileDownloadResponse]:
        """Async version of download_files."""
        return await asyncio.to_thread(self.download_files, paths)
406
+
407
+
408
@dataclass
class ExecuteResponse:
    """Result of code execution.

    Simplified schema optimized for LLM consumption: a single combined
    output stream plus an exit code and a truncation flag.
    """

    output: str
    """Combined stdout and stderr output of the executed command."""

    exit_code: int | None = None
    """The process exit code. 0 indicates success, non-zero indicates failure.
    Defaults to None when the backend does not supply one."""

    truncated: bool = False
    """Whether the output was truncated due to backend limitations."""
423
+
424
+
425
class SandboxBackendProtocol(BackendProtocol):
    """Protocol for sandboxed backends with isolated runtime.

    Sandboxed backends run in isolated environments (e.g., separate processes,
    containers) and communicate via defined interfaces. In addition to the
    file operations inherited from BackendProtocol, they can execute shell
    commands and expose a stable instance identifier.
    """

    def execute(
        self,
        command: str,
    ) -> ExecuteResponse:
        """Execute a command in the process.

        Simplified interface optimized for LLM consumption.

        Args:
            command: Full shell command string to execute.

        Returns:
            ExecuteResponse with combined output, exit code, and truncation flag.
        """

    async def aexecute(
        self,
        command: str,
    ) -> ExecuteResponse:
        """Async version of execute."""
        # Default implementation: offload the (potentially blocking) sync call.
        return await asyncio.to_thread(self.execute, command)

    @property
    def id(self) -> str:
        """Unique identifier for the sandbox backend instance."""
        # NOTE(review): docstring-only body — accessing this property on a
        # subclass that does not override it returns None, despite the
        # declared `str` return type. Confirm subclasses always override.