cowork-dash 0.1.6__py3-none-any.whl → 0.1.7__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -749,6 +749,18 @@ details summary:hover {
   margin: 5px 0;
 }
 
+.canvas-markdown ul,
+.canvas-markdown ol {
+  margin: 5px 0;
+  padding-left: 24px;
+  list-style-position: inside;
+}
+
+.canvas-markdown li {
+  margin: 3px 0;
+  color: var(--mantine-color-text);
+}
+
 .canvas-markdown strong {
   color: var(--mantine-color-text);
   font-weight: 600;
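
These rules only apply to list markup rendered inside an element carrying the `canvas-markdown` class. A minimal sketch of the kind of component they target, assuming the class is attached through Dash's standard `className` prop (the actual layout code in cowork-dash may differ):

```python
from dash import dcc, html

# Hypothetical canvas panel: wrapping rendered markdown in the canvas-markdown
# class is what makes the new ul/ol/li rules above take effect.
canvas_panel = html.Div(
    dcc.Markdown(
        "- first bullet\n- second bullet\n  1. nested ordered item\n",
        className="canvas-markdown",
    )
)
```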
@@ -0,0 +1,435 @@
+"""Custom backend implementation wrapping VirtualFilesystem.
+
+This module provides VirtualFilesystemBackend which implements DeepAgents'
+BackendProtocol interface using the existing VirtualFilesystem for storage.
+This enables unified file access between the agent and Dash UI in virtual FS mode.
+"""
+
+import fnmatch
+import re
+
+from deepagents.backends.protocol import (
+    BackendProtocol,
+    EditResult,
+    FileDownloadResponse,
+    FileInfo,
+    FileUploadResponse,
+    GrepMatch,
+    WriteResult,
+)
+from deepagents.backends.utils import (
+    check_empty_content,
+    format_content_with_line_numbers,
+    perform_string_replacement,
+)
+
+from .virtual_fs import VirtualFilesystem
+
+
+class VirtualFilesystemBackend(BackendProtocol):
+    """Backend that wraps VirtualFilesystem for session-isolated storage.
+
+    Provides full BackendProtocol support including:
+    - Directory operations (ls_info)
+    - File read/write with text support
+    - Binary file upload/download
+    - Grep and glob search
+
+    This backend stores files directly in the VirtualFilesystem instance,
+    which is shared between the agent and Dash UI callbacks.
+
+    Unlike StateBackend which stores data in LangGraph checkpoint state,
+    this backend writes directly to the VirtualFilesystem. Therefore:
+    - files_update is always None (no state updates needed)
+    - Changes are immediately visible to other code using the same VirtualFilesystem
+    """
+
+    def __init__(self, fs: VirtualFilesystem):
+        """Initialize the backend with a VirtualFilesystem instance.
+
+        Args:
+            fs: The VirtualFilesystem to use for storage. This instance
+                should be shared with Dash callbacks for unified access.
+        """
+        self.fs = fs
+
+    def _normalize_path(self, path: str) -> str:
+        """Ensure path is absolute and within the VirtualFilesystem root."""
+        if not path:
+            return self.fs._root
+
+        # If path doesn't start with /, treat it as relative to the FS root
+        if not path.startswith("/"):
+            path = f"{self.fs._root}/{path}"
+        # If path starts with / but not with the FS root, prepend the root
+        elif not path.startswith(self.fs._root):
+            # Strip leading / and prepend root
+            path = f"{self.fs._root}/{path.lstrip('/')}"
+
+        # Remove trailing slash except for root
+        if path != self.fs._root and path.endswith("/"):
+            path = path.rstrip("/")
+
+        return path
+
+    def ls_info(self, path: str) -> list[FileInfo]:
+        """List files and directories in path.
+
+        Args:
+            path: Absolute path to the directory to list.
+
+        Returns:
+            List of FileInfo dicts with path, is_dir, and size fields.
+        """
+        norm_path = self._normalize_path(path)
+
+        if not self.fs.is_dir(norm_path):
+            return []
+
+        results: list[FileInfo] = []
+        try:
+            for name in self.fs.listdir(norm_path):
+                if norm_path == "/":
+                    full_path = f"/{name}"
+                else:
+                    full_path = f"{norm_path}/{name}"
+
+                is_dir = self.fs.is_dir(full_path)
+
+                info: FileInfo = {
+                    "path": full_path + ("/" if is_dir else ""),
+                    "is_dir": is_dir,
+                }
+
+                if not is_dir:
+                    try:
+                        content = self.fs.read_bytes(full_path)
+                        info["size"] = len(content)
+                    except Exception:
+                        info["size"] = 0
+                else:
+                    info["size"] = 0
+
+                results.append(info)
+        except FileNotFoundError:
+            pass
+
+        results.sort(key=lambda x: x.get("path", ""))
+        return results
+
+    def read(self, file_path: str, offset: int = 0, limit: int = 2000) -> str:
+        """Read file with line numbers.
+
+        Args:
+            file_path: Absolute path to the file to read.
+            offset: Line number to start reading from (0-indexed).
+            limit: Maximum number of lines to read.
+
+        Returns:
+            Formatted content with line numbers, or error message.
+        """
+        norm_path = self._normalize_path(file_path)
+
+        if not self.fs.exists(norm_path):
+            return f"Error: File '{file_path}' not found"
+
+        if not self.fs.is_file(norm_path):
+            return f"Error: '{file_path}' is a directory, not a file"
+
+        try:
+            content = self.fs.read_text(norm_path)
+        except UnicodeDecodeError:
+            return f"Error: Binary file '{file_path}' cannot be read as text"
+        except Exception as e:
+            return f"Error reading file '{file_path}': {e}"
+
+        empty_msg = check_empty_content(content)
+        if empty_msg:
+            return empty_msg
+
+        lines = content.splitlines()
+        start_idx = offset
+        end_idx = min(start_idx + limit, len(lines))
+
+        if start_idx >= len(lines):
+            return f"Error: Line offset {offset} exceeds file length ({len(lines)} lines)"
+
+        selected_lines = lines[start_idx:end_idx]
+        return format_content_with_line_numbers(selected_lines, start_line=start_idx + 1)
+
+    def write(self, file_path: str, content: str) -> WriteResult:
+        """Create a new file (error if file exists).
+
+        Args:
+            file_path: Absolute path where the file should be created.
+            content: String content to write to the file.
+
+        Returns:
+            WriteResult with path on success, or error message on failure.
+            files_update is always None since we write directly to VirtualFilesystem.
+        """
+        norm_path = self._normalize_path(file_path)
+
+        if self.fs.exists(norm_path):
+            return WriteResult(
+                error=f"Cannot write to {file_path} because it already exists. "
+                "Use edit to modify existing files."
+            )
+
+        # Ensure parent directory exists
+        parent = "/".join(norm_path.split("/")[:-1]) or "/"
+        if parent != "/" and not self.fs.is_dir(parent):
+            try:
+                self.fs.mkdir(parent, parents=True, exist_ok=True)
+            except Exception as e:
+                return WriteResult(error=f"Cannot create parent directory: {e}")
+
+        try:
+            self.fs.write_text(norm_path, content)
+            return WriteResult(path=norm_path, files_update=None)
+        except Exception as e:
+            return WriteResult(error=f"Error writing file: {e}")
+
+    def edit(
+        self,
+        file_path: str,
+        old_string: str,
+        new_string: str,
+        replace_all: bool = False,
+    ) -> EditResult:
+        """Edit file by replacing strings.
+
+        Args:
+            file_path: Absolute path to the file to edit.
+            old_string: Exact string to search for and replace.
+            new_string: String to replace old_string with.
+            replace_all: If True, replace all occurrences.
+
+        Returns:
+            EditResult with path and occurrences on success, or error message.
+            files_update is always None since we write directly to VirtualFilesystem.
+        """
+        norm_path = self._normalize_path(file_path)
+
+        if not self.fs.exists(norm_path):
+            return EditResult(error=f"Error: File '{file_path}' not found")
+
+        if not self.fs.is_file(norm_path):
+            return EditResult(error=f"Error: '{file_path}' is a directory, not a file")
+
+        try:
+            content = self.fs.read_text(norm_path)
+        except UnicodeDecodeError:
+            return EditResult(error=f"Error: Binary file '{file_path}' cannot be edited as text")
+        except Exception as e:
+            return EditResult(error=f"Error reading file: {e}")
+
+        result = perform_string_replacement(content, old_string, new_string, replace_all)
+
+        if isinstance(result, str):
+            # Error message returned
+            return EditResult(error=result)
+
+        new_content, occurrences = result
+
+        try:
+            self.fs.write_text(norm_path, new_content)
+            return EditResult(path=norm_path, files_update=None, occurrences=occurrences)
+        except Exception as e:
+            return EditResult(error=f"Error writing file: {e}")
+
+    def grep_raw(
+        self,
+        pattern: str,
+        path: str | None = None,
+        glob: str | None = None,
+    ) -> list[GrepMatch] | str:
+        """Search file contents for pattern.
+
+        Args:
+            pattern: Literal string to search for (NOT regex per protocol,
+                but we use regex internally for flexibility).
+            path: Optional directory path to search in.
+            glob: Optional glob pattern to filter which files to search.
+
+        Returns:
+            List of GrepMatch dicts on success, or error string.
+        """
+        try:
+            regex = re.compile(re.escape(pattern))  # Escape for literal matching
+        except re.error as e:
+            return f"Invalid pattern: {e}"
+
+        norm_path = self._normalize_path(path or "/")
+        matches: list[GrepMatch] = []
+
+        def search_dir(dir_path: str) -> None:
+            if not self.fs.is_dir(dir_path):
+                return
+
+            try:
+                entries = self.fs.listdir(dir_path)
+            except Exception:
+                return
+
+            for name in entries:
+                if dir_path == "/":
+                    full_path = f"/{name}"
+                else:
+                    full_path = f"{dir_path}/{name}"
+
+                if self.fs.is_dir(full_path):
+                    search_dir(full_path)
+                elif self.fs.is_file(full_path):
+                    # Apply glob filter if provided
+                    if glob and not fnmatch.fnmatch(name, glob):
+                        continue
+
+                    try:
+                        content = self.fs.read_text(full_path)
+                        for line_num, line in enumerate(content.splitlines(), 1):
+                            if regex.search(line):
+                                matches.append({
+                                    "path": full_path,
+                                    "line": line_num,
+                                    "text": line,
+                                })
+                    except Exception:
+                        pass  # Skip binary or unreadable files
+
+        search_dir(norm_path)
+        return matches
+
+    def glob_info(self, pattern: str, path: str = "/") -> list[FileInfo]:
+        """Find files matching glob pattern.
+
+        Args:
+            pattern: Glob pattern with wildcards to match file paths.
+            path: Base directory to search from.
+
+        Returns:
+            List of FileInfo dicts for matching files.
+        """
+        norm_path = self._normalize_path(path)
+
+        if not self.fs.is_dir(norm_path):
+            return []
+
+        results: list[FileInfo] = []
+
+        def search_dir(dir_path: str, relative_base: str) -> None:
+            if not self.fs.is_dir(dir_path):
+                return
+
+            try:
+                entries = self.fs.listdir(dir_path)
+            except Exception:
+                return
+
+            for name in entries:
+                if dir_path == "/":
+                    full_path = f"/{name}"
+                else:
+                    full_path = f"{dir_path}/{name}"
+
+                if relative_base:
+                    relative_path = f"{relative_base}/{name}"
+                else:
+                    relative_path = name
+
+                is_dir = self.fs.is_dir(full_path)
+
+                # Check if this entry matches the pattern
+                if fnmatch.fnmatch(relative_path, pattern) or fnmatch.fnmatch(name, pattern):
+                    if is_dir:
+                        results.append({
+                            "path": full_path + "/",
+                            "is_dir": True,
+                            "size": 0,
+                        })
+                    else:
+                        try:
+                            size = len(self.fs.read_bytes(full_path))
+                        except Exception:
+                            size = 0
+                        results.append({
+                            "path": full_path,
+                            "is_dir": False,
+                            "size": size,
+                        })
+
+                # Recurse into directories for ** patterns
+                if is_dir and ("**" in pattern or "*" in pattern):
+                    search_dir(full_path, relative_path)
+
+        search_dir(norm_path, "")
+        return results
+
+    def upload_files(self, files: list[tuple[str, bytes]]) -> list[FileUploadResponse]:
+        """Upload binary files.
+
+        Args:
+            files: List of (path, content) tuples to upload.
+
+        Returns:
+            List of FileUploadResponse objects, one per input file.
+        """
+        responses: list[FileUploadResponse] = []
+
+        for path, content in files:
+            norm_path = self._normalize_path(path)
+
+            # Ensure parent directory exists
+            parent = "/".join(norm_path.split("/")[:-1]) or "/"
+            if parent != "/" and not self.fs.is_dir(parent):
+                try:
+                    self.fs.mkdir(parent, parents=True, exist_ok=True)
+                except Exception:
+                    responses.append(FileUploadResponse(path=path, error="invalid_path"))
+                    continue
+
+            try:
+                self.fs.write_bytes(norm_path, content)
+                responses.append(FileUploadResponse(path=path, error=None))
+            except Exception:
+                responses.append(FileUploadResponse(path=path, error="permission_denied"))
+
+        return responses
+
+    def download_files(self, paths: list[str]) -> list[FileDownloadResponse]:
+        """Download binary files.
+
+        Args:
+            paths: List of file paths to download.
+
+        Returns:
+            List of FileDownloadResponse objects, one per input path.
+        """
+        responses: list[FileDownloadResponse] = []
+
+        for path in paths:
+            norm_path = self._normalize_path(path)
+
+            if not self.fs.exists(norm_path):
+                responses.append(FileDownloadResponse(
+                    path=path, content=None, error="file_not_found"
+                ))
+                continue
+
+            if self.fs.is_dir(norm_path):
+                responses.append(FileDownloadResponse(
+                    path=path, content=None, error="is_directory"
+                ))
+                continue
+
+            try:
+                content = self.fs.read_bytes(norm_path)
+                responses.append(FileDownloadResponse(
+                    path=path, content=content, error=None
+                ))
+            except Exception:
+                responses.append(FileDownloadResponse(
+                    path=path, content=None, error="permission_denied"
+                ))
+
+        return responses
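
Taken together, the methods above mean the backend is wired up simply by sharing one VirtualFilesystem instance between the agent and the Dash callbacks. A minimal usage sketch follows; the module paths and the no-argument VirtualFilesystem constructor are assumptions, since the diff confirms only the class names and the relative import `from .virtual_fs import VirtualFilesystem`:

```python
# Assumed import locations; the real package layout of cowork-dash may differ.
from cowork_dash.virtual_fs import VirtualFilesystem
from cowork_dash.virtual_fs_backend import VirtualFilesystemBackend

fs = VirtualFilesystem()            # assumed constructor; signature not shown in this diff
backend = VirtualFilesystemBackend(fs)

# Writes land directly in the shared VirtualFilesystem, so Dash callbacks
# holding the same `fs` see the file immediately, with no checkpoint-state round trip.
backend.write("/notes/todo.md", "- [ ] review the new backend\n")
print(backend.read("/notes/todo.md"))              # line-numbered file contents
print(backend.grep_raw("backend", path="/notes"))  # literal-string matches per line
print(backend.ls_info("/notes"))                   # FileInfo entries with path/is_dir/size
```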