deepagents 0.2.8__tar.gz → 0.3.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (24)
  1. {deepagents-0.2.8 → deepagents-0.3.0}/PKG-INFO +4 -4
  2. {deepagents-0.2.8 → deepagents-0.3.0}/deepagents/backends/composite.py +234 -9
  3. {deepagents-0.2.8 → deepagents-0.3.0}/deepagents/backends/filesystem.py +5 -5
  4. {deepagents-0.2.8 → deepagents-0.3.0}/deepagents/backends/protocol.py +170 -21
  5. {deepagents-0.2.8 → deepagents-0.3.0}/deepagents/backends/sandbox.py +5 -4
  6. {deepagents-0.2.8 → deepagents-0.3.0}/deepagents/backends/state.py +5 -3
  7. {deepagents-0.2.8 → deepagents-0.3.0}/deepagents/backends/store.py +7 -5
  8. {deepagents-0.2.8 → deepagents-0.3.0}/deepagents/graph.py +18 -4
  9. {deepagents-0.2.8 → deepagents-0.3.0}/deepagents/middleware/filesystem.py +187 -22
  10. {deepagents-0.2.8 → deepagents-0.3.0}/deepagents.egg-info/PKG-INFO +4 -4
  11. deepagents-0.3.0/deepagents.egg-info/requires.txt +4 -0
  12. {deepagents-0.2.8 → deepagents-0.3.0}/pyproject.toml +8 -4
  13. deepagents-0.2.8/deepagents.egg-info/requires.txt +0 -4
  14. {deepagents-0.2.8 → deepagents-0.3.0}/README.md +0 -0
  15. {deepagents-0.2.8 → deepagents-0.3.0}/deepagents/__init__.py +0 -0
  16. {deepagents-0.2.8 → deepagents-0.3.0}/deepagents/backends/__init__.py +0 -0
  17. {deepagents-0.2.8 → deepagents-0.3.0}/deepagents/backends/utils.py +0 -0
  18. {deepagents-0.2.8 → deepagents-0.3.0}/deepagents/middleware/__init__.py +0 -0
  19. {deepagents-0.2.8 → deepagents-0.3.0}/deepagents/middleware/patch_tool_calls.py +0 -0
  20. {deepagents-0.2.8 → deepagents-0.3.0}/deepagents/middleware/subagents.py +0 -0
  21. {deepagents-0.2.8 → deepagents-0.3.0}/deepagents.egg-info/SOURCES.txt +0 -0
  22. {deepagents-0.2.8 → deepagents-0.3.0}/deepagents.egg-info/dependency_links.txt +0 -0
  23. {deepagents-0.2.8 → deepagents-0.3.0}/deepagents.egg-info/top_level.txt +0 -0
  24. {deepagents-0.2.8 → deepagents-0.3.0}/setup.cfg +0 -0
{deepagents-0.2.8 → deepagents-0.3.0}/PKG-INFO

@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: deepagents
- Version: 0.2.8
+ Version: 0.3.0
  Summary: General purpose 'deep agent' with sub-agent spawning, todo list capabilities, and mock file system. Built on LangGraph.
  License: MIT
  Project-URL: Homepage, https://docs.langchain.com/oss/python/deepagents/overview
@@ -11,9 +11,9 @@ Project-URL: Slack, https://www.langchain.com/join-community
  Project-URL: Reddit, https://www.reddit.com/r/LangChain/
  Requires-Python: <4.0,>=3.11
  Description-Content-Type: text/markdown
- Requires-Dist: langchain-anthropic<2.0.0,>=1.0.0
- Requires-Dist: langchain<2.0.0,>=1.0.2
- Requires-Dist: langchain-core<2.0.0,>=1.0.0
+ Requires-Dist: langchain-anthropic<2.0.0,>=1.2.0
+ Requires-Dist: langchain<2.0.0,>=1.1.0
+ Requires-Dist: langchain-core<2.0.0,>=1.1.0
  Requires-Dist: wcmatch

  # 🧠🤖Deep Agents
{deepagents-0.2.8 → deepagents-0.3.0}/deepagents/backends/composite.py

@@ -97,6 +97,43 @@ class CompositeBackend:
          # Path doesn't match a route: query only default backend
          return self.default.ls_info(path)

+     async def als_info(self, path: str) -> list[FileInfo]:
+         """Async version of ls_info."""
+         # Check if path matches a specific route
+         for route_prefix, backend in self.sorted_routes:
+             if path.startswith(route_prefix.rstrip("/")):
+                 # Query only the matching routed backend
+                 suffix = path[len(route_prefix) :]
+                 search_path = f"/{suffix}" if suffix else "/"
+                 infos = await backend.als_info(search_path)
+                 prefixed: list[FileInfo] = []
+                 for fi in infos:
+                     fi = dict(fi)
+                     fi["path"] = f"{route_prefix[:-1]}{fi['path']}"
+                     prefixed.append(fi)
+                 return prefixed
+
+         # At root, aggregate default and all routed backends
+         if path == "/":
+             results: list[FileInfo] = []
+             results.extend(await self.default.als_info(path))
+             for route_prefix, backend in self.sorted_routes:
+                 # Add the route itself as a directory (e.g., /memories/)
+                 results.append(
+                     {
+                         "path": route_prefix,
+                         "is_dir": True,
+                         "size": 0,
+                         "modified_at": "",
+                     }
+                 )
+
+             results.sort(key=lambda x: x.get("path", ""))
+             return results
+
+         # Path doesn't match a route: query only default backend
+         return await self.default.als_info(path)
+
      def read(
          self,
          file_path: str,
@@ -106,14 +143,26 @@
          """Read file content, routing to appropriate backend.

          Args:
-             file_path: Absolute file path
-             offset: Line offset to start reading from (0-indexed)
-             limit: Maximum number of lines to readReturns:
+             file_path: Absolute file path.
+             offset: Line offset to start reading from (0-indexed).
+             limit: Maximum number of lines to read.
+
+         Returns:
              Formatted file content with line numbers, or error message.
          """
          backend, stripped_key = self._get_backend_and_key(file_path)
          return backend.read(stripped_key, offset=offset, limit=limit)

+     async def aread(
+         self,
+         file_path: str,
+         offset: int = 0,
+         limit: int = 2000,
+     ) -> str:
+         """Async version of read."""
+         backend, stripped_key = self._get_backend_and_key(file_path)
+         return await backend.aread(stripped_key, offset=offset, limit=limit)
+
      def grep_raw(
          self,
          pattern: str,
@@ -146,6 +195,39 @@

          return all_matches

+     async def agrep_raw(
+         self,
+         pattern: str,
+         path: str | None = None,
+         glob: str | None = None,
+     ) -> list[GrepMatch] | str:
+         """Async version of grep_raw."""
+         # If path targets a specific route, search only that backend
+         for route_prefix, backend in self.sorted_routes:
+             if path is not None and path.startswith(route_prefix.rstrip("/")):
+                 search_path = path[len(route_prefix) - 1 :]
+                 raw = await backend.agrep_raw(pattern, search_path if search_path else "/", glob)
+                 if isinstance(raw, str):
+                     return raw
+                 return [{**m, "path": f"{route_prefix[:-1]}{m['path']}"} for m in raw]
+
+         # Otherwise, search default and all routed backends and merge
+         all_matches: list[GrepMatch] = []
+         raw_default = await self.default.agrep_raw(pattern, path, glob)  # type: ignore[attr-defined]
+         if isinstance(raw_default, str):
+             # This happens if error occurs
+             return raw_default
+         all_matches.extend(raw_default)
+
+         for route_prefix, backend in self.routes.items():
+             raw = await backend.agrep_raw(pattern, "/", glob)
+             if isinstance(raw, str):
+                 # This happens if error occurs
+                 return raw
+             all_matches.extend({**m, "path": f"{route_prefix[:-1]}{m['path']}"} for m in raw)
+
+         return all_matches
+
      def glob_info(self, pattern: str, path: str = "/") -> list[FileInfo]:
          results: list[FileInfo] = []

@@ -167,6 +249,28 @@
          results.sort(key=lambda x: x.get("path", ""))
          return results

+     async def aglob_info(self, pattern: str, path: str = "/") -> list[FileInfo]:
+         """Async version of glob_info."""
+         results: list[FileInfo] = []
+
+         # Route based on path, not pattern
+         for route_prefix, backend in self.sorted_routes:
+             if path.startswith(route_prefix.rstrip("/")):
+                 search_path = path[len(route_prefix) - 1 :]
+                 infos = await backend.aglob_info(pattern, search_path if search_path else "/")
+                 return [{**fi, "path": f"{route_prefix[:-1]}{fi['path']}"} for fi in infos]
+
+         # Path doesn't match any specific route - search default backend AND all routed backends
+         results.extend(await self.default.aglob_info(pattern, path))
+
+         for route_prefix, backend in self.routes.items():
+             infos = await backend.aglob_info(pattern, "/")
+             results.extend({**fi, "path": f"{route_prefix[:-1]}{fi['path']}"} for fi in infos)
+
+         # Deterministic ordering
+         results.sort(key=lambda x: x.get("path", ""))
+         return results
+
      def write(
          self,
          file_path: str,
@@ -175,8 +279,10 @@
          """Create a new file, routing to appropriate backend.

          Args:
-             file_path: Absolute file path
-             content: File content as a stringReturns:
+             file_path: Absolute file path.
+             content: File content as a string.
+
+         Returns:
              Success message or Command object, or error if file already exists.
          """
          backend, stripped_key = self._get_backend_and_key(file_path)
@@ -194,6 +300,27 @@
                  pass
          return res

+     async def awrite(
+         self,
+         file_path: str,
+         content: str,
+     ) -> WriteResult:
+         """Async version of write."""
+         backend, stripped_key = self._get_backend_and_key(file_path)
+         res = await backend.awrite(stripped_key, content)
+         # If this is a state-backed update and default has state, merge so listings reflect changes
+         if res.files_update:
+             try:
+                 runtime = getattr(self.default, "runtime", None)
+                 if runtime is not None:
+                     state = runtime.state
+                     files = state.get("files", {})
+                     files.update(res.files_update)
+                     state["files"] = files
+             except Exception:
+                 pass
+         return res
+
      def edit(
          self,
          file_path: str,
@@ -204,10 +331,12 @@
          """Edit a file, routing to appropriate backend.

          Args:
-             file_path: Absolute file path
-             old_string: String to find and replace
-             new_string: Replacement string
-             replace_all: If True, replace all occurrencesReturns:
+             file_path: Absolute file path.
+             old_string: String to find and replace.
+             new_string: Replacement string.
+             replace_all: If True, replace all occurrences.
+
+         Returns:
              Success message or Command object, or error message on failure.
          """
          backend, stripped_key = self._get_backend_and_key(file_path)
@@ -224,6 +353,28 @@
                  pass
          return res

+     async def aedit(
+         self,
+         file_path: str,
+         old_string: str,
+         new_string: str,
+         replace_all: bool = False,
+     ) -> EditResult:
+         """Async version of edit."""
+         backend, stripped_key = self._get_backend_and_key(file_path)
+         res = await backend.aedit(stripped_key, old_string, new_string, replace_all=replace_all)
+         if res.files_update:
+             try:
+                 runtime = getattr(self.default, "runtime", None)
+                 if runtime is not None:
+                     state = runtime.state
+                     files = state.get("files", {})
+                     files.update(res.files_update)
+                     state["files"] = files
+             except Exception:
+                 pass
+         return res
+
      def execute(
          self,
          command: str,
@@ -252,6 +403,21 @@
              "To enable execution, provide a default backend that implements SandboxBackendProtocol."
          )

+     async def aexecute(
+         self,
+         command: str,
+     ) -> ExecuteResponse:
+         """Async version of execute."""
+         if isinstance(self.default, SandboxBackendProtocol):
+             return await self.default.aexecute(command)
+
+         # This shouldn't be reached if the runtime check in the execute tool works correctly,
+         # but we include it as a safety fallback.
+         raise NotImplementedError(
+             "Default backend doesn't support command execution (SandboxBackendProtocol). "
+             "To enable execution, provide a default backend that implements SandboxBackendProtocol."
+         )
+
      def upload_files(self, files: list[tuple[str, bytes]]) -> list[FileUploadResponse]:
          """Upload multiple files, batching by backend for efficiency.

@@ -295,6 +461,36 @@

          return results  # type: ignore[return-value]

+     async def aupload_files(self, files: list[tuple[str, bytes]]) -> list[FileUploadResponse]:
+         """Async version of upload_files."""
+         # Pre-allocate result list
+         results: list[FileUploadResponse | None] = [None] * len(files)
+
+         # Group files by backend, tracking original indices
+         backend_batches: dict[BackendProtocol, list[tuple[int, str, bytes]]] = defaultdict(list)
+
+         for idx, (path, content) in enumerate(files):
+             backend, stripped_path = self._get_backend_and_key(path)
+             backend_batches[backend].append((idx, stripped_path, content))
+
+         # Process each backend's batch
+         for backend, batch in backend_batches.items():
+             # Extract data for backend call
+             indices, stripped_paths, contents = zip(*batch, strict=False)
+             batch_files = list(zip(stripped_paths, contents, strict=False))
+
+             # Call backend once with all its files
+             batch_responses = await backend.aupload_files(batch_files)
+
+             # Place responses at original indices with original paths
+             for i, orig_idx in enumerate(indices):
+                 results[orig_idx] = FileUploadResponse(
+                     path=files[orig_idx][0],  # Original path
+                     error=batch_responses[i].error if i < len(batch_responses) else None,
+                 )
+
+         return results  # type: ignore[return-value]
+
      def download_files(self, paths: list[str]) -> list[FileDownloadResponse]:
          """Download multiple files, batching by backend for efficiency.

@@ -334,3 +530,32 @@
                  )

          return results  # type: ignore[return-value]
+
+     async def adownload_files(self, paths: list[str]) -> list[FileDownloadResponse]:
+         """Async version of download_files."""
+         # Pre-allocate result list
+         results: list[FileDownloadResponse | None] = [None] * len(paths)
+
+         backend_batches: dict[BackendProtocol, list[tuple[int, str]]] = defaultdict(list)
+
+         for idx, path in enumerate(paths):
+             backend, stripped_path = self._get_backend_and_key(path)
+             backend_batches[backend].append((idx, stripped_path))
+
+         # Process each backend's batch
+         for backend, batch in backend_batches.items():
+             # Extract data for backend call
+             indices, stripped_paths = zip(*batch, strict=False)
+
+             # Call backend once with all its paths
+             batch_responses = await backend.adownload_files(list(stripped_paths))
+
+             # Place responses at original indices with original paths
+             for i, orig_idx in enumerate(indices):
+                 results[orig_idx] = FileDownloadResponse(
+                     path=paths[orig_idx],  # Original path
+                     content=batch_responses[i].content if i < len(batch_responses) else None,
+                     error=batch_responses[i].error if i < len(batch_responses) else None,
+                 )
+
+         return results  # type: ignore[return-value]
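All of the routed methods above share one path-rewriting rule: strip the route prefix before delegating to the routed backend, then re-attach it to every path on the way out, so callers only ever see composite paths. A minimal standalone sketch of that rule (the names below are hypothetical stand-ins, not the package's internals):

```python
# Minimal sketch of CompositeBackend-style prefix routing; `routes`, `route`,
# and `reprefix` are hypothetical illustrations, not the package's API.
routes = {"/memories/": "store-backend"}

def route(path: str) -> tuple[str, str]:
    """Return (backend, key): the key has the route prefix stripped."""
    for prefix, backend in routes.items():
        if path.startswith(prefix):
            return backend, "/" + path[len(prefix):]
    return "default-backend", path

def reprefix(prefix: str, result_path: str) -> str:
    """Re-attach the route prefix to a path coming back from a routed backend."""
    return prefix[:-1] + result_path  # "/memories" + "/notes.txt"

print(route("/memories/notes.txt"))          # ('store-backend', '/notes.txt')
print(route("/scratch.txt"))                 # ('default-backend', '/scratch.txt')
print(reprefix("/memories/", "/notes.txt"))  # /memories/notes.txt
```

The upload/download pair applies the same idea, additionally grouping requests into one call per backend and scattering the responses back to their original indices so response order matches input order.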
{deepagents-0.2.8 → deepagents-0.3.0}/deepagents/backends/filesystem.py

@@ -202,9 +202,11 @@ class FilesystemBackend(BackendProtocol):
          """Read file content with line numbers.

          Args:
-             file_path: Absolute or relative file path
-             offset: Line offset to start reading from (0-indexed)
-             limit: Maximum number of lines to readReturns:
+             file_path: Absolute or relative file path.
+             offset: Line offset to start reading from (0-indexed).
+             limit: Maximum number of lines to read.
+
+         Returns:
              Formatted file content with line numbers, or error message.
          """
          resolved_path = self._resolve_path(file_path)
@@ -303,8 +305,6 @@
          except (OSError, UnicodeDecodeError, UnicodeEncodeError) as e:
              return EditResult(error=f"Error editing file '{file_path}': {e}")

-     # Removed legacy grep() convenience to keep lean surface
-
      def grep_raw(
          self,
          pattern: str,
{deepagents-0.2.8 → deepagents-0.3.0}/deepagents/backends/protocol.py

@@ -5,9 +5,11 @@ must follow. Backends can store files in different locations (state, filesystem,
  database, etc.) and provide a uniform interface for file operations.
  """

+ import abc
+ import asyncio
  from collections.abc import Callable
  from dataclasses import dataclass
- from typing import Any, Literal, NotRequired, Protocol, TypeAlias, runtime_checkable
+ from typing import Any, Literal, NotRequired, TypeAlias

  from langchain.tools import ToolRuntime
  from typing_extensions import TypedDict
@@ -156,8 +158,7 @@ class EditResult:
      occurrences: int | None = None


- @runtime_checkable
- class BackendProtocol(Protocol):
+ class BackendProtocol(abc.ABC):
      """Protocol for pluggable memory backends (single, unified).

      Backends can store files in different locations (state, filesystem, database, etc.)
@@ -172,8 +173,23 @@ class BackendProtocol(Protocol):
      """

      def ls_info(self, path: str) -> list["FileInfo"]:
-         """Structured listing with file metadata."""
-         ...
+         """List all files in a directory with metadata.
+
+         Args:
+             path: Absolute path to the directory to list. Must start with '/'.
+
+         Returns:
+             List of FileInfo dicts containing file metadata:
+
+             - `path` (required): Absolute file path
+             - `is_dir` (optional): True if directory
+             - `size` (optional): File size in bytes
+             - `modified_at` (optional): ISO 8601 timestamp
+         """
+
+     async def als_info(self, path: str) -> list["FileInfo"]:
+         """Async version of ls_info."""
+         return await asyncio.to_thread(self.ls_info, path)

      def read(
          self,
@@ -181,8 +197,35 @@ class BackendProtocol(Protocol):
          offset: int = 0,
          limit: int = 2000,
      ) -> str:
-         """Read file content with line numbers or an error string."""
-         ...
+         """Read file content with line numbers.
+
+         Args:
+             file_path: Absolute path to the file to read. Must start with '/'.
+             offset: Line number to start reading from (0-indexed). Default: 0.
+             limit: Maximum number of lines to read. Default: 2000.
+
+         Returns:
+             String containing file content formatted with line numbers (cat -n format),
+             starting at line 1. Lines longer than 2000 characters are truncated.
+
+             Returns an error string if the file doesn't exist or can't be read.
+
+         !!! note
+             - Use pagination (offset/limit) for large files to avoid context overflow
+             - First scan: `read(path, limit=100)` to see file structure
+             - Read more: `read(path, offset=100, limit=200)` for next section
+             - ALWAYS read a file before editing it
+             - If file exists but is empty, you'll receive a system reminder warning
+         """
+
+     async def aread(
+         self,
+         file_path: str,
+         offset: int = 0,
+         limit: int = 2000,
+     ) -> str:
+         """Async version of read."""
+         return await asyncio.to_thread(self.read, file_path, offset, limit)

      def grep_raw(
          self,
@@ -190,20 +233,94 @@
          path: str | None = None,
          glob: str | None = None,
      ) -> list["GrepMatch"] | str:
-         """Structured search results or error string for invalid input."""
-         ...
+         """Search for a literal text pattern in files.
+
+         Args:
+             pattern: Literal string to search for (NOT regex).
+                 Performs exact substring matching within file content.
+                 Example: "TODO" matches any line containing "TODO"
+
+             path: Optional directory path to search in.
+                 If None, searches in current working directory.
+                 Example: "/workspace/src"
+
+             glob: Optional glob pattern to filter which FILES to search.
+                 Filters by filename/path, not content.
+                 Supports standard glob wildcards:
+                 - `*` matches any characters in filename
+                 - `**` matches any directories recursively
+                 - `?` matches single character
+                 - `[abc]` matches one character from set
+
+                 Examples:
+                 - "*.py" - only search Python files
+                 - "**/*.txt" - search all .txt files recursively
+                 - "src/**/*.js" - search JS files under src/
+                 - "test[0-9].txt" - search test0.txt, test1.txt, etc.
+
+         Returns:
+             On success: list[GrepMatch] with structured results containing:
+             - path: Absolute file path
+             - line: Line number (1-indexed)
+             - text: Full line content containing the match
+
+             On error: str with error message (e.g., invalid path, permission denied)
+         """
+
+     async def agrep_raw(
+         self,
+         pattern: str,
+         path: str | None = None,
+         glob: str | None = None,
+     ) -> list["GrepMatch"] | str:
+         """Async version of grep_raw."""
+         return await asyncio.to_thread(self.grep_raw, pattern, path, glob)

      def glob_info(self, pattern: str, path: str = "/") -> list["FileInfo"]:
-         """Structured glob matching returning FileInfo dicts."""
-         ...
+         """Find files matching a glob pattern.
+
+         Args:
+             pattern: Glob pattern with wildcards to match file paths.
+                 Supports standard glob syntax:
+                 - `*` matches any characters within a filename/directory
+                 - `**` matches any directories recursively
+                 - `?` matches a single character
+                 - `[abc]` matches one character from set
+
+             path: Base directory to search from. Default: "/" (root).
+                 The pattern is applied relative to this path.
+
+         Returns:
+             list of FileInfo
+         """
+
+     async def aglob_info(self, pattern: str, path: str = "/") -> list["FileInfo"]:
+         """Async version of glob_info."""
+         return await asyncio.to_thread(self.glob_info, pattern, path)

      def write(
          self,
          file_path: str,
          content: str,
      ) -> WriteResult:
-         """Create a new file. Returns WriteResult; error populated on failure."""
-         ...
+         """Write content to a new file in the filesystem, error if file exists.
+
+         Args:
+             file_path: Absolute path where the file should be created.
+                 Must start with '/'.
+             content: String content to write to the file.
+
+         Returns:
+             WriteResult
+         """
+
+     async def awrite(
+         self,
+         file_path: str,
+         content: str,
+     ) -> WriteResult:
+         """Async version of write."""
+         return await asyncio.to_thread(self.write, file_path, content)

      def edit(
          self,
@@ -212,8 +329,30 @@
          new_string: str,
          replace_all: bool = False,
      ) -> EditResult:
-         """Edit a file by replacing string occurrences. Returns EditResult."""
-         ...
+         """Perform exact string replacements in an existing file.
+
+         Args:
+             file_path: Absolute path to the file to edit. Must start with '/'.
+             old_string: Exact string to search for and replace.
+                 Must match exactly including whitespace and indentation.
+             new_string: String to replace old_string with.
+                 Must be different from old_string.
+             replace_all: If True, replace all occurrences. If False (default),
+                 old_string must be unique in the file or the edit fails.
+
+         Returns:
+             EditResult
+         """
+
+     async def aedit(
+         self,
+         file_path: str,
+         old_string: str,
+         new_string: str,
+         replace_all: bool = False,
+     ) -> EditResult:
+         """Async version of edit."""
+         return await asyncio.to_thread(self.edit, file_path, old_string, new_string, replace_all)

      def upload_files(self, files: list[tuple[str, bytes]]) -> list[FileUploadResponse]:
          """Upload multiple files to the sandbox.
@@ -239,7 +378,10 @@
              )
              ```
          """
-         ...
+
+     async def aupload_files(self, files: list[tuple[str, bytes]]) -> list[FileUploadResponse]:
+         """Async version of upload_files."""
+         return await asyncio.to_thread(self.upload_files, files)

      def download_files(self, paths: list[str]) -> list[FileDownloadResponse]:
          """Download multiple files from the sandbox.
@@ -255,7 +397,10 @@
              Response order matches input order (response[i] for paths[i]).
              Check the error field to determine success/failure per file.
          """
-         ...
+
+     async def adownload_files(self, paths: list[str]) -> list[FileDownloadResponse]:
+         """Async version of download_files."""
+         return await asyncio.to_thread(self.download_files, paths)


  @dataclass
@@ -275,8 +420,7 @@ class ExecuteResponse:
      """Whether the output was truncated due to backend limitations."""


- @runtime_checkable
- class SandboxBackendProtocol(BackendProtocol, Protocol):
+ class SandboxBackendProtocol(BackendProtocol):
      """Protocol for sandboxed backends with isolated runtime.

      Sandboxed backends run in isolated environments (e.g., separate processes,
@@ -297,12 +441,17 @@ class SandboxBackendProtocol(BackendProtocol, Protocol):
          Returns:
              ExecuteResponse with combined output, exit code, optional signal, and truncation flag.
          """
-         ...
+
+     async def aexecute(
+         self,
+         command: str,
+     ) -> ExecuteResponse:
+         """Async version of execute."""
+         return await asyncio.to_thread(self.execute, command)

      @property
      def id(self) -> str:
          """Unique identifier for the sandbox backend instance."""
-         ...


  BackendFactory: TypeAlias = Callable[[ToolRuntime], BackendProtocol]
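The switch from a `@runtime_checkable` `Protocol` to `abc.ABC` is what makes the `a`-prefixed defaults above possible: each async method falls back to running its sync twin in a worker thread via `asyncio.to_thread`, so a sync-only backend inherits a working async surface without writing any async code. A toy model of the same mechanism (the classes below are illustrative, not the package's):

```python
import abc
import asyncio


class SyncFirstBackend(abc.ABC):
    """Toy analogue of the new BackendProtocol shape: subclasses supply the
    sync method; the async twin defaults to asyncio.to_thread."""

    @abc.abstractmethod
    def ls_info(self, path: str) -> list[str]: ...

    async def als_info(self, path: str) -> list[str]:
        # Inherited default: run the sync implementation off the event loop.
        return await asyncio.to_thread(self.ls_info, path)


class ToyBackend(SyncFirstBackend):
    def ls_info(self, path: str) -> list[str]:
        return [f"{path.rstrip('/')}/a.txt"]


print(asyncio.run(ToyBackend().als_info("/")))  # ['/a.txt'] with no async code in ToyBackend
```

Backends with a genuinely async transport (e.g., a network store) can still override the `a`-methods directly, as CompositeBackend does.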
{deepagents-0.2.8 → deepagents-0.3.0}/deepagents/backends/sandbox.py

@@ -9,6 +9,7 @@ from __future__ import annotations

  import base64
  import json
+ import shlex
  from abc import ABC, abstractmethod

  from deepagents.backends.protocol import (
@@ -272,10 +273,10 @@ except PermissionError:
          glob: str | None = None,
      ) -> list[GrepMatch] | str:
          """Structured search results or error string for invalid input."""
-         search_path = path or "."
+         search_path = shlex.quote(path or ".")

          # Build grep command to get structured output
-         grep_opts = "-rHn"  # recursive, with filename, with line number
+         grep_opts = "-rHnF"  # recursive, with filename, with line number, fixed-strings (literal)

          # Add glob pattern if specified
          glob_pattern = ""
@@ -283,9 +284,9 @@ except PermissionError:
              glob_pattern = f"--include='{glob}'"

          # Escape pattern for shell
-         pattern_escaped = pattern.replace("'", "'\\\\''")
+         pattern_escaped = shlex.quote(pattern)

-         cmd = f"grep {grep_opts} {glob_pattern} -e '{pattern_escaped}' '{search_path}' 2>/dev/null || true"
+         cmd = f"grep {grep_opts} {glob_pattern} -e {pattern_escaped} {search_path} 2>/dev/null || true"
          result = self.execute(cmd)

          output = result.output.rstrip()
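Both sandbox changes harden the same command line: `shlex.quote` turns the user-controlled pattern and search path into single, inert shell words, and grep's `-F` flag treats the pattern as a literal string rather than a regex. A quick standalone illustration (the pattern and path here are made-up inputs):

```python
import shlex

pattern = "'; rm -rf / #"    # would have escaped the old hand-rolled single quotes
search_path = "/work space"  # embedded space no longer splits the argument

cmd = f"grep -rHnF -e {shlex.quote(pattern)} {shlex.quote(search_path)} 2>/dev/null || true"
print(cmd)
# grep -rHnF -e ''"'"'; rm -rf / #' '/work space' 2>/dev/null || true
```

The quoted pattern reaches grep as one argument, so the injected `rm` never runs, and `-F` guarantees characters like `[` or `*` in a search string match themselves.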
{deepagents-0.2.8 → deepagents-0.3.0}/deepagents/backends/state.py

@@ -99,9 +99,11 @@ class StateBackend(BackendProtocol):
          """Read file content with line numbers.

          Args:
-             file_path: Absolute file path
-             offset: Line offset to start reading from (0-indexed)
-             limit: Maximum number of lines to readReturns:
+             file_path: Absolute file path.
+             offset: Line offset to start reading from (0-indexed).
+             limit: Maximum number of lines to read.
+
+         Returns:
              Formatted file content with line numbers, or error message.
          """
          files = self.runtime.state.get("files", {})
{deepagents-0.2.8 → deepagents-0.3.0}/deepagents/backends/store.py

@@ -38,17 +38,18 @@ class StoreBackend(BackendProtocol):
          """Initialize StoreBackend with runtime.

          Args:
+             runtime: The ToolRuntime instance providing store access and configuration.
          """
          self.runtime = runtime

      def _get_store(self) -> BaseStore:
          """Get the store instance.

-         Args:Returns:
-             BaseStore instance
+         Returns:
+             BaseStore instance from the runtime.

          Raises:
-             ValueError: If no store is available or runtime not provided
+             ValueError: If no store is available in the runtime.
          """
          store = self.runtime.store
          if store is None:
@@ -257,8 +258,9 @@ class StoreBackend(BackendProtocol):
          """Read file content with line numbers.

          Args:
-             file_path: Absolute file path
-             offset: Line offset to start reading from (0-indexed)limit: Maximum number of lines to read
+             file_path: Absolute file path.
+             offset: Line offset to start reading from (0-indexed).
+             limit: Maximum number of lines to read.

          Returns:
              Formatted file content with line numbers, or error message.
{deepagents-0.2.8 → deepagents-0.3.0}/deepagents/graph.py

@@ -98,6 +98,18 @@ def create_deep_agent(
      if model is None:
          model = get_default_model()

+     if (
+         model.profile is not None
+         and isinstance(model.profile, dict)
+         and "max_input_tokens" in model.profile
+         and isinstance(model.profile["max_input_tokens"], int)
+     ):
+         trigger = ("fraction", 0.85)
+         keep = ("fraction", 0.10)
+     else:
+         trigger = ("tokens", 170000)
+         keep = ("messages", 6)
+
      deepagent_middleware = [
          TodoListMiddleware(),
          FilesystemMiddleware(backend=backend),
@@ -110,8 +122,9 @@
          FilesystemMiddleware(backend=backend),
          SummarizationMiddleware(
              model=model,
-             max_tokens_before_summary=170000,
-             messages_to_keep=6,
+             trigger=trigger,
+             keep=keep,
+             trim_tokens_to_summarize=None,
          ),
          AnthropicPromptCachingMiddleware(unsupported_model_behavior="ignore"),
          PatchToolCallsMiddleware(),
@@ -121,8 +134,9 @@
          ),
          SummarizationMiddleware(
              model=model,
-             max_tokens_before_summary=170000,
-             messages_to_keep=6,
+             trigger=trigger,
+             keep=keep,
+             trim_tokens_to_summarize=None,
          ),
          AnthropicPromptCachingMiddleware(unsupported_model_behavior="ignore"),
          PatchToolCallsMiddleware(),
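The net effect of the graph.py change: when the model profile declares an integer `max_input_tokens`, summarization triggers at 85% of the context window and keeps the most recent 10%; otherwise it falls back to a fixed 170,000-token trigger keeping the last 6 messages. Restated as a standalone helper (the helper name is hypothetical; the tuple values are the ones used above):

```python
# Hypothetical helper condensing create_deep_agent's trigger/keep selection.
def summarization_policy(profile: dict | None) -> tuple[tuple, tuple]:
    if isinstance(profile, dict) and isinstance(profile.get("max_input_tokens"), int):
        return ("fraction", 0.85), ("fraction", 0.10)  # trigger at 85%, keep last 10%
    return ("tokens", 170000), ("messages", 6)         # absolute fallback

print(summarization_policy({"max_input_tokens": 200000}))  # fraction-based thresholds
print(summarization_policy(None))                          # fixed-token fallback
```

Fraction-based thresholds scale with whatever context window the chosen model advertises, which is why the hard-coded 170,000-token value is now only a fallback.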
{deepagents-0.2.8 → deepagents-0.3.0}/deepagents/middleware/filesystem.py

@@ -15,7 +15,7 @@ from langchain.agents.middleware.types import (
  from langchain.tools import ToolRuntime
  from langchain.tools.tool_node import ToolCallRequest
  from langchain_core.messages import ToolMessage
- from langchain_core.tools import BaseTool, tool
+ from langchain_core.tools import BaseTool, StructuredTool
  from langgraph.types import Command
  from typing_extensions import TypedDict

@@ -325,8 +325,8 @@ def _ls_tool_generator(
      """
      tool_description = custom_description or LIST_FILES_TOOL_DESCRIPTION

-     @tool(description=tool_description)
-     def ls(runtime: ToolRuntime[None, FilesystemState], path: str) -> str:
+     def sync_ls(runtime: ToolRuntime[None, FilesystemState], path: str) -> str:
+         """Synchronous wrapper for ls tool."""
          resolved_backend = _get_backend(backend, runtime)
          validated_path = _validate_path(path)
          infos = resolved_backend.ls_info(validated_path)
@@ -334,7 +334,21 @@
          result = truncate_if_too_long(paths)
          return str(result)

-     return ls
+     async def async_ls(runtime: ToolRuntime[None, FilesystemState], path: str) -> str:
+         """Asynchronous wrapper for ls tool."""
+         resolved_backend = _get_backend(backend, runtime)
+         validated_path = _validate_path(path)
+         infos = await resolved_backend.als_info(validated_path)
+         paths = [fi.get("path", "") for fi in infos]
+         result = truncate_if_too_long(paths)
+         return str(result)
+
+     return StructuredTool.from_function(
+         name="ls",
+         description=tool_description,
+         func=sync_ls,
+         coroutine=async_ls,
+     )


  def _read_file_tool_generator(
@@ -352,18 +366,34 @@
      """
      tool_description = custom_description or READ_FILE_TOOL_DESCRIPTION

-     @tool(description=tool_description)
-     def read_file(
+     def sync_read_file(
          file_path: str,
          runtime: ToolRuntime[None, FilesystemState],
          offset: int = DEFAULT_READ_OFFSET,
          limit: int = DEFAULT_READ_LIMIT,
      ) -> str:
+         """Synchronous wrapper for read_file tool."""
          resolved_backend = _get_backend(backend, runtime)
          file_path = _validate_path(file_path)
          return resolved_backend.read(file_path, offset=offset, limit=limit)

-     return read_file
+     async def async_read_file(
+         file_path: str,
+         runtime: ToolRuntime[None, FilesystemState],
+         offset: int = DEFAULT_READ_OFFSET,
+         limit: int = DEFAULT_READ_LIMIT,
+     ) -> str:
+         """Asynchronous wrapper for read_file tool."""
+         resolved_backend = _get_backend(backend, runtime)
+         file_path = _validate_path(file_path)
+         return await resolved_backend.aread(file_path, offset=offset, limit=limit)
+
+     return StructuredTool.from_function(
+         name="read_file",
+         description=tool_description,
+         func=sync_read_file,
+         coroutine=async_read_file,
+     )


  def _write_file_tool_generator(
@@ -381,12 +411,12 @@
      """
      tool_description = custom_description or WRITE_FILE_TOOL_DESCRIPTION

-     @tool(description=tool_description)
-     def write_file(
+     def sync_write_file(
          file_path: str,
          content: str,
          runtime: ToolRuntime[None, FilesystemState],
      ) -> Command | str:
+         """Synchronous wrapper for write_file tool."""
          resolved_backend = _get_backend(backend, runtime)
          file_path = _validate_path(file_path)
          res: WriteResult = resolved_backend.write(file_path, content)
@@ -407,7 +437,38 @@
              )
          return f"Updated file {res.path}"

-     return write_file
+     async def async_write_file(
+         file_path: str,
+         content: str,
+         runtime: ToolRuntime[None, FilesystemState],
+     ) -> Command | str:
+         """Asynchronous wrapper for write_file tool."""
+         resolved_backend = _get_backend(backend, runtime)
+         file_path = _validate_path(file_path)
+         res: WriteResult = await resolved_backend.awrite(file_path, content)
+         if res.error:
+             return res.error
+         # If backend returns state update, wrap into Command with ToolMessage
+         if res.files_update is not None:
+             return Command(
+                 update={
+                     "files": res.files_update,
+                     "messages": [
+                         ToolMessage(
+                             content=f"Updated file {res.path}",
+                             tool_call_id=runtime.tool_call_id,
+                         )
+                     ],
+                 }
+             )
+         return f"Updated file {res.path}"
+
+     return StructuredTool.from_function(
+         name="write_file",
+         description=tool_description,
+         func=sync_write_file,
+         coroutine=async_write_file,
+     )


  def _edit_file_tool_generator(
@@ -425,8 +486,7 @@
      """
      tool_description = custom_description or EDIT_FILE_TOOL_DESCRIPTION

-     @tool(description=tool_description)
-     def edit_file(
+     def sync_edit_file(
          file_path: str,
          old_string: str,
          new_string: str,
@@ -434,6 +494,7 @@
          *,
          replace_all: bool = False,
      ) -> Command | str:
+         """Synchronous wrapper for edit_file tool."""
          resolved_backend = _get_backend(backend, runtime)
          file_path = _validate_path(file_path)
          res: EditResult = resolved_backend.edit(file_path, old_string, new_string, replace_all=replace_all)
@@ -453,7 +514,40 @@
              )
          return f"Successfully replaced {res.occurrences} instance(s) of the string in '{res.path}'"

-     return edit_file
+     async def async_edit_file(
+         file_path: str,
+         old_string: str,
+         new_string: str,
+         runtime: ToolRuntime[None, FilesystemState],
+         *,
+         replace_all: bool = False,
+     ) -> Command | str:
+         """Asynchronous wrapper for edit_file tool."""
+         resolved_backend = _get_backend(backend, runtime)
+         file_path = _validate_path(file_path)
+         res: EditResult = await resolved_backend.aedit(file_path, old_string, new_string, replace_all=replace_all)
+         if res.error:
+             return res.error
+         if res.files_update is not None:
+             return Command(
+                 update={
+                     "files": res.files_update,
+                     "messages": [
+                         ToolMessage(
+                             content=f"Successfully replaced {res.occurrences} instance(s) of the string in '{res.path}'",
+                             tool_call_id=runtime.tool_call_id,
+                         )
+                     ],
+                 }
+             )
+         return f"Successfully replaced {res.occurrences} instance(s) of the string in '{res.path}'"
+
+     return StructuredTool.from_function(
+         name="edit_file",
+         description=tool_description,
+         func=sync_edit_file,
+         coroutine=async_edit_file,
+     )


  def _glob_tool_generator(
@@ -471,15 +565,28 @@
      """
      tool_description = custom_description or GLOB_TOOL_DESCRIPTION

-     @tool(description=tool_description)
-     def glob(pattern: str, runtime: ToolRuntime[None, FilesystemState], path: str = "/") -> str:
+     def sync_glob(pattern: str, runtime: ToolRuntime[None, FilesystemState], path: str = "/") -> str:
+         """Synchronous wrapper for glob tool."""
          resolved_backend = _get_backend(backend, runtime)
          infos = resolved_backend.glob_info(pattern, path=path)
          paths = [fi.get("path", "") for fi in infos]
          result = truncate_if_too_long(paths)
          return str(result)

-     return glob
+     async def async_glob(pattern: str, runtime: ToolRuntime[None, FilesystemState], path: str = "/") -> str:
+         """Asynchronous wrapper for glob tool."""
+         resolved_backend = _get_backend(backend, runtime)
+         infos = await resolved_backend.aglob_info(pattern, path=path)
+         paths = [fi.get("path", "") for fi in infos]
+         result = truncate_if_too_long(paths)
+         return str(result)
+
+     return StructuredTool.from_function(
+         name="glob",
+         description=tool_description,
+         func=sync_glob,
+         coroutine=async_glob,
+     )


  def _grep_tool_generator(
@@ -497,14 +604,14 @@
      """
      tool_description = custom_description or GREP_TOOL_DESCRIPTION

-     @tool(description=tool_description)
-     def grep(
+     def sync_grep(
          pattern: str,
          runtime: ToolRuntime[None, FilesystemState],
          path: str | None = None,
          glob: str | None = None,
          output_mode: Literal["files_with_matches", "content", "count"] = "files_with_matches",
      ) -> str:
+         """Synchronous wrapper for grep tool."""
          resolved_backend = _get_backend(backend, runtime)
          raw = resolved_backend.grep_raw(pattern, path=path, glob=glob)
          if isinstance(raw, str):
@@ -512,7 +619,27 @@
          formatted = format_grep_matches(raw, output_mode)
          return truncate_if_too_long(formatted)  # type: ignore[arg-type]

-     return grep
+     async def async_grep(
+         pattern: str,
+         runtime: ToolRuntime[None, FilesystemState],
+         path: str | None = None,
+         glob: str | None = None,
+         output_mode: Literal["files_with_matches", "content", "count"] = "files_with_matches",
+     ) -> str:
+         """Asynchronous wrapper for grep tool."""
+         resolved_backend = _get_backend(backend, runtime)
+         raw = await resolved_backend.agrep_raw(pattern, path=path, glob=glob)
+         if isinstance(raw, str):
+             return raw
+         formatted = format_grep_matches(raw, output_mode)
+         return truncate_if_too_long(formatted)  # type: ignore[arg-type]
+
+     return StructuredTool.from_function(
+         name="grep",
+         description=tool_description,
+         func=sync_grep,
+         coroutine=async_grep,
+     )


  def _supports_execution(backend: BackendProtocol) -> bool:
@@ -553,11 +680,11 @@
      """
      tool_description = custom_description or EXECUTE_TOOL_DESCRIPTION

-     @tool(description=tool_description)
-     def execute(
+     def sync_execute(
          command: str,
          runtime: ToolRuntime[None, FilesystemState],
      ) -> str:
+         """Synchronous wrapper for execute tool."""
          resolved_backend = _get_backend(backend, runtime)

          # Runtime check - fail gracefully if not supported
@@ -586,7 +713,45 @@

          return "".join(parts)

-     return execute
+     async def async_execute(
+         command: str,
+         runtime: ToolRuntime[None, FilesystemState],
+     ) -> str:
+         """Asynchronous wrapper for execute tool."""
+         resolved_backend = _get_backend(backend, runtime)
+
+         # Runtime check - fail gracefully if not supported
+         if not _supports_execution(resolved_backend):
+             return (
+                 "Error: Execution not available. This agent's backend "
+                 "does not support command execution (SandboxBackendProtocol). "
+                 "To use the execute tool, provide a backend that implements SandboxBackendProtocol."
+             )
+
+         try:
+             result = await resolved_backend.aexecute(command)
+         except NotImplementedError as e:
+             # Handle case where execute() exists but raises NotImplementedError
+             return f"Error: Execution not available. {e}"
+
+         # Format output for LLM consumption
+         parts = [result.output]
+
+         if result.exit_code is not None:
+             status = "succeeded" if result.exit_code == 0 else "failed"
+             parts.append(f"\n[Command {status} with exit code {result.exit_code}]")
+
+         if result.truncated:
+             parts.append("\n[Output was truncated due to size limits]")
+
+         return "".join(parts)
+
+     return StructuredTool.from_function(
+         name="execute",
+         description=tool_description,
+         func=sync_execute,
+         coroutine=async_execute,
+     )


  TOOL_GENERATORS = {
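Every tool generator in this file now follows the same recipe: a single StructuredTool carrying both a sync `func` (used by `invoke`) and an async `coroutine` (used by `ainvoke`), replacing the sync-only `@tool` decorator. The same recipe on a self-contained toy tool (the add/aadd pair is illustrative, not part of the package):

```python
import asyncio

from langchain_core.tools import StructuredTool


def add(a: int, b: int) -> int:
    """Add two integers (sync path)."""
    return a + b


async def aadd(a: int, b: int) -> int:
    """Add two integers (async path)."""
    return a + b


tool = StructuredTool.from_function(
    name="add",
    description="Add two integers.",
    func=add,        # used by tool.invoke(...)
    coroutine=aadd,  # used by await tool.ainvoke(...)
)

print(tool.invoke({"a": 2, "b": 3}))                # 5, via the sync func
print(asyncio.run(tool.ainvoke({"a": 2, "b": 3})))  # 5, via the coroutine
```

With only a sync `func`, `ainvoke` would fall back to running it in an executor; supplying a real `coroutine` lets the async path await the backend's native `a`-methods instead.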
{deepagents-0.2.8 → deepagents-0.3.0}/deepagents.egg-info/PKG-INFO

@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: deepagents
- Version: 0.2.8
+ Version: 0.3.0
  Summary: General purpose 'deep agent' with sub-agent spawning, todo list capabilities, and mock file system. Built on LangGraph.
  License: MIT
  Project-URL: Homepage, https://docs.langchain.com/oss/python/deepagents/overview
@@ -11,9 +11,9 @@ Project-URL: Slack, https://www.langchain.com/join-community
  Project-URL: Reddit, https://www.reddit.com/r/LangChain/
  Requires-Python: <4.0,>=3.11
  Description-Content-Type: text/markdown
- Requires-Dist: langchain-anthropic<2.0.0,>=1.0.0
- Requires-Dist: langchain<2.0.0,>=1.0.2
- Requires-Dist: langchain-core<2.0.0,>=1.0.0
+ Requires-Dist: langchain-anthropic<2.0.0,>=1.2.0
+ Requires-Dist: langchain<2.0.0,>=1.1.0
+ Requires-Dist: langchain-core<2.0.0,>=1.1.0
  Requires-Dist: wcmatch

  # 🧠🤖Deep Agents
deepagents-0.3.0/deepagents.egg-info/requires.txt

@@ -0,0 +1,4 @@
+ langchain-anthropic<2.0.0,>=1.2.0
+ langchain<2.0.0,>=1.1.0
+ langchain-core<2.0.0,>=1.1.0
+ wcmatch
{deepagents-0.2.8 → deepagents-0.3.0}/pyproject.toml

@@ -1,14 +1,14 @@
  [project]
  name = "deepagents"
- version = "0.2.8"
+ version = "0.3.0"
  description = "General purpose 'deep agent' with sub-agent spawning, todo list capabilities, and mock file system. Built on LangGraph."
  readme = "README.md"
  license = { text = "MIT" }
  requires-python = ">=3.11,<4.0"
  dependencies = [
-     "langchain-anthropic>=1.0.0,<2.0.0",
-     "langchain>=1.0.2,<2.0.0",
-     "langchain-core>=1.0.0,<2.0.0",
+     "langchain-anthropic>=1.2.0,<2.0.0",
+     "langchain>=1.1.0,<2.0.0",
+     "langchain-core>=1.1.0,<2.0.0",
      "wcmatch",
  ]

@@ -29,6 +29,7 @@ test = [
      "pytest-xdist",
      "ruff>=0.12.2,<0.13.0",
      "mypy>=1.18.1,<1.19.0",
+     "pytest-asyncio>=1.3.0",
  ]

  dev = [
@@ -93,3 +94,6 @@ enable_error_code = ["deprecated"]
  # Optional: reduce strictness if needed
  disallow_any_generics = false
  warn_return_any = false
+
+ [tool.pytest.ini_options]
+ asyncio_mode = "auto"
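`asyncio_mode = "auto"` lets pytest-asyncio collect plain `async def` tests without a per-test `@pytest.mark.asyncio` marker, which is what the new async test surface relies on. A minimal example test file (hypothetical, not shipped with the package):

```python
# test_async_example.py -- runs under asyncio_mode = "auto",
# so no @pytest.mark.asyncio decorator is needed on the test.
import asyncio


async def test_gather_preserves_order():
    async def double(x: int) -> int:
        await asyncio.sleep(0)  # yield to the event loop
        return 2 * x

    assert await asyncio.gather(double(1), double(2)) == [2, 4]
```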
deepagents-0.2.8/deepagents.egg-info/requires.txt

@@ -1,4 +0,0 @@
- langchain-anthropic<2.0.0,>=1.0.0
- langchain<2.0.0,>=1.0.2
- langchain-core<2.0.0,>=1.0.0
- wcmatch
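Taken together, 0.3.0 gives every filesystem tool a native async path, so an agent driven through LangGraph's async API no longer blocks the event loop on file operations. A hypothetical smoke test of that path (assumes a configured default model, e.g. an Anthropic API key in the environment; `agent.ainvoke` is standard LangGraph usage):

```python
import asyncio

from deepagents import create_deep_agent


async def main() -> None:
    # Assumption: create_deep_agent() with defaults works when a model key is configured.
    agent = create_deep_agent()
    result = await agent.ainvoke(
        {"messages": [{"role": "user", "content": "Write 'hi' to /notes.txt, then read it back."}]}
    )
    print(result["messages"][-1].content)


asyncio.run(main())
```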