hdsp-jupyter-extension 2.0.7__py3-none-any.whl → 2.0.8__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (78) hide show
  1. agent_server/core/embedding_service.py +67 -46
  2. agent_server/core/rag_manager.py +31 -17
  3. agent_server/core/retriever.py +13 -8
  4. agent_server/core/vllm_embedding_service.py +243 -0
  5. agent_server/langchain/agent.py +8 -0
  6. agent_server/langchain/custom_middleware.py +58 -31
  7. agent_server/langchain/hitl_config.py +6 -1
  8. agent_server/langchain/logging_utils.py +53 -14
  9. agent_server/langchain/prompts.py +47 -16
  10. agent_server/langchain/tools/__init__.py +13 -0
  11. agent_server/langchain/tools/file_tools.py +285 -7
  12. agent_server/langchain/tools/file_utils.py +334 -0
  13. agent_server/langchain/tools/lsp_tools.py +264 -0
  14. agent_server/main.py +7 -0
  15. agent_server/routers/langchain_agent.py +115 -19
  16. agent_server/routers/rag.py +8 -3
  17. hdsp_agent_core/models/rag.py +15 -1
  18. hdsp_agent_core/services/rag_service.py +6 -1
  19. {hdsp_jupyter_extension-2.0.7.data → hdsp_jupyter_extension-2.0.8.data}/data/share/jupyter/labextensions/hdsp-agent/build_log.json +1 -1
  20. {hdsp_jupyter_extension-2.0.7.data → hdsp_jupyter_extension-2.0.8.data}/data/share/jupyter/labextensions/hdsp-agent/package.json +3 -2
  21. hdsp_jupyter_extension-2.0.7.data/data/share/jupyter/labextensions/hdsp-agent/static/frontend_styles_index_js.4770ec0fb2d173b6deb4.js → hdsp_jupyter_extension-2.0.8.data/data/share/jupyter/labextensions/hdsp-agent/static/frontend_styles_index_js.8740a527757068814573.js +160 -3
  22. hdsp_jupyter_extension-2.0.8.data/data/share/jupyter/labextensions/hdsp-agent/static/frontend_styles_index_js.8740a527757068814573.js.map +1 -0
  23. hdsp_jupyter_extension-2.0.7.data/data/share/jupyter/labextensions/hdsp-agent/static/lib_index_js.29cf4312af19e86f82af.js → hdsp_jupyter_extension-2.0.8.data/data/share/jupyter/labextensions/hdsp-agent/static/lib_index_js.e4ff4b5779b5e049f84c.js +1759 -221
  24. hdsp_jupyter_extension-2.0.8.data/data/share/jupyter/labextensions/hdsp-agent/static/lib_index_js.e4ff4b5779b5e049f84c.js.map +1 -0
  25. hdsp_jupyter_extension-2.0.7.data/data/share/jupyter/labextensions/hdsp-agent/static/remoteEntry.61343eb4cf0577e74b50.js → hdsp_jupyter_extension-2.0.8.data/data/share/jupyter/labextensions/hdsp-agent/static/remoteEntry.020cdb0b864cfaa4e41e.js +14 -12
  26. hdsp_jupyter_extension-2.0.8.data/data/share/jupyter/labextensions/hdsp-agent/static/remoteEntry.020cdb0b864cfaa4e41e.js.map +1 -0
  27. jupyter_ext/labextension/static/vendors-node_modules_emotion_cache_dist_emotion-cache_browser_development_esm_js-node_modules-782ee5.d9ed8645ef1d311657d8.js → hdsp_jupyter_extension-2.0.8.data/data/share/jupyter/labextensions/hdsp-agent/static/vendors-node_modules_emotion_cache_dist_emotion-cache_browser_development_esm_js.24edcc52a1c014a8a5f0.js +2 -209
  28. hdsp_jupyter_extension-2.0.8.data/data/share/jupyter/labextensions/hdsp-agent/static/vendors-node_modules_emotion_cache_dist_emotion-cache_browser_development_esm_js.24edcc52a1c014a8a5f0.js.map +1 -0
  29. jupyter_ext/labextension/static/vendors-node_modules_emotion_react_dist_emotion-react_browser_development_esm_js.36b49c71871f98d4f549.js → hdsp_jupyter_extension-2.0.8.data/data/share/jupyter/labextensions/hdsp-agent/static/vendors-node_modules_emotion_react_dist_emotion-react_browser_development_esm_js.19ecf6babe00caff6b8a.js +209 -2
  30. hdsp_jupyter_extension-2.0.8.data/data/share/jupyter/labextensions/hdsp-agent/static/vendors-node_modules_emotion_react_dist_emotion-react_browser_development_esm_js.19ecf6babe00caff6b8a.js.map +1 -0
  31. jupyter_ext/labextension/static/vendors-node_modules_mui_material_utils_createSvgIcon_js.2e13df4ea61496e95d45.js → hdsp_jupyter_extension-2.0.8.data/data/share/jupyter/labextensions/hdsp-agent/static/vendors-node_modules_mui_material_utils_createSvgIcon_js.1f5038488cdfd8b3a85d.js +212 -3
  32. hdsp_jupyter_extension-2.0.8.data/data/share/jupyter/labextensions/hdsp-agent/static/vendors-node_modules_mui_material_utils_createSvgIcon_js.1f5038488cdfd8b3a85d.js.map +1 -0
  33. {hdsp_jupyter_extension-2.0.7.dist-info → hdsp_jupyter_extension-2.0.8.dist-info}/METADATA +1 -1
  34. {hdsp_jupyter_extension-2.0.7.dist-info → hdsp_jupyter_extension-2.0.8.dist-info}/RECORD +66 -63
  35. jupyter_ext/__init__.py +18 -0
  36. jupyter_ext/_version.py +1 -1
  37. jupyter_ext/handlers.py +176 -1
  38. jupyter_ext/labextension/build_log.json +1 -1
  39. jupyter_ext/labextension/package.json +3 -2
  40. jupyter_ext/labextension/static/{frontend_styles_index_js.4770ec0fb2d173b6deb4.js → frontend_styles_index_js.8740a527757068814573.js} +160 -3
  41. jupyter_ext/labextension/static/frontend_styles_index_js.8740a527757068814573.js.map +1 -0
  42. jupyter_ext/labextension/static/{lib_index_js.29cf4312af19e86f82af.js → lib_index_js.e4ff4b5779b5e049f84c.js} +1759 -221
  43. jupyter_ext/labextension/static/lib_index_js.e4ff4b5779b5e049f84c.js.map +1 -0
  44. jupyter_ext/labextension/static/{remoteEntry.61343eb4cf0577e74b50.js → remoteEntry.020cdb0b864cfaa4e41e.js} +14 -12
  45. jupyter_ext/labextension/static/remoteEntry.020cdb0b864cfaa4e41e.js.map +1 -0
  46. hdsp_jupyter_extension-2.0.7.data/data/share/jupyter/labextensions/hdsp-agent/static/vendors-node_modules_emotion_cache_dist_emotion-cache_browser_development_esm_js-node_modules-782ee5.d9ed8645ef1d311657d8.js → jupyter_ext/labextension/static/vendors-node_modules_emotion_cache_dist_emotion-cache_browser_development_esm_js.24edcc52a1c014a8a5f0.js +2 -209
  47. jupyter_ext/labextension/static/vendors-node_modules_emotion_cache_dist_emotion-cache_browser_development_esm_js.24edcc52a1c014a8a5f0.js.map +1 -0
  48. hdsp_jupyter_extension-2.0.7.data/data/share/jupyter/labextensions/hdsp-agent/static/vendors-node_modules_emotion_react_dist_emotion-react_browser_development_esm_js.36b49c71871f98d4f549.js → jupyter_ext/labextension/static/vendors-node_modules_emotion_react_dist_emotion-react_browser_development_esm_js.19ecf6babe00caff6b8a.js +209 -2
  49. jupyter_ext/labextension/static/vendors-node_modules_emotion_react_dist_emotion-react_browser_development_esm_js.19ecf6babe00caff6b8a.js.map +1 -0
  50. hdsp_jupyter_extension-2.0.7.data/data/share/jupyter/labextensions/hdsp-agent/static/vendors-node_modules_mui_material_utils_createSvgIcon_js.2e13df4ea61496e95d45.js → jupyter_ext/labextension/static/vendors-node_modules_mui_material_utils_createSvgIcon_js.1f5038488cdfd8b3a85d.js +212 -3
  51. jupyter_ext/labextension/static/vendors-node_modules_mui_material_utils_createSvgIcon_js.1f5038488cdfd8b3a85d.js.map +1 -0
  52. hdsp_jupyter_extension-2.0.7.data/data/share/jupyter/labextensions/hdsp-agent/static/frontend_styles_index_js.4770ec0fb2d173b6deb4.js.map +0 -1
  53. hdsp_jupyter_extension-2.0.7.data/data/share/jupyter/labextensions/hdsp-agent/static/lib_index_js.29cf4312af19e86f82af.js.map +0 -1
  54. hdsp_jupyter_extension-2.0.7.data/data/share/jupyter/labextensions/hdsp-agent/static/remoteEntry.61343eb4cf0577e74b50.js.map +0 -1
  55. hdsp_jupyter_extension-2.0.7.data/data/share/jupyter/labextensions/hdsp-agent/static/vendors-node_modules_emotion_cache_dist_emotion-cache_browser_development_esm_js-node_modules-782ee5.d9ed8645ef1d311657d8.js.map +0 -1
  56. hdsp_jupyter_extension-2.0.7.data/data/share/jupyter/labextensions/hdsp-agent/static/vendors-node_modules_emotion_react_dist_emotion-react_browser_development_esm_js.36b49c71871f98d4f549.js.map +0 -1
  57. hdsp_jupyter_extension-2.0.7.data/data/share/jupyter/labextensions/hdsp-agent/static/vendors-node_modules_mui_material_utils_createSvgIcon_js.2e13df4ea61496e95d45.js.map +0 -1
  58. jupyter_ext/labextension/static/frontend_styles_index_js.4770ec0fb2d173b6deb4.js.map +0 -1
  59. jupyter_ext/labextension/static/lib_index_js.29cf4312af19e86f82af.js.map +0 -1
  60. jupyter_ext/labextension/static/remoteEntry.61343eb4cf0577e74b50.js.map +0 -1
  61. jupyter_ext/labextension/static/vendors-node_modules_emotion_cache_dist_emotion-cache_browser_development_esm_js-node_modules-782ee5.d9ed8645ef1d311657d8.js.map +0 -1
  62. jupyter_ext/labextension/static/vendors-node_modules_emotion_react_dist_emotion-react_browser_development_esm_js.36b49c71871f98d4f549.js.map +0 -1
  63. jupyter_ext/labextension/static/vendors-node_modules_mui_material_utils_createSvgIcon_js.2e13df4ea61496e95d45.js.map +0 -1
  64. {hdsp_jupyter_extension-2.0.7.data → hdsp_jupyter_extension-2.0.8.data}/data/etc/jupyter/jupyter_server_config.d/hdsp_jupyter_extension.json +0 -0
  65. {hdsp_jupyter_extension-2.0.7.data → hdsp_jupyter_extension-2.0.8.data}/data/share/jupyter/labextensions/hdsp-agent/install.json +0 -0
  66. {hdsp_jupyter_extension-2.0.7.data → hdsp_jupyter_extension-2.0.8.data}/data/share/jupyter/labextensions/hdsp-agent/static/node_modules_emotion_use-insertion-effect-with-fallbacks_dist_emotion-use-insertion-effect-wi-3ba6b80.c095373419d05e6f141a.js +0 -0
  67. {hdsp_jupyter_extension-2.0.7.data → hdsp_jupyter_extension-2.0.8.data}/data/share/jupyter/labextensions/hdsp-agent/static/node_modules_emotion_use-insertion-effect-with-fallbacks_dist_emotion-use-insertion-effect-wi-3ba6b80.c095373419d05e6f141a.js.map +0 -0
  68. {hdsp_jupyter_extension-2.0.7.data → hdsp_jupyter_extension-2.0.8.data}/data/share/jupyter/labextensions/hdsp-agent/static/node_modules_emotion_use-insertion-effect-with-fallbacks_dist_emotion-use-insertion-effect-wi-3ba6b81.61e75fb98ecff46cf836.js +0 -0
  69. {hdsp_jupyter_extension-2.0.7.data → hdsp_jupyter_extension-2.0.8.data}/data/share/jupyter/labextensions/hdsp-agent/static/node_modules_emotion_use-insertion-effect-with-fallbacks_dist_emotion-use-insertion-effect-wi-3ba6b81.61e75fb98ecff46cf836.js.map +0 -0
  70. {hdsp_jupyter_extension-2.0.7.data → hdsp_jupyter_extension-2.0.8.data}/data/share/jupyter/labextensions/hdsp-agent/static/style.js +0 -0
  71. {hdsp_jupyter_extension-2.0.7.data → hdsp_jupyter_extension-2.0.8.data}/data/share/jupyter/labextensions/hdsp-agent/static/vendors-node_modules_babel_runtime_helpers_esm_extends_js-node_modules_emotion_serialize_dist-051195.e2553aab0c3963b83dd7.js +0 -0
  72. {hdsp_jupyter_extension-2.0.7.data → hdsp_jupyter_extension-2.0.8.data}/data/share/jupyter/labextensions/hdsp-agent/static/vendors-node_modules_babel_runtime_helpers_esm_extends_js-node_modules_emotion_serialize_dist-051195.e2553aab0c3963b83dd7.js.map +0 -0
  73. {hdsp_jupyter_extension-2.0.7.data → hdsp_jupyter_extension-2.0.8.data}/data/share/jupyter/labextensions/hdsp-agent/static/vendors-node_modules_emotion_styled_dist_emotion-styled_browser_development_esm_js.661fb5836f4978a7c6e1.js +0 -0
  74. {hdsp_jupyter_extension-2.0.7.data → hdsp_jupyter_extension-2.0.8.data}/data/share/jupyter/labextensions/hdsp-agent/static/vendors-node_modules_emotion_styled_dist_emotion-styled_browser_development_esm_js.661fb5836f4978a7c6e1.js.map +0 -0
  75. {hdsp_jupyter_extension-2.0.7.data → hdsp_jupyter_extension-2.0.8.data}/data/share/jupyter/labextensions/hdsp-agent/static/vendors-node_modules_mui_material_index_js.985697e0162d8d088ca2.js +0 -0
  76. {hdsp_jupyter_extension-2.0.7.data → hdsp_jupyter_extension-2.0.8.data}/data/share/jupyter/labextensions/hdsp-agent/static/vendors-node_modules_mui_material_index_js.985697e0162d8d088ca2.js.map +0 -0
  77. {hdsp_jupyter_extension-2.0.7.dist-info → hdsp_jupyter_extension-2.0.8.dist-info}/WHEEL +0 -0
  78. {hdsp_jupyter_extension-2.0.7.dist-info → hdsp_jupyter_extension-2.0.8.dist-info}/licenses/LICENSE +0 -0
@@ -4,24 +4,33 @@ File Tools for LangChain Agent
4
4
  Provides tools for file system operations:
5
5
  - read_file: Read file content
6
6
  - write_file: Write content to file (requires approval)
7
+ - edit_file: Edit file with string replacement (requires approval)
7
8
  - list_files: List directory contents
8
9
  """
9
10
 
10
11
  import os
11
- from typing import Any, Dict, Optional
12
+ from typing import Any, Dict, List, Optional
12
13
 
13
14
  from langchain_core.tools import tool
14
15
  from pydantic import BaseModel, Field
15
16
 
17
+ # Default constants for file reading (aligned with DeepAgents best practices)
18
+ DEFAULT_READ_LIMIT = 500 # Conservative default to prevent context overflow
19
+ DEFAULT_READ_OFFSET = 0
20
+
16
21
 
17
22
  class ReadFileInput(BaseModel):
18
23
  """Input schema for read_file tool"""
19
24
 
20
25
  path: str = Field(description="Relative path to the file to read")
21
26
  encoding: str = Field(default="utf-8", description="File encoding")
22
- max_lines: Optional[int] = Field(
23
- default=None,
24
- description="Maximum number of lines to read",
27
+ offset: int = Field(
28
+ default=DEFAULT_READ_OFFSET,
29
+ description="Line offset to start reading from (0-indexed). Use for pagination.",
30
+ )
31
+ limit: int = Field(
32
+ default=DEFAULT_READ_LIMIT,
33
+ description="Maximum number of lines to read (default: 500). Use pagination for large files.",
25
34
  )
26
35
  execution_result: Optional[Dict[str, Any]] = Field(
27
36
  default=None,
@@ -45,6 +54,22 @@ class WriteFileInput(BaseModel):
45
54
  )
46
55
 
47
56
 
57
class EditFileInput(BaseModel):
    """Input schema for edit_file tool.

    Bound as ``args_schema`` on ``edit_file_tool`` so the agent framework
    validates tool-call arguments before the tool body runs.
    """

    # Relative path only; the tool itself rejects absolute paths and "..".
    path: str = Field(description="Relative path to the file to edit")
    old_string: str = Field(description="The exact string to find and replace")
    new_string: str = Field(description="The replacement string")
    replace_all: bool = Field(
        default=False,
        description="Whether to replace all occurrences (default: false, requires unique match)",
    )
    # Supplied on the second round-trip, after the client has executed the
    # approved edit and reports the outcome back to the tool.
    execution_result: Optional[Dict[str, Any]] = Field(
        default=None,
        description="Optional execution result payload from the client",
    )
71
+
72
+
48
73
  class ListFilesInput(BaseModel):
49
74
  """Input schema for list_files tool"""
50
75
 
@@ -93,19 +118,27 @@ def _validate_path(path: str, workspace_root: str = ".") -> str:
93
118
  def read_file_tool(
94
119
  path: str,
95
120
  encoding: str = "utf-8",
96
- max_lines: Optional[int] = None,
121
+ offset: int = DEFAULT_READ_OFFSET,
122
+ limit: int = DEFAULT_READ_LIMIT,
97
123
  execution_result: Optional[Dict[str, Any]] = None,
98
124
  workspace_root: str = ".",
99
125
  ) -> Dict[str, Any]:
100
126
  """
101
- Read content from a file.
127
+ Read content from a file with pagination support.
102
128
 
103
129
  Only relative paths within the workspace are allowed.
104
130
  Absolute paths and parent directory traversal (..) are blocked.
105
131
 
132
+ **IMPORTANT for large files**: Use pagination with offset and limit to avoid context overflow.
133
+ - First scan: read_file(path, limit=100) to see file structure
134
+ - Read more: read_file(path, offset=100, limit=200) for next 200 lines
135
+ - Only omit limit when necessary for immediate editing
136
+
106
137
  Args:
107
138
  path: Relative path to the file
108
139
  encoding: File encoding (default: utf-8)
140
+ offset: Line offset to start reading from (0-indexed)
141
+ limit: Maximum number of lines to read (default: 500)
109
142
 
110
143
  Returns:
111
144
  Dict with file content or error
@@ -130,7 +163,8 @@ def read_file_tool(
130
163
  "parameters": {
131
164
  "path": path,
132
165
  "encoding": encoding,
133
- "max_lines": max_lines,
166
+ "offset": offset,
167
+ "limit": limit,
134
168
  },
135
169
  "status": "pending_execution",
136
170
  "message": "File read queued for execution by client",
@@ -197,6 +231,90 @@ def write_file_tool(
197
231
  }
198
232
 
199
233
 
234
@tool(args_schema=EditFileInput)
def edit_file_tool(
    path: str,
    old_string: str,
    new_string: str,
    replace_all: bool = False,
    execution_result: Optional[Dict[str, Any]] = None,
    workspace_root: str = ".",
) -> Dict[str, Any]:
    """
    Edit a file by replacing a specific string with another.

    This operation requires user approval before execution.
    The old_string must be unique in the file unless replace_all=True.

    The tool does not touch the file itself: it returns a
    ``pending_approval`` payload, and the actual edit is performed by the
    client, which then calls back with ``execution_result``.

    Args:
        path: Relative path to the file
        old_string: The exact string to find and replace
        new_string: The replacement string
        replace_all: Whether to replace all occurrences

    Returns:
        Dict with operation status and diff preview (pending approval)
    """
    # Security validation
    if os.path.isabs(path):
        return {
            "tool": "edit_file_tool",
            "success": False,
            "error": f"Absolute paths not allowed: {path}",
            "path": path,
        }
    # NOTE(review): substring check also rejects benign names containing
    # ".." (e.g. "a..b.txt"), not just traversal segments — confirm intended.
    if ".." in path:
        return {
            "tool": "edit_file_tool",
            "success": False,
            "error": f"Parent directory traversal not allowed: {path}",
            "path": path,
        }

    try:
        # May raise ValueError for paths escaping the workspace (handled below).
        resolved_path = _validate_path(path, workspace_root)

        # Build response with diff preview
        # Note: actual file content will be read by client for diff generation
        old_preview = old_string[:200] + "..." if len(old_string) > 200 else old_string
        new_preview = new_string[:200] + "..." if len(new_string) > 200 else new_string

        response: Dict[str, Any] = {
            "tool": "edit_file_tool",
            "parameters": {
                "path": path,
                "old_string": old_string,
                "new_string": new_string,
                "replace_all": replace_all,
            },
            "status": "pending_approval",
            "path": path,
            "resolved_path": resolved_path,
            "old_string_preview": old_preview,
            "new_string_preview": new_preview,
            "replace_all": replace_all,
            "message": "File edit operation requires user approval",
        }

        # Second round-trip: the client already executed the edit, so flip
        # the payload from pending_approval to complete.
        if execution_result is not None:
            response["execution_result"] = execution_result
            response["status"] = "complete"
            response["message"] = "File edit executed with client-reported results"
            # Include diff if provided by client
            if "diff" in execution_result:
                response["diff"] = execution_result["diff"]

        return response

    except ValueError as e:
        return {
            "tool": "edit_file_tool",
            "success": False,
            "error": str(e),
            "path": path,
        }
316
+
317
+
200
318
  @tool(args_schema=ListFilesInput)
201
319
  def list_files_tool(
202
320
  path: str = ".",
@@ -233,9 +351,169 @@ def list_files_tool(
233
351
  return response
234
352
 
235
353
 
354
class EditOperation(BaseModel):
    """Single edit operation for multiedit_file tool.

    One find/replace step inside a ``MultiEditInput.edits`` list.
    """

    old_string: str = Field(description="The exact string to find and replace")
    new_string: str = Field(description="The replacement string")
    replace_all: bool = Field(
        default=False,
        description="Whether to replace all occurrences (default: false)"
    )
363
+
364
+
365
class MultiEditInput(BaseModel):
    """Input schema for multiedit_file tool.

    Bound as ``args_schema`` on ``multiedit_file_tool``.
    """

    path: str = Field(description="Relative path to the file to edit")
    # Applied in order by the client; see multiedit_file_tool docstring.
    edits: List[EditOperation] = Field(
        description="List of edit operations to apply sequentially"
    )
    # Supplied on the second round-trip after client-side execution.
    execution_result: Optional[Dict[str, Any]] = Field(
        default=None,
        description="Optional execution result payload from the client",
    )
376
+
377
+
378
@tool(args_schema=MultiEditInput)
def multiedit_file_tool(
    path: str,
    edits: List[EditOperation],
    execution_result: Optional[Dict[str, Any]] = None,
    workspace_root: str = ".",
) -> Dict[str, Any]:
    """
    Apply multiple sequential edits to a single file atomically.

    This is more efficient than multiple edit_file_tool calls when you need
    to make several changes to the same file. All edits are validated before
    any are applied - if one fails, none are applied.

    Use this tool when:
    - Making multiple related changes to a file
    - Updating several config values at once
    - Refactoring multiple sections of code

    Args:
        path: Relative path to the file
        edits: List of edit operations, each containing:
            - old_string: The exact string to find and replace
            - new_string: The replacement string
            - replace_all: (optional) Whether to replace all occurrences

    Returns:
        Dict with operation status, edits_applied count, and diff preview

    Example:
        multiedit_file_tool(
            path="config.py",
            edits=[
                {"old_string": "DEBUG = True", "new_string": "DEBUG = False"},
                {"old_string": "LOG_LEVEL = 'INFO'", "new_string": "LOG_LEVEL = 'WARNING'"}
            ]
        )
    """
    # Security validation
    if os.path.isabs(path):
        return {
            "tool": "multiedit_file_tool",
            "success": False,
            "error": f"Absolute paths not allowed: {path}",
            "path": path,
        }
    # NOTE(review): substring check also rejects benign names containing
    # ".." (e.g. "a..b.txt") — same behavior as edit_file_tool.
    if ".." in path:
        return {
            "tool": "multiedit_file_tool",
            "success": False,
            "error": f"Parent directory traversal not allowed: {path}",
            "path": path,
        }

    if not edits or len(edits) == 0:
        return {
            "tool": "multiedit_file_tool",
            "success": False,
            "error": "At least one edit is required",
            "path": path,
        }

    try:
        # May raise ValueError for paths escaping the workspace (handled below).
        resolved_path = _validate_path(path, workspace_root)

        # Build edits preview (handle both EditOperation objects and dicts)
        edits_preview = []
        edits_as_dicts = []
        for i, edit in enumerate(edits[:5]):  # Preview first 5
            # Support both Pydantic model and dict access
            if hasattr(edit, "old_string"):
                old_str = edit.old_string
                new_str = edit.new_string
                replace_all_val = edit.replace_all
            else:
                old_str = edit.get("old_string", "")
                new_str = edit.get("new_string", "")
                replace_all_val = edit.get("replace_all", False)

            # Previews are capped at 50 chars to keep the payload small.
            old_preview = (old_str[:50] + "...") if len(old_str) > 50 else old_str
            new_preview = (new_str[:50] + "...") if len(new_str) > 50 else new_str
            edits_preview.append({
                "index": i,
                "old_preview": old_preview,
                "new_preview": new_preview,
                "replace_all": replace_all_val
            })

        # Convert all edits to dicts for serialization
        # (model_dump for pydantic v2, dict for v1, pass-through otherwise).
        for edit in edits:
            if hasattr(edit, "model_dump"):
                edits_as_dicts.append(edit.model_dump())
            elif hasattr(edit, "dict"):
                edits_as_dicts.append(edit.dict())
            else:
                edits_as_dicts.append(edit)

        response: Dict[str, Any] = {
            "tool": "multiedit_file_tool",
            "parameters": {
                "path": path,
                "edits_count": len(edits),
                "edits": edits_as_dicts,
            },
            "status": "pending_approval",
            "path": path,
            "resolved_path": resolved_path,
            "edits_preview": edits_preview,
            "total_edits": len(edits),
            "message": f"Multi-edit operation ({len(edits)} edits) requires user approval",
        }

        # Second round-trip: the client already executed the edits, so flip
        # the payload from pending_approval to complete and forward results.
        if execution_result is not None:
            response["execution_result"] = execution_result
            response["status"] = "complete"
            response["message"] = "Multi-edit executed with client-reported results"
            if "diff" in execution_result:
                response["diff"] = execution_result["diff"]
            if "edits_applied" in execution_result:
                response["edits_applied"] = execution_result["edits_applied"]
            if "edits_failed" in execution_result:
                response["edits_failed"] = execution_result["edits_failed"]

        return response

    except ValueError as e:
        return {
            "tool": "multiedit_file_tool",
            "success": False,
            "error": str(e),
            "path": path,
        }
+
511
+
236
512
# Export all tools
# Aggregate registry of the file-operation tools defined in this module.
FILE_TOOLS = [
    read_file_tool,
    write_file_tool,
    edit_file_tool,
    multiedit_file_tool,
    list_files_tool,
]
@@ -0,0 +1,334 @@
1
+ """
2
+ File Utilities for LangChain Agent
3
+
4
+ Provides utility functions for file operations:
5
+ - perform_string_replacement: String replacement with occurrence validation
6
+ - compute_unified_diff: Generate unified diff between before/after content
7
+ - count_diff_changes: Count additions/deletions from diff
8
+ - format_content_with_line_numbers: Format file content with line numbers (cat -n style)
9
+ - check_empty_content: Check if content is empty and return warning message
10
+ """
11
+
12
+ import difflib
13
+ from typing import List, Optional, Tuple, Union
14
+
15
+ # Constants for file reading (aligned with DeepAgents)
16
+ EMPTY_CONTENT_WARNING = "System reminder: File exists but has empty contents"
17
+ MAX_LINE_LENGTH = 10000 # Chunk lines longer than this
18
+ LINE_NUMBER_WIDTH = 6 # Width for line number padding
19
+
20
+
21
def check_empty_content(content: str) -> Optional[str]:
    """
    Return a standard warning when *content* is empty or whitespace-only.

    Args:
        content: The text to inspect.

    Returns:
        EMPTY_CONTENT_WARNING when there is no meaningful text, None otherwise.
    """
    has_text = bool(content) and content.strip() != ""
    return None if has_text else EMPTY_CONTENT_WARNING
34
+
35
+
36
def format_content_with_line_numbers(
    content: Union[str, List[str]],
    start_line: int = 1,
) -> str:
    """
    Render file content in ``cat -n`` style with right-aligned line numbers.

    Lines longer than MAX_LINE_LENGTH are emitted in chunks; continuation
    chunks carry decimal markers (e.g. ``5.1``, ``5.2``) instead of a plain
    line number.

    Args:
        content: File content as a string or a pre-split list of lines
        start_line: Number assigned to the first line (default: 1)

    Returns:
        The numbered (and possibly chunked) content as one string.
    """
    if isinstance(content, str):
        source_lines = content.split("\n")
        # A trailing newline yields one empty final element -- drop it.
        if source_lines and not source_lines[-1]:
            source_lines = source_lines[:-1]
    else:
        source_lines = content

    rendered: List[str] = []
    for index, text in enumerate(source_lines):
        number = start_line + index

        if len(text) <= MAX_LINE_LENGTH:
            rendered.append(f"{number:{LINE_NUMBER_WIDTH}d}\t{text}")
            continue

        # Oversized line: emit MAX_LINE_LENGTH-sized chunks. The first chunk
        # keeps the plain number; later ones get "<num>.<chunk>" markers.
        for chunk_idx, start in enumerate(range(0, len(text), MAX_LINE_LENGTH)):
            piece = text[start:start + MAX_LINE_LENGTH]
            if chunk_idx == 0:
                rendered.append(f"{number:{LINE_NUMBER_WIDTH}d}\t{piece}")
            else:
                marker = f"{number}.{chunk_idx}"
                rendered.append(f"{marker:>{LINE_NUMBER_WIDTH}}\t{piece}")

    return "\n".join(rendered)
84
+
85
+
86
def format_read_response(
    content: str,
    offset: int = 0,
    limit: int = 500,
) -> str:
    """
    Format file content for a read response: numbered lines plus a
    pagination hint when the window stops before end of file.

    Args:
        content: Full file content
        offset: Line offset (0-indexed)
        limit: Maximum number of lines to return

    Returns:
        Formatted content with line numbers, or an error/warning message.
    """
    warning = check_empty_content(content)
    if warning:
        return warning

    all_lines = content.splitlines()
    total = len(all_lines)

    # Reject out-of-range offsets up front.
    if offset >= total:
        return f"Error: Line offset {offset} exceeds file length ({total} lines)"

    # Window the requested slice and number it from offset + 1.
    window_end = min(offset + limit, total)
    body = format_content_with_line_numbers(
        all_lines[offset:window_end], start_line=offset + 1
    )

    # Tell the caller how to continue when the window did not reach EOF.
    left_over = total - window_end
    if left_over > 0:
        body += f"\n\n[... {left_over} more lines. Use offset={window_end} to continue reading]"

    return body
130
+
131
+
132
+ def _normalize_whitespace(text: str) -> str:
133
+ """Normalize line endings and trailing whitespace per line."""
134
+ lines = text.replace("\r\n", "\n").replace("\r", "\n").split("\n")
135
+ return "\n".join(line.rstrip() for line in lines)
136
+
137
+
138
def perform_string_replacement(
    content: str,
    old_string: str,
    new_string: str,
    replace_all: bool = False,
) -> Union[Tuple[str, int], str]:
    """
    Replace old_string with new_string in content, validating occurrences.

    Matching falls back through three strategies:
      1. exact match
      2. old_string with leading/trailing newlines stripped
      3. whitespace-normalized comparison (unified line endings, trailing
         spaces removed). NOTE(review): on this path the *normalized*
         content is returned, so the whole file's line endings and trailing
         whitespace get rewritten, not just the edited span — confirm this
         is acceptable to callers.

    Args:
        content: Original file content
        old_string: String to replace
        new_string: Replacement string
        replace_all: Whether to replace all occurrences

    Returns:
        (new_content, occurrences) on success, or an error message string.
    """

    def _ambiguous(needle: str, count: int) -> str:
        # Shared "too many matches" error; text must stay stable for callers.
        shown = needle[:50] + "..." if len(needle) > 50 else needle
        return (
            f"Error: String '{shown}' appears {count} times in file. "
            "Use replace_all=True to replace all instances, "
            "or provide a more specific string with surrounding context."
        )

    def _canonical(text: str) -> str:
        # Unify line endings and drop trailing whitespace on each line.
        unified = text.replace("\r\n", "\n").replace("\r", "\n")
        return "\n".join(seg.rstrip() for seg in unified.split("\n"))

    # Strategy 1: exact match.
    hits = content.count(old_string)

    # Strategy 2: retry with surrounding newlines stripped from old_string.
    if hits == 0:
        trimmed = old_string.strip("\n")
        hits = content.count(trimmed)
        if hits > 0:
            old_string = trimmed
            # Keep old/new symmetric by stripping new_string the same way.
            new_string = new_string.strip("\n")

    # Strategy 3: whitespace-normalized matching.
    if hits == 0:
        norm_content = _canonical(content)
        norm_old = _canonical(old_string.strip("\n"))
        hits = norm_content.count(norm_old)
        if hits > 0:
            if hits > 1 and not replace_all:
                return _ambiguous(old_string, hits)
            norm_new = _canonical(new_string.strip("\n"))
            return norm_content.replace(norm_old, norm_new), hits

    if hits == 0:
        shown = old_string[:100] + "..." if len(old_string) > 100 else old_string
        return f"Error: String not found in file: '{shown}'"

    if hits > 1 and not replace_all:
        return _ambiguous(old_string, hits)

    return content.replace(old_string, new_string), hits
212
+
213
+
214
def compute_unified_diff(
    before: str,
    after: str,
    filepath: str,
    max_lines: Optional[int] = 100,
    context_lines: int = 3,
) -> Optional[str]:
    """
    Compute a unified diff between before and after content.

    Args:
        before: Original content
        after: New content
        filepath: Path for display in diff headers
        max_lines: Maximum number of diff lines (None or 0 for unlimited)
        context_lines: Number of context lines around changes (default 3)

    Returns:
        Unified diff string, or None if the contents are identical.
    """
    # Fix: annotation was `int` while the docstring and the truthiness
    # check below explicitly support None for "unlimited".
    diff_lines = list(
        difflib.unified_diff(
            before.splitlines(),
            after.splitlines(),
            fromfile=f"{filepath} (before)",
            tofile=f"{filepath} (after)",
            lineterm="",
            n=context_lines,
        )
    )

    if not diff_lines:
        return None

    # Truncate long diffs, replacing the tail with a summary marker so the
    # result is exactly max_lines lines long.
    if max_lines and len(diff_lines) > max_lines:
        truncated = diff_lines[: max_lines - 1]
        truncated.append(
            f"... [{len(diff_lines) - max_lines + 1} more lines truncated]"
        )
        return "\n".join(truncated)

    return "\n".join(diff_lines)
259
+
260
+
261
def count_diff_changes(diff: str) -> Tuple[int, int]:
    """
    Count added and removed lines in a unified diff.

    Args:
        diff: Unified diff string

    Returns:
        (additions, deletions) line counts
    """
    if not diff:
        return 0, 0

    added = removed = 0
    # Single pass; the "+++" / "---" file-header lines are not changes.
    for line in diff.splitlines():
        if line.startswith("+") and not line.startswith("+++"):
            added += 1
        elif line.startswith("-") and not line.startswith("---"):
            removed += 1
    return added, removed
285
+
286
+
287
def build_edit_preview(
    original_content: str,
    old_string: str,
    new_string: str,
    replace_all: bool,
    filepath: str,
) -> dict:
    """
    Build a preview payload for an edit_file operation, including a diff.

    Args:
        original_content: Current file content
        old_string: String to replace
        new_string: Replacement string
        replace_all: Whether to replace all occurrences
        filepath: File path used in the diff headers

    Returns:
        Dict with success flag, diff, occurrence count, and line-change counts.
    """
    outcome = perform_string_replacement(
        original_content, old_string, new_string, replace_all
    )

    # perform_string_replacement signals failure by returning an error string.
    if isinstance(outcome, str):
        return {
            "success": False,
            "error": outcome,
            "diff": None,
            "occurrences": 0,
            "lines_added": 0,
            "lines_removed": 0,
        }

    updated_content, hits = outcome
    diff_text = compute_unified_diff(original_content, updated_content, filepath)
    if diff_text:
        added, removed = count_diff_changes(diff_text)
    else:
        added, removed = 0, 0

    return {
        "success": True,
        "error": None,
        "diff": diff_text,
        "occurrences": hits,
        "lines_added": added,
        "lines_removed": removed,
        "new_content": updated_content,
    }