hdsp-jupyter-extension 2.0.6__py3-none-any.whl → 2.0.7__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (90)
  1. agent_server/core/reflection_engine.py +0 -1
  2. agent_server/knowledge/watchdog_service.py +1 -1
  3. agent_server/langchain/ARCHITECTURE.md +1193 -0
  4. agent_server/langchain/agent.py +74 -588
  5. agent_server/langchain/custom_middleware.py +636 -0
  6. agent_server/langchain/executors/__init__.py +2 -7
  7. agent_server/langchain/executors/notebook_searcher.py +46 -38
  8. agent_server/langchain/hitl_config.py +66 -0
  9. agent_server/langchain/llm_factory.py +166 -0
  10. agent_server/langchain/logging_utils.py +184 -0
  11. agent_server/langchain/prompts.py +119 -0
  12. agent_server/langchain/state.py +16 -6
  13. agent_server/langchain/tools/__init__.py +6 -0
  14. agent_server/langchain/tools/file_tools.py +91 -129
  15. agent_server/langchain/tools/jupyter_tools.py +18 -18
  16. agent_server/langchain/tools/resource_tools.py +161 -0
  17. agent_server/langchain/tools/search_tools.py +198 -216
  18. agent_server/langchain/tools/shell_tools.py +54 -0
  19. agent_server/main.py +4 -1
  20. agent_server/routers/health.py +1 -1
  21. agent_server/routers/langchain_agent.py +940 -285
  22. hdsp_agent_core/prompts/auto_agent_prompts.py +3 -3
  23. {hdsp_jupyter_extension-2.0.6.data → hdsp_jupyter_extension-2.0.7.data}/data/share/jupyter/labextensions/hdsp-agent/build_log.json +1 -1
  24. {hdsp_jupyter_extension-2.0.6.data → hdsp_jupyter_extension-2.0.7.data}/data/share/jupyter/labextensions/hdsp-agent/package.json +2 -2
  25. hdsp_jupyter_extension-2.0.6.data/data/share/jupyter/labextensions/hdsp-agent/static/frontend_styles_index_js.02d346171474a0fb2dc1.js → hdsp_jupyter_extension-2.0.7.data/data/share/jupyter/labextensions/hdsp-agent/static/frontend_styles_index_js.4770ec0fb2d173b6deb4.js +312 -6
  26. hdsp_jupyter_extension-2.0.7.data/data/share/jupyter/labextensions/hdsp-agent/static/frontend_styles_index_js.4770ec0fb2d173b6deb4.js.map +1 -0
  27. hdsp_jupyter_extension-2.0.6.data/data/share/jupyter/labextensions/hdsp-agent/static/lib_index_js.a223ea20056954479ae9.js → hdsp_jupyter_extension-2.0.7.data/data/share/jupyter/labextensions/hdsp-agent/static/lib_index_js.29cf4312af19e86f82af.js +1547 -330
  28. hdsp_jupyter_extension-2.0.7.data/data/share/jupyter/labextensions/hdsp-agent/static/lib_index_js.29cf4312af19e86f82af.js.map +1 -0
  29. hdsp_jupyter_extension-2.0.6.data/data/share/jupyter/labextensions/hdsp-agent/static/remoteEntry.addf2fa038fa60304aa2.js → hdsp_jupyter_extension-2.0.7.data/data/share/jupyter/labextensions/hdsp-agent/static/remoteEntry.61343eb4cf0577e74b50.js +8 -8
  30. hdsp_jupyter_extension-2.0.7.data/data/share/jupyter/labextensions/hdsp-agent/static/remoteEntry.61343eb4cf0577e74b50.js.map +1 -0
  31. hdsp_jupyter_extension-2.0.6.data/data/share/jupyter/labextensions/hdsp-agent/static/vendors-node_modules_emotion_cache_dist_emotion-cache_browser_development_esm_js.24edcc52a1c014a8a5f0.js → hdsp_jupyter_extension-2.0.7.data/data/share/jupyter/labextensions/hdsp-agent/static/vendors-node_modules_emotion_cache_dist_emotion-cache_browser_development_esm_js-node_modules-782ee5.d9ed8645ef1d311657d8.js +209 -2
  32. hdsp_jupyter_extension-2.0.7.data/data/share/jupyter/labextensions/hdsp-agent/static/vendors-node_modules_emotion_cache_dist_emotion-cache_browser_development_esm_js-node_modules-782ee5.d9ed8645ef1d311657d8.js.map +1 -0
  33. jupyter_ext/labextension/static/vendors-node_modules_emotion_react_dist_emotion-react_browser_development_esm_js.19ecf6babe00caff6b8a.js → hdsp_jupyter_extension-2.0.7.data/data/share/jupyter/labextensions/hdsp-agent/static/vendors-node_modules_emotion_react_dist_emotion-react_browser_development_esm_js.36b49c71871f98d4f549.js +2 -209
  34. hdsp_jupyter_extension-2.0.7.data/data/share/jupyter/labextensions/hdsp-agent/static/vendors-node_modules_emotion_react_dist_emotion-react_browser_development_esm_js.36b49c71871f98d4f549.js.map +1 -0
  35. hdsp_jupyter_extension-2.0.6.data/data/share/jupyter/labextensions/hdsp-agent/static/vendors-node_modules_mui_material_utils_createSvgIcon_js.1f5038488cdfd8b3a85d.js → hdsp_jupyter_extension-2.0.7.data/data/share/jupyter/labextensions/hdsp-agent/static/vendors-node_modules_mui_material_utils_createSvgIcon_js.2e13df4ea61496e95d45.js +3 -212
  36. hdsp_jupyter_extension-2.0.7.data/data/share/jupyter/labextensions/hdsp-agent/static/vendors-node_modules_mui_material_utils_createSvgIcon_js.2e13df4ea61496e95d45.js.map +1 -0
  37. {hdsp_jupyter_extension-2.0.6.dist-info → hdsp_jupyter_extension-2.0.7.dist-info}/METADATA +2 -1
  38. {hdsp_jupyter_extension-2.0.6.dist-info → hdsp_jupyter_extension-2.0.7.dist-info}/RECORD +71 -68
  39. jupyter_ext/_version.py +1 -1
  40. jupyter_ext/handlers.py +1176 -58
  41. jupyter_ext/labextension/build_log.json +1 -1
  42. jupyter_ext/labextension/package.json +2 -2
  43. jupyter_ext/labextension/static/{frontend_styles_index_js.02d346171474a0fb2dc1.js → frontend_styles_index_js.4770ec0fb2d173b6deb4.js} +312 -6
  44. jupyter_ext/labextension/static/frontend_styles_index_js.4770ec0fb2d173b6deb4.js.map +1 -0
  45. jupyter_ext/labextension/static/{lib_index_js.a223ea20056954479ae9.js → lib_index_js.29cf4312af19e86f82af.js} +1547 -330
  46. jupyter_ext/labextension/static/lib_index_js.29cf4312af19e86f82af.js.map +1 -0
  47. jupyter_ext/labextension/static/{remoteEntry.addf2fa038fa60304aa2.js → remoteEntry.61343eb4cf0577e74b50.js} +8 -8
  48. jupyter_ext/labextension/static/remoteEntry.61343eb4cf0577e74b50.js.map +1 -0
  49. jupyter_ext/labextension/static/{vendors-node_modules_emotion_cache_dist_emotion-cache_browser_development_esm_js.24edcc52a1c014a8a5f0.js → vendors-node_modules_emotion_cache_dist_emotion-cache_browser_development_esm_js-node_modules-782ee5.d9ed8645ef1d311657d8.js} +209 -2
  50. jupyter_ext/labextension/static/vendors-node_modules_emotion_cache_dist_emotion-cache_browser_development_esm_js-node_modules-782ee5.d9ed8645ef1d311657d8.js.map +1 -0
  51. hdsp_jupyter_extension-2.0.6.data/data/share/jupyter/labextensions/hdsp-agent/static/vendors-node_modules_emotion_react_dist_emotion-react_browser_development_esm_js.19ecf6babe00caff6b8a.js → jupyter_ext/labextension/static/vendors-node_modules_emotion_react_dist_emotion-react_browser_development_esm_js.36b49c71871f98d4f549.js +2 -209
  52. jupyter_ext/labextension/static/vendors-node_modules_emotion_react_dist_emotion-react_browser_development_esm_js.36b49c71871f98d4f549.js.map +1 -0
  53. jupyter_ext/labextension/static/{vendors-node_modules_mui_material_utils_createSvgIcon_js.1f5038488cdfd8b3a85d.js → vendors-node_modules_mui_material_utils_createSvgIcon_js.2e13df4ea61496e95d45.js} +3 -212
  54. jupyter_ext/labextension/static/vendors-node_modules_mui_material_utils_createSvgIcon_js.2e13df4ea61496e95d45.js.map +1 -0
  55. jupyter_ext/resource_usage.py +180 -0
  56. jupyter_ext/tests/test_handlers.py +58 -0
  57. agent_server/langchain/executors/jupyter_executor.py +0 -429
  58. agent_server/langchain/middleware/__init__.py +0 -36
  59. agent_server/langchain/middleware/code_search_middleware.py +0 -278
  60. agent_server/langchain/middleware/error_handling_middleware.py +0 -338
  61. agent_server/langchain/middleware/jupyter_execution_middleware.py +0 -301
  62. agent_server/langchain/middleware/rag_middleware.py +0 -227
  63. agent_server/langchain/middleware/validation_middleware.py +0 -240
  64. hdsp_jupyter_extension-2.0.6.data/data/share/jupyter/labextensions/hdsp-agent/static/frontend_styles_index_js.02d346171474a0fb2dc1.js.map +0 -1
  65. hdsp_jupyter_extension-2.0.6.data/data/share/jupyter/labextensions/hdsp-agent/static/lib_index_js.a223ea20056954479ae9.js.map +0 -1
  66. hdsp_jupyter_extension-2.0.6.data/data/share/jupyter/labextensions/hdsp-agent/static/remoteEntry.addf2fa038fa60304aa2.js.map +0 -1
  67. hdsp_jupyter_extension-2.0.6.data/data/share/jupyter/labextensions/hdsp-agent/static/vendors-node_modules_emotion_cache_dist_emotion-cache_browser_development_esm_js.24edcc52a1c014a8a5f0.js.map +0 -1
  68. hdsp_jupyter_extension-2.0.6.data/data/share/jupyter/labextensions/hdsp-agent/static/vendors-node_modules_emotion_react_dist_emotion-react_browser_development_esm_js.19ecf6babe00caff6b8a.js.map +0 -1
  69. hdsp_jupyter_extension-2.0.6.data/data/share/jupyter/labextensions/hdsp-agent/static/vendors-node_modules_mui_material_utils_createSvgIcon_js.1f5038488cdfd8b3a85d.js.map +0 -1
  70. jupyter_ext/labextension/static/frontend_styles_index_js.02d346171474a0fb2dc1.js.map +0 -1
  71. jupyter_ext/labextension/static/lib_index_js.a223ea20056954479ae9.js.map +0 -1
  72. jupyter_ext/labextension/static/remoteEntry.addf2fa038fa60304aa2.js.map +0 -1
  73. jupyter_ext/labextension/static/vendors-node_modules_emotion_cache_dist_emotion-cache_browser_development_esm_js.24edcc52a1c014a8a5f0.js.map +0 -1
  74. jupyter_ext/labextension/static/vendors-node_modules_emotion_react_dist_emotion-react_browser_development_esm_js.19ecf6babe00caff6b8a.js.map +0 -1
  75. jupyter_ext/labextension/static/vendors-node_modules_mui_material_utils_createSvgIcon_js.1f5038488cdfd8b3a85d.js.map +0 -1
  76. {hdsp_jupyter_extension-2.0.6.data → hdsp_jupyter_extension-2.0.7.data}/data/etc/jupyter/jupyter_server_config.d/hdsp_jupyter_extension.json +0 -0
  77. {hdsp_jupyter_extension-2.0.6.data → hdsp_jupyter_extension-2.0.7.data}/data/share/jupyter/labextensions/hdsp-agent/install.json +0 -0
  78. {hdsp_jupyter_extension-2.0.6.data → hdsp_jupyter_extension-2.0.7.data}/data/share/jupyter/labextensions/hdsp-agent/static/node_modules_emotion_use-insertion-effect-with-fallbacks_dist_emotion-use-insertion-effect-wi-3ba6b80.c095373419d05e6f141a.js +0 -0
  79. {hdsp_jupyter_extension-2.0.6.data → hdsp_jupyter_extension-2.0.7.data}/data/share/jupyter/labextensions/hdsp-agent/static/node_modules_emotion_use-insertion-effect-with-fallbacks_dist_emotion-use-insertion-effect-wi-3ba6b80.c095373419d05e6f141a.js.map +0 -0
  80. {hdsp_jupyter_extension-2.0.6.data → hdsp_jupyter_extension-2.0.7.data}/data/share/jupyter/labextensions/hdsp-agent/static/node_modules_emotion_use-insertion-effect-with-fallbacks_dist_emotion-use-insertion-effect-wi-3ba6b81.61e75fb98ecff46cf836.js +0 -0
  81. {hdsp_jupyter_extension-2.0.6.data → hdsp_jupyter_extension-2.0.7.data}/data/share/jupyter/labextensions/hdsp-agent/static/node_modules_emotion_use-insertion-effect-with-fallbacks_dist_emotion-use-insertion-effect-wi-3ba6b81.61e75fb98ecff46cf836.js.map +0 -0
  82. {hdsp_jupyter_extension-2.0.6.data → hdsp_jupyter_extension-2.0.7.data}/data/share/jupyter/labextensions/hdsp-agent/static/style.js +0 -0
  83. {hdsp_jupyter_extension-2.0.6.data → hdsp_jupyter_extension-2.0.7.data}/data/share/jupyter/labextensions/hdsp-agent/static/vendors-node_modules_babel_runtime_helpers_esm_extends_js-node_modules_emotion_serialize_dist-051195.e2553aab0c3963b83dd7.js +0 -0
  84. {hdsp_jupyter_extension-2.0.6.data → hdsp_jupyter_extension-2.0.7.data}/data/share/jupyter/labextensions/hdsp-agent/static/vendors-node_modules_babel_runtime_helpers_esm_extends_js-node_modules_emotion_serialize_dist-051195.e2553aab0c3963b83dd7.js.map +0 -0
  85. {hdsp_jupyter_extension-2.0.6.data → hdsp_jupyter_extension-2.0.7.data}/data/share/jupyter/labextensions/hdsp-agent/static/vendors-node_modules_emotion_styled_dist_emotion-styled_browser_development_esm_js.661fb5836f4978a7c6e1.js +0 -0
  86. {hdsp_jupyter_extension-2.0.6.data → hdsp_jupyter_extension-2.0.7.data}/data/share/jupyter/labextensions/hdsp-agent/static/vendors-node_modules_emotion_styled_dist_emotion-styled_browser_development_esm_js.661fb5836f4978a7c6e1.js.map +0 -0
  87. {hdsp_jupyter_extension-2.0.6.data → hdsp_jupyter_extension-2.0.7.data}/data/share/jupyter/labextensions/hdsp-agent/static/vendors-node_modules_mui_material_index_js.985697e0162d8d088ca2.js +0 -0
  88. {hdsp_jupyter_extension-2.0.6.data → hdsp_jupyter_extension-2.0.7.data}/data/share/jupyter/labextensions/hdsp-agent/static/vendors-node_modules_mui_material_index_js.985697e0162d8d088ca2.js.map +0 -0
  89. {hdsp_jupyter_extension-2.0.6.dist-info → hdsp_jupyter_extension-2.0.7.dist-info}/WHEEL +0 -0
  90. {hdsp_jupyter_extension-2.0.6.dist-info → hdsp_jupyter_extension-2.0.7.dist-info}/licenses/LICENSE +0 -0
agent_server/langchain/tools/search_tools.py CHANGED
@@ -1,131 +1,142 @@
 """
 Search Tools for LangChain Agent
 
-Provides tools for searching code in workspace and notebooks:
-- search_workspace: Search files in the workspace
-- search_notebook_cells: Search cells in Jupyter notebooks
+Provides tools for searching code in workspace and notebooks.
+These tools return pending_execution status and are executed on the client (Jupyter) side
+using subprocess (find/grep/ripgrep).
+
+Key features:
+- Returns command info for client-side execution via subprocess
+- Supports ripgrep (rg) if available, falls back to grep
+- Executes immediately without user approval
+- Shows the command being executed in status messages
 """
 
-import json
-import os
-import re
+import logging
+import shutil
 from typing import Any, Dict, List, Optional
 
 from langchain_core.tools import tool
 from pydantic import BaseModel, Field
 
+logger = logging.getLogger(__name__)
+
 
 class SearchWorkspaceInput(BaseModel):
     """Input schema for search_workspace tool"""
+
     pattern: str = Field(description="Search pattern (regex or text)")
     file_types: List[str] = Field(
         default=["*.py", "*.ipynb"],
-        description="File patterns to search (e.g., ['*.py', '*.ipynb'])"
+        description="File patterns to search (e.g., ['*.py', '*.ipynb'])",
     )
     path: str = Field(default=".", description="Directory to search in")
     max_results: int = Field(default=50, description="Maximum number of results")
     case_sensitive: bool = Field(default=False, description="Case-sensitive search")
+    execution_result: Optional[Dict[str, Any]] = Field(
+        default=None,
+        description="Execution result payload from the client",
+    )
 
 
 class SearchNotebookCellsInput(BaseModel):
     """Input schema for search_notebook_cells tool"""
+
     pattern: str = Field(description="Search pattern (regex or text)")
     notebook_path: Optional[str] = Field(
-        default=None,
-        description="Specific notebook to search (None = all notebooks)"
+        default=None, description="Specific notebook to search (None = all notebooks)"
    )
     cell_type: Optional[str] = Field(
         default=None,
-        description="Cell type filter: 'code', 'markdown', or None for all"
+        description="Cell type filter: 'code', 'markdown', or None for all",
     )
     max_results: int = Field(default=30, description="Maximum number of results")
     case_sensitive: bool = Field(default=False, description="Case-sensitive search")
+    execution_result: Optional[Dict[str, Any]] = Field(
+        default=None,
+        description="Execution result payload from the client",
+    )
+
+
+def _is_ripgrep_available() -> bool:
+    """Check if ripgrep (rg) is installed and available."""
+    return shutil.which("rg") is not None
 
 
-def _search_in_file(
-    file_path: str,
+def _build_grep_command(
     pattern: str,
-    case_sensitive: bool = False
-) -> List[Dict[str, Any]]:
-    """Search for pattern in a regular file"""
-    results = []
-    flags = 0 if case_sensitive else re.IGNORECASE
-
-    try:
-        compiled = re.compile(pattern, flags)
-    except re.error:
-        # If pattern is not valid regex, use literal search
-        compiled = re.compile(re.escape(pattern), flags)
-
-    try:
-        with open(file_path, "r", encoding="utf-8", errors="ignore") as f:
-            for line_num, line in enumerate(f, 1):
-                if compiled.search(line):
-                    results.append({
-                        "file_path": file_path,
-                        "line_number": line_num,
-                        "content": line.strip()[:200],
-                        "match_type": "line",
-                    })
-    except Exception:
-        pass
-
-    return results
-
-
-def _search_in_notebook(
-    notebook_path: str,
+    file_types: List[str],
+    path: str,
+    case_sensitive: bool,
+    max_results: int,
+) -> tuple[str, str]:
+    """
+    Build a grep/ripgrep command for searching files.
+
+    Returns:
+        Tuple of (command_string, tool_name) where tool_name is 'rg' or 'grep'
+    """
+    # Check ripgrep availability (this check will also be done on client)
+    use_ripgrep = _is_ripgrep_available()
+
+    if use_ripgrep:
+        # Build ripgrep command
+        cmd_parts = ["rg", "--line-number", "--with-filename"]
+
+        if not case_sensitive:
+            cmd_parts.append("--ignore-case")
+
+        # Add file type filters using glob patterns
+        for ft in file_types:
+            cmd_parts.extend(["--glob", ft])
+
+        # Limit results
+        cmd_parts.extend(["--max-count", str(max_results)])
+
+        # Escape pattern for shell
+        escaped_pattern = pattern.replace("'", "'\\''")
+        cmd_parts.append(f"'{escaped_pattern}'")
+        cmd_parts.append(path)
+
+        return " ".join(cmd_parts), "rg"
+    else:
+        # Build find + grep command for cross-platform compatibility
+        find_parts = ["find", path, "-type", "f", "("]
+
+        for i, ft in enumerate(file_types):
+            if i > 0:
+                find_parts.append("-o")
+            find_parts.extend(["-name", f"'{ft}'"])
+
+        find_parts.append(")")
+
+        # Add grep with proper flags
+        grep_flags = "-n"  # Line numbers
+        if not case_sensitive:
+            grep_flags += "i"
+
+        # Escape pattern for shell
+        escaped_pattern = pattern.replace("'", "'\\''")
+
+        # Combine with xargs for efficiency
+        cmd = f"{' '.join(find_parts)} 2>/dev/null | xargs grep -{grep_flags} '{escaped_pattern}' 2>/dev/null | head -n {max_results}"
+
+        return cmd, "grep"
+
+
+def _build_notebook_search_command(
     pattern: str,
-    cell_type: Optional[str] = None,
-    case_sensitive: bool = False
-) -> List[Dict[str, Any]]:
-    """Search for pattern in a Jupyter notebook"""
-    results = []
-    flags = 0 if case_sensitive else re.IGNORECASE
-
-    try:
-        compiled = re.compile(pattern, flags)
-    except re.error:
-        compiled = re.compile(re.escape(pattern), flags)
-
-    try:
-        with open(notebook_path, "r", encoding="utf-8") as f:
-            notebook = json.load(f)
-
-        cells = notebook.get("cells", [])
-        for idx, cell in enumerate(cells):
-            current_type = cell.get("cell_type", "code")
-
-            # Filter by cell type if specified
-            if cell_type and current_type != cell_type:
-                continue
-
-            source = cell.get("source", [])
-            if isinstance(source, list):
-                source = "".join(source)
-
-            if compiled.search(source):
-                # Find matching lines
-                matching_lines = []
-                for line_num, line in enumerate(source.split("\n"), 1):
-                    if compiled.search(line):
-                        matching_lines.append({
-                            "line": line_num,
-                            "content": line.strip()[:150]
-                        })
-
-                results.append({
-                    "file_path": notebook_path,
-                    "cell_index": idx,
-                    "cell_type": current_type,
-                    "content": source[:300] + "..." if len(source) > 300 else source,
-                    "matching_lines": matching_lines[:5],
-                    "match_type": "cell",
-                })
-    except Exception:
-        pass
-
-    return results
+    notebook_path: Optional[str],
+    path: str,
+    max_results: int,
+) -> str:
+    """Build a command to find notebooks for searching."""
+    if notebook_path:
+        return f"echo '{notebook_path}'"
+    else:
+        return (
+            f"find {path} -name '*.ipynb' -type f 2>/dev/null | head -n {max_results}"
+        )
 
 
 @tool(args_schema=SearchWorkspaceInput)
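For context on the command builders added in the hunk above, here is a minimal sketch of what `_build_grep_command` returns for sample arguments. The argument values are illustrative, the import assumes the module is importable in your environment, and the ripgrep form only appears when `rg` is on PATH; otherwise a find | xargs grep | head pipeline is returned.

```python
# Illustrative call into the helper added in the hunk above.
from agent_server.langchain.tools.search_tools import _build_grep_command

command, tool_used = _build_grep_command(
    pattern="import pandas",  # sample pattern, not from the package
    file_types=["*.py", "*.ipynb"],
    path=".",
    case_sensitive=False,
    max_results=50,
)

# With rg on PATH, tool_used == "rg" and command is roughly:
#   rg --line-number --with-filename --ignore-case --glob *.py --glob *.ipynb --max-count 50 'import pandas' .
# Without rg, tool_used == "grep" and a find/grep fallback string is returned instead.
print(tool_used, command)
```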
@@ -135,84 +146,65 @@ def search_workspace_tool(
     path: str = ".",
     max_results: int = 50,
     case_sensitive: bool = False,
-    workspace_root: str = "."
+    execution_result: Optional[Dict[str, Any]] = None,
+    workspace_root: str = ".",
 ) -> Dict[str, Any]:
     """
     Search for a pattern across files in the workspace.
-
+
+    This tool is executed on the client side using subprocess (grep/ripgrep).
     Searches both regular files and Jupyter notebooks.
-    For notebooks, searches within cell contents.
-
+
     Args:
         pattern: Search pattern (regex or text)
         file_types: File patterns to search (default: ['*.py', '*.ipynb'])
-        path: Directory to search in
+        path: Directory to search in (relative to workspace)
         max_results: Maximum number of results to return
         case_sensitive: Whether search is case-sensitive
-
+
     Returns:
-        Dict with search results
+        Dict with search results or pending_execution status
     """
-    import fnmatch
-
     if file_types is None:
         file_types = ["*.py", "*.ipynb"]
 
-    results = []
-    files_searched = 0
-
-    try:
-        search_path = os.path.normpath(os.path.join(workspace_root, path))
-
-        for root, _, filenames in os.walk(search_path):
-            for filename in filenames:
-                # Check if file matches any pattern
-                if not any(fnmatch.fnmatch(filename, ft) for ft in file_types):
-                    continue
-
-                file_path = os.path.join(root, filename)
-                rel_path = os.path.relpath(file_path, workspace_root)
-                files_searched += 1
-
-                if filename.endswith(".ipynb"):
-                    # Search in notebook
-                    matches = _search_in_notebook(
-                        file_path, pattern, None, case_sensitive
-                    )
-                    for m in matches:
-                        m["file_path"] = rel_path
-                    results.extend(matches)
-                else:
-                    # Search in regular file
-                    matches = _search_in_file(file_path, pattern, case_sensitive)
-                    for m in matches:
-                        m["file_path"] = rel_path
-                    results.extend(matches)
-
-                if len(results) >= max_results:
-                    break
-
-            if len(results) >= max_results:
-                break
-
-        return {
-            "tool": "search_workspace",
-            "success": True,
+    # Build the search command
+    command, tool_used = _build_grep_command(
+        pattern=pattern,
+        file_types=file_types,
+        path=path,
+        case_sensitive=case_sensitive,
+        max_results=max_results,
+    )
+
+    response: Dict[str, Any] = {
+        "tool": "search_workspace_tool",
+        "parameters": {
             "pattern": pattern,
+            "file_types": file_types,
             "path": path,
-            "files_searched": files_searched,
-            "total_results": len(results),
-            "results": results[:max_results],
-            "truncated": len(results) > max_results,
-        }
-
-    except Exception as e:
-        return {
-            "tool": "search_workspace",
-            "success": False,
-            "error": f"Search failed: {str(e)}",
-            "pattern": pattern,
-        }
+            "max_results": max_results,
+            "case_sensitive": case_sensitive,
+        },
+        "command": command,
+        "tool_used": tool_used,
+        "status": "pending_execution",
+        "message": "Search queued for execution by client",
+    }
+
+    if execution_result is not None:
+        response["execution_result"] = execution_result
+        response["status"] = "complete"
+        response["message"] = "Search executed with client-reported results"
+        # Parse the execution result to extract search results
+        if isinstance(execution_result, dict):
+            response["success"] = execution_result.get("success", False)
+            response["results"] = execution_result.get("results", [])
+            response["total_results"] = execution_result.get("total_results", 0)
+            if "error" in execution_result:
+                response["error"] = execution_result["error"]
+
+    return response
 
 
 @tool(args_schema=SearchNotebookCellsInput)
@@ -222,84 +214,74 @@ def search_notebook_cells_tool(
     cell_type: Optional[str] = None,
     max_results: int = 30,
     case_sensitive: bool = False,
-    workspace_root: str = "."
+    execution_result: Optional[Dict[str, Any]] = None,
+    workspace_root: str = ".",
 ) -> Dict[str, Any]:
     """
     Search for a pattern in Jupyter notebook cells.
-
+
+    This tool is executed on the client side.
     Can search a specific notebook or all notebooks in workspace.
     Optionally filter by cell type (code/markdown).
-
+
     Args:
         pattern: Search pattern (regex or text)
         notebook_path: Specific notebook to search (None = all)
         cell_type: Filter by cell type ('code', 'markdown', or None)
         max_results: Maximum number of results
         case_sensitive: Whether search is case-sensitive
-
+
     Returns:
-        Dict with matching cells
+        Dict with matching cells or pending_execution status
     """
-    results = []
-    notebooks_searched = 0
-
-    try:
-        if notebook_path:
-            # Search specific notebook
-            full_path = os.path.normpath(
-                os.path.join(workspace_root, notebook_path)
-            )
-            if os.path.exists(full_path) and full_path.endswith(".ipynb"):
-                matches = _search_in_notebook(
-                    full_path, pattern, cell_type, case_sensitive
-                )
-                for m in matches:
-                    m["file_path"] = notebook_path
-                results.extend(matches)
-            notebooks_searched = 1
-        else:
-            # Search all notebooks
-            for root, _, filenames in os.walk(workspace_root):
-                for filename in filenames:
-                    if not filename.endswith(".ipynb"):
-                        continue
-
-                    file_path = os.path.join(root, filename)
-                    rel_path = os.path.relpath(file_path, workspace_root)
-                    notebooks_searched += 1
-
-                    matches = _search_in_notebook(
-                        file_path, pattern, cell_type, case_sensitive
-                    )
-                    for m in matches:
-                        m["file_path"] = rel_path
-                    results.extend(matches)
-
-                    if len(results) >= max_results:
-                        break
-
-                if len(results) >= max_results:
-                    break
-
-        return {
-            "tool": "search_notebook_cells",
-            "success": True,
+    # Build find command for notebooks
+    find_command = _build_notebook_search_command(
+        pattern=pattern,
+        notebook_path=notebook_path,
+        path=".",
+        max_results=max_results,
+    )
+
+    response: Dict[str, Any] = {
+        "tool": "search_notebook_cells_tool",
+        "parameters": {
             "pattern": pattern,
             "notebook_path": notebook_path,
             "cell_type": cell_type,
-            "notebooks_searched": notebooks_searched,
-            "total_results": len(results),
-            "results": results[:max_results],
-            "truncated": len(results) > max_results,
-        }
-
-    except Exception as e:
-        return {
-            "tool": "search_notebook_cells",
-            "success": False,
-            "error": f"Search failed: {str(e)}",
-            "pattern": pattern,
-        }
+            "max_results": max_results,
+            "case_sensitive": case_sensitive,
+        },
+        "find_command": find_command,
+        "status": "pending_execution",
+        "message": "Notebook search queued for execution by client",
+    }
+
+    if execution_result is not None:
+        response["execution_result"] = execution_result
+        response["status"] = "complete"
+        response["message"] = "Notebook search executed with client-reported results"
+        # Parse the execution result
+        if isinstance(execution_result, dict):
+            response["success"] = execution_result.get("success", False)
+            response["results"] = execution_result.get("results", [])
+            response["total_results"] = execution_result.get("total_results", 0)
+            response["notebooks_searched"] = execution_result.get(
+                "notebooks_searched", 0
+            )
+            if "error" in execution_result:
+                response["error"] = execution_result["error"]
+
+    return response
+
+
+def create_search_tools(workspace_root: str = ".") -> List:
+    """
+    Create search tools (for backward compatibility).
+
+    Note: workspace_root is not used since tools return pending_execution
+    and actual execution happens on the client side.
+    """
+    return [search_workspace_tool, search_notebook_cells_tool]
 
 
 # Export all tools
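To make the pending_execution contract above concrete, here is a minimal client-side sketch: run the server-built command with subprocess and hand the parsed output back through the tool's execution_result parameter. Only the response keys shown in the diff ("command", "status", "success", "results", "total_results") come from the package; the helper name and the output parsing are illustrative assumptions.

```python
# Hypothetical client-side handler for a search_workspace_tool response.
import subprocess
from typing import Any, Dict


def run_pending_search(tool_response: Dict[str, Any]) -> Dict[str, Any]:
    """Execute the server-built grep/rg command and build an execution_result payload."""
    if tool_response.get("status") != "pending_execution":
        return tool_response  # already complete, nothing to run

    proc = subprocess.run(
        tool_response["command"], shell=True, capture_output=True, text=True, timeout=60
    )

    results = []
    for line in proc.stdout.splitlines():
        # grep/rg output is "file:line:content"; keep the parsing deliberately simple
        file_path, _, rest = line.partition(":")
        line_number, _, content = rest.partition(":")
        results.append(
            {"file_path": file_path, "line_number": line_number, "content": content}
        )

    # This dict is what the client would send back as execution_result,
    # flipping the tool response from "pending_execution" to "complete".
    return {
        "success": proc.returncode in (0, 1),  # grep/rg exit 1 when nothing matches
        "results": results,
        "total_results": len(results),
    }
```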
agent_server/langchain/tools/shell_tools.py ADDED
@@ -0,0 +1,54 @@
+"""
+Shell command tool for LangChain agent.
+
+Execution is handled by the Jupyter extension (client) after HITL approval.
+"""
+
+from typing import Any, Dict, Optional
+
+from langchain_core.tools import tool
+from pydantic import BaseModel, Field
+
+
+class ExecuteCommandInput(BaseModel):
+    """Input schema for execute_command_tool."""
+
+    command: str = Field(description="Shell command to execute")
+    stdin: Optional[str] = Field(
+        default="y\n",
+        description="Input to provide to the command for interactive prompts (default: 'y\\n' for yes/no prompts)",
+    )
+    timeout: Optional[int] = Field(
+        default=600000, description="Timeout in milliseconds"
+    )
+    execution_result: Optional[Dict[str, Any]] = Field(
+        default=None, description="Optional execution result payload from the client"
+    )
+
+
+@tool(args_schema=ExecuteCommandInput)
+def execute_command_tool(
+    command: str,
+    stdin: Optional[str] = "y\n",
+    timeout: Optional[int] = 600000,
+    execution_result: Optional[Dict[str, Any]] = None,
+) -> Dict[str, Any]:
+    """
+    Execute a shell command via the client (Jupyter extension).
+    The agent server only coordinates the request. The client performs the
+    actual execution after user approval and returns execution_result.
+
+    - Interactive prompts are auto-answered with "y" by default
+    - NEVER run long-running commands (servers, watch, dev) or endless processes
+    """
+    response: Dict[str, Any] = {
+        "tool": "execute_command_tool",
+        "parameters": {"command": command, "stdin": stdin, "timeout": timeout},
+        "status": "pending_execution",
+        "message": "Shell command queued for execution by client",
+    }
+    if execution_result is not None:
+        response["execution_result"] = execution_result
+        response["status"] = "complete"
+        response["message"] = "Shell command executed with client-reported results"
+    return response
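A short usage sketch of the two-phase flow execute_command_tool implements: the first call only describes the command (pending_execution), and a second call that carries execution_result marks it complete. The sample command and the contents of the result payload are illustrative, not defined by the package.

```python
# Illustrative two-phase invocation of the tool added above.
from agent_server.langchain.tools.shell_tools import execute_command_tool

# Phase 1: the agent server only queues the request for the client.
pending = execute_command_tool.invoke({"command": "pip install requests"})
assert pending["status"] == "pending_execution"

# Phase 2: the client ran the command (after HITL approval) and reports back.
done = execute_command_tool.invoke(
    {
        "command": "pip install requests",
        "execution_result": {"success": True, "stdout": "Successfully installed requests"},
    }
)
assert done["status"] == "complete"
```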
agent_server/main.py CHANGED
@@ -19,6 +19,7 @@ from agent_server.routers import agent, chat, config, file_resolver, health, rag
 # Optional LangChain router (requires langchain dependencies)
 try:
     from agent_server.routers import langchain_agent
+
     LANGCHAIN_AVAILABLE = True
 except ImportError:
     LANGCHAIN_AVAILABLE = False
@@ -153,7 +154,9 @@ app.include_router(file_resolver.router, prefix="/file", tags=["File Resolution"
 
 # Register LangChain agent router if available
 if LANGCHAIN_AVAILABLE:
-    app.include_router(langchain_agent.router, prefix="/agent", tags=["LangChain Agent"])
+    app.include_router(
+        langchain_agent.router, prefix="/agent", tags=["LangChain Agent"]
+    )
     logger.info("LangChain agent router registered")
 
 
agent_server/routers/health.py CHANGED
@@ -36,7 +36,7 @@ async def health_check() -> HealthResponse:
 async def status_check() -> HealthResponse:
     """
     Status check endpoint (alias for health check).
-
+
     Returns server status, timestamp, and version.
     """
     return HealthResponse(