hdsp-jupyter-extension 2.0.6__py3-none-any.whl → 2.0.7__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- agent_server/core/reflection_engine.py +0 -1
- agent_server/knowledge/watchdog_service.py +1 -1
- agent_server/langchain/ARCHITECTURE.md +1193 -0
- agent_server/langchain/agent.py +74 -588
- agent_server/langchain/custom_middleware.py +636 -0
- agent_server/langchain/executors/__init__.py +2 -7
- agent_server/langchain/executors/notebook_searcher.py +46 -38
- agent_server/langchain/hitl_config.py +66 -0
- agent_server/langchain/llm_factory.py +166 -0
- agent_server/langchain/logging_utils.py +184 -0
- agent_server/langchain/prompts.py +119 -0
- agent_server/langchain/state.py +16 -6
- agent_server/langchain/tools/__init__.py +6 -0
- agent_server/langchain/tools/file_tools.py +91 -129
- agent_server/langchain/tools/jupyter_tools.py +18 -18
- agent_server/langchain/tools/resource_tools.py +161 -0
- agent_server/langchain/tools/search_tools.py +198 -216
- agent_server/langchain/tools/shell_tools.py +54 -0
- agent_server/main.py +4 -1
- agent_server/routers/health.py +1 -1
- agent_server/routers/langchain_agent.py +940 -285
- hdsp_agent_core/prompts/auto_agent_prompts.py +3 -3
- {hdsp_jupyter_extension-2.0.6.data → hdsp_jupyter_extension-2.0.7.data}/data/share/jupyter/labextensions/hdsp-agent/build_log.json +1 -1
- {hdsp_jupyter_extension-2.0.6.data → hdsp_jupyter_extension-2.0.7.data}/data/share/jupyter/labextensions/hdsp-agent/package.json +2 -2
- hdsp_jupyter_extension-2.0.6.data/data/share/jupyter/labextensions/hdsp-agent/static/frontend_styles_index_js.02d346171474a0fb2dc1.js → hdsp_jupyter_extension-2.0.7.data/data/share/jupyter/labextensions/hdsp-agent/static/frontend_styles_index_js.4770ec0fb2d173b6deb4.js +312 -6
- hdsp_jupyter_extension-2.0.7.data/data/share/jupyter/labextensions/hdsp-agent/static/frontend_styles_index_js.4770ec0fb2d173b6deb4.js.map +1 -0
- hdsp_jupyter_extension-2.0.6.data/data/share/jupyter/labextensions/hdsp-agent/static/lib_index_js.a223ea20056954479ae9.js → hdsp_jupyter_extension-2.0.7.data/data/share/jupyter/labextensions/hdsp-agent/static/lib_index_js.29cf4312af19e86f82af.js +1547 -330
- hdsp_jupyter_extension-2.0.7.data/data/share/jupyter/labextensions/hdsp-agent/static/lib_index_js.29cf4312af19e86f82af.js.map +1 -0
- hdsp_jupyter_extension-2.0.6.data/data/share/jupyter/labextensions/hdsp-agent/static/remoteEntry.addf2fa038fa60304aa2.js → hdsp_jupyter_extension-2.0.7.data/data/share/jupyter/labextensions/hdsp-agent/static/remoteEntry.61343eb4cf0577e74b50.js +8 -8
- hdsp_jupyter_extension-2.0.7.data/data/share/jupyter/labextensions/hdsp-agent/static/remoteEntry.61343eb4cf0577e74b50.js.map +1 -0
- hdsp_jupyter_extension-2.0.6.data/data/share/jupyter/labextensions/hdsp-agent/static/vendors-node_modules_emotion_cache_dist_emotion-cache_browser_development_esm_js.24edcc52a1c014a8a5f0.js → hdsp_jupyter_extension-2.0.7.data/data/share/jupyter/labextensions/hdsp-agent/static/vendors-node_modules_emotion_cache_dist_emotion-cache_browser_development_esm_js-node_modules-782ee5.d9ed8645ef1d311657d8.js +209 -2
- hdsp_jupyter_extension-2.0.7.data/data/share/jupyter/labextensions/hdsp-agent/static/vendors-node_modules_emotion_cache_dist_emotion-cache_browser_development_esm_js-node_modules-782ee5.d9ed8645ef1d311657d8.js.map +1 -0
- jupyter_ext/labextension/static/vendors-node_modules_emotion_react_dist_emotion-react_browser_development_esm_js.19ecf6babe00caff6b8a.js → hdsp_jupyter_extension-2.0.7.data/data/share/jupyter/labextensions/hdsp-agent/static/vendors-node_modules_emotion_react_dist_emotion-react_browser_development_esm_js.36b49c71871f98d4f549.js +2 -209
- hdsp_jupyter_extension-2.0.7.data/data/share/jupyter/labextensions/hdsp-agent/static/vendors-node_modules_emotion_react_dist_emotion-react_browser_development_esm_js.36b49c71871f98d4f549.js.map +1 -0
- hdsp_jupyter_extension-2.0.6.data/data/share/jupyter/labextensions/hdsp-agent/static/vendors-node_modules_mui_material_utils_createSvgIcon_js.1f5038488cdfd8b3a85d.js → hdsp_jupyter_extension-2.0.7.data/data/share/jupyter/labextensions/hdsp-agent/static/vendors-node_modules_mui_material_utils_createSvgIcon_js.2e13df4ea61496e95d45.js +3 -212
- hdsp_jupyter_extension-2.0.7.data/data/share/jupyter/labextensions/hdsp-agent/static/vendors-node_modules_mui_material_utils_createSvgIcon_js.2e13df4ea61496e95d45.js.map +1 -0
- {hdsp_jupyter_extension-2.0.6.dist-info → hdsp_jupyter_extension-2.0.7.dist-info}/METADATA +2 -1
- {hdsp_jupyter_extension-2.0.6.dist-info → hdsp_jupyter_extension-2.0.7.dist-info}/RECORD +71 -68
- jupyter_ext/_version.py +1 -1
- jupyter_ext/handlers.py +1176 -58
- jupyter_ext/labextension/build_log.json +1 -1
- jupyter_ext/labextension/package.json +2 -2
- jupyter_ext/labextension/static/{frontend_styles_index_js.02d346171474a0fb2dc1.js → frontend_styles_index_js.4770ec0fb2d173b6deb4.js} +312 -6
- jupyter_ext/labextension/static/frontend_styles_index_js.4770ec0fb2d173b6deb4.js.map +1 -0
- jupyter_ext/labextension/static/{lib_index_js.a223ea20056954479ae9.js → lib_index_js.29cf4312af19e86f82af.js} +1547 -330
- jupyter_ext/labextension/static/lib_index_js.29cf4312af19e86f82af.js.map +1 -0
- jupyter_ext/labextension/static/{remoteEntry.addf2fa038fa60304aa2.js → remoteEntry.61343eb4cf0577e74b50.js} +8 -8
- jupyter_ext/labextension/static/remoteEntry.61343eb4cf0577e74b50.js.map +1 -0
- jupyter_ext/labextension/static/{vendors-node_modules_emotion_cache_dist_emotion-cache_browser_development_esm_js.24edcc52a1c014a8a5f0.js → vendors-node_modules_emotion_cache_dist_emotion-cache_browser_development_esm_js-node_modules-782ee5.d9ed8645ef1d311657d8.js} +209 -2
- jupyter_ext/labextension/static/vendors-node_modules_emotion_cache_dist_emotion-cache_browser_development_esm_js-node_modules-782ee5.d9ed8645ef1d311657d8.js.map +1 -0
- hdsp_jupyter_extension-2.0.6.data/data/share/jupyter/labextensions/hdsp-agent/static/vendors-node_modules_emotion_react_dist_emotion-react_browser_development_esm_js.19ecf6babe00caff6b8a.js → jupyter_ext/labextension/static/vendors-node_modules_emotion_react_dist_emotion-react_browser_development_esm_js.36b49c71871f98d4f549.js +2 -209
- jupyter_ext/labextension/static/vendors-node_modules_emotion_react_dist_emotion-react_browser_development_esm_js.36b49c71871f98d4f549.js.map +1 -0
- jupyter_ext/labextension/static/{vendors-node_modules_mui_material_utils_createSvgIcon_js.1f5038488cdfd8b3a85d.js → vendors-node_modules_mui_material_utils_createSvgIcon_js.2e13df4ea61496e95d45.js} +3 -212
- jupyter_ext/labextension/static/vendors-node_modules_mui_material_utils_createSvgIcon_js.2e13df4ea61496e95d45.js.map +1 -0
- jupyter_ext/resource_usage.py +180 -0
- jupyter_ext/tests/test_handlers.py +58 -0
- agent_server/langchain/executors/jupyter_executor.py +0 -429
- agent_server/langchain/middleware/__init__.py +0 -36
- agent_server/langchain/middleware/code_search_middleware.py +0 -278
- agent_server/langchain/middleware/error_handling_middleware.py +0 -338
- agent_server/langchain/middleware/jupyter_execution_middleware.py +0 -301
- agent_server/langchain/middleware/rag_middleware.py +0 -227
- agent_server/langchain/middleware/validation_middleware.py +0 -240
- hdsp_jupyter_extension-2.0.6.data/data/share/jupyter/labextensions/hdsp-agent/static/frontend_styles_index_js.02d346171474a0fb2dc1.js.map +0 -1
- hdsp_jupyter_extension-2.0.6.data/data/share/jupyter/labextensions/hdsp-agent/static/lib_index_js.a223ea20056954479ae9.js.map +0 -1
- hdsp_jupyter_extension-2.0.6.data/data/share/jupyter/labextensions/hdsp-agent/static/remoteEntry.addf2fa038fa60304aa2.js.map +0 -1
- hdsp_jupyter_extension-2.0.6.data/data/share/jupyter/labextensions/hdsp-agent/static/vendors-node_modules_emotion_cache_dist_emotion-cache_browser_development_esm_js.24edcc52a1c014a8a5f0.js.map +0 -1
- hdsp_jupyter_extension-2.0.6.data/data/share/jupyter/labextensions/hdsp-agent/static/vendors-node_modules_emotion_react_dist_emotion-react_browser_development_esm_js.19ecf6babe00caff6b8a.js.map +0 -1
- hdsp_jupyter_extension-2.0.6.data/data/share/jupyter/labextensions/hdsp-agent/static/vendors-node_modules_mui_material_utils_createSvgIcon_js.1f5038488cdfd8b3a85d.js.map +0 -1
- jupyter_ext/labextension/static/frontend_styles_index_js.02d346171474a0fb2dc1.js.map +0 -1
- jupyter_ext/labextension/static/lib_index_js.a223ea20056954479ae9.js.map +0 -1
- jupyter_ext/labextension/static/remoteEntry.addf2fa038fa60304aa2.js.map +0 -1
- jupyter_ext/labextension/static/vendors-node_modules_emotion_cache_dist_emotion-cache_browser_development_esm_js.24edcc52a1c014a8a5f0.js.map +0 -1
- jupyter_ext/labextension/static/vendors-node_modules_emotion_react_dist_emotion-react_browser_development_esm_js.19ecf6babe00caff6b8a.js.map +0 -1
- jupyter_ext/labextension/static/vendors-node_modules_mui_material_utils_createSvgIcon_js.1f5038488cdfd8b3a85d.js.map +0 -1
- {hdsp_jupyter_extension-2.0.6.data → hdsp_jupyter_extension-2.0.7.data}/data/etc/jupyter/jupyter_server_config.d/hdsp_jupyter_extension.json +0 -0
- {hdsp_jupyter_extension-2.0.6.data → hdsp_jupyter_extension-2.0.7.data}/data/share/jupyter/labextensions/hdsp-agent/install.json +0 -0
- {hdsp_jupyter_extension-2.0.6.data → hdsp_jupyter_extension-2.0.7.data}/data/share/jupyter/labextensions/hdsp-agent/static/node_modules_emotion_use-insertion-effect-with-fallbacks_dist_emotion-use-insertion-effect-wi-3ba6b80.c095373419d05e6f141a.js +0 -0
- {hdsp_jupyter_extension-2.0.6.data → hdsp_jupyter_extension-2.0.7.data}/data/share/jupyter/labextensions/hdsp-agent/static/node_modules_emotion_use-insertion-effect-with-fallbacks_dist_emotion-use-insertion-effect-wi-3ba6b80.c095373419d05e6f141a.js.map +0 -0
- {hdsp_jupyter_extension-2.0.6.data → hdsp_jupyter_extension-2.0.7.data}/data/share/jupyter/labextensions/hdsp-agent/static/node_modules_emotion_use-insertion-effect-with-fallbacks_dist_emotion-use-insertion-effect-wi-3ba6b81.61e75fb98ecff46cf836.js +0 -0
- {hdsp_jupyter_extension-2.0.6.data → hdsp_jupyter_extension-2.0.7.data}/data/share/jupyter/labextensions/hdsp-agent/static/node_modules_emotion_use-insertion-effect-with-fallbacks_dist_emotion-use-insertion-effect-wi-3ba6b81.61e75fb98ecff46cf836.js.map +0 -0
- {hdsp_jupyter_extension-2.0.6.data → hdsp_jupyter_extension-2.0.7.data}/data/share/jupyter/labextensions/hdsp-agent/static/style.js +0 -0
- {hdsp_jupyter_extension-2.0.6.data → hdsp_jupyter_extension-2.0.7.data}/data/share/jupyter/labextensions/hdsp-agent/static/vendors-node_modules_babel_runtime_helpers_esm_extends_js-node_modules_emotion_serialize_dist-051195.e2553aab0c3963b83dd7.js +0 -0
- {hdsp_jupyter_extension-2.0.6.data → hdsp_jupyter_extension-2.0.7.data}/data/share/jupyter/labextensions/hdsp-agent/static/vendors-node_modules_babel_runtime_helpers_esm_extends_js-node_modules_emotion_serialize_dist-051195.e2553aab0c3963b83dd7.js.map +0 -0
- {hdsp_jupyter_extension-2.0.6.data → hdsp_jupyter_extension-2.0.7.data}/data/share/jupyter/labextensions/hdsp-agent/static/vendors-node_modules_emotion_styled_dist_emotion-styled_browser_development_esm_js.661fb5836f4978a7c6e1.js +0 -0
- {hdsp_jupyter_extension-2.0.6.data → hdsp_jupyter_extension-2.0.7.data}/data/share/jupyter/labextensions/hdsp-agent/static/vendors-node_modules_emotion_styled_dist_emotion-styled_browser_development_esm_js.661fb5836f4978a7c6e1.js.map +0 -0
- {hdsp_jupyter_extension-2.0.6.data → hdsp_jupyter_extension-2.0.7.data}/data/share/jupyter/labextensions/hdsp-agent/static/vendors-node_modules_mui_material_index_js.985697e0162d8d088ca2.js +0 -0
- {hdsp_jupyter_extension-2.0.6.data → hdsp_jupyter_extension-2.0.7.data}/data/share/jupyter/labextensions/hdsp-agent/static/vendors-node_modules_mui_material_index_js.985697e0162d8d088ca2.js.map +0 -0
- {hdsp_jupyter_extension-2.0.6.dist-info → hdsp_jupyter_extension-2.0.7.dist-info}/WHEEL +0 -0
- {hdsp_jupyter_extension-2.0.6.dist-info → hdsp_jupyter_extension-2.0.7.dist-info}/licenses/LICENSE +0 -0
agent_server/langchain/tools/search_tools.py
CHANGED

@@ -1,131 +1,142 @@
 """
 Search Tools for LangChain Agent

-Provides tools for searching code in workspace and notebooks
-…
-…
+Provides tools for searching code in workspace and notebooks.
+These tools return pending_execution status and are executed on the client (Jupyter) side
+using subprocess (find/grep/ripgrep).
+
+Key features:
+- Returns command info for client-side execution via subprocess
+- Supports ripgrep (rg) if available, falls back to grep
+- Executes immediately without user approval
+- Shows the command being executed in status messages
 """

-import …
-import …
-import re
+import logging
+import shutil
 from typing import Any, Dict, List, Optional

 from langchain_core.tools import tool
 from pydantic import BaseModel, Field

+logger = logging.getLogger(__name__)
+

 class SearchWorkspaceInput(BaseModel):
     """Input schema for search_workspace tool"""
+
     pattern: str = Field(description="Search pattern (regex or text)")
     file_types: List[str] = Field(
         default=["*.py", "*.ipynb"],
-        description="File patterns to search (e.g., ['*.py', '*.ipynb'])"
+        description="File patterns to search (e.g., ['*.py', '*.ipynb'])",
     )
     path: str = Field(default=".", description="Directory to search in")
     max_results: int = Field(default=50, description="Maximum number of results")
     case_sensitive: bool = Field(default=False, description="Case-sensitive search")
+    execution_result: Optional[Dict[str, Any]] = Field(
+        default=None,
+        description="Execution result payload from the client",
+    )


 class SearchNotebookCellsInput(BaseModel):
     """Input schema for search_notebook_cells tool"""
+
     pattern: str = Field(description="Search pattern (regex or text)")
     notebook_path: Optional[str] = Field(
-        default=None,
-        description="Specific notebook to search (None = all notebooks)"
+        default=None, description="Specific notebook to search (None = all notebooks)"
     )
     cell_type: Optional[str] = Field(
         default=None,
-        description="Cell type filter: 'code', 'markdown', or None for all"
+        description="Cell type filter: 'code', 'markdown', or None for all",
     )
     max_results: int = Field(default=30, description="Maximum number of results")
     case_sensitive: bool = Field(default=False, description="Case-sensitive search")
+    execution_result: Optional[Dict[str, Any]] = Field(
+        default=None,
+        description="Execution result payload from the client",
+    )
+
+
+def _is_ripgrep_available() -> bool:
+    """Check if ripgrep (rg) is installed and available."""
+    return shutil.which("rg") is not None


-def …
-    file_path: str,
+def _build_grep_command(
     pattern: str,
-… (old lines 48-77 not shown in the diff source)
+    file_types: List[str],
+    path: str,
+    case_sensitive: bool,
+    max_results: int,
+) -> tuple[str, str]:
+    """
+    Build a grep/ripgrep command for searching files.
+
+    Returns:
+        Tuple of (command_string, tool_name) where tool_name is 'rg' or 'grep'
+    """
+    # Check ripgrep availability (this check will also be done on client)
+    use_ripgrep = _is_ripgrep_available()
+
+    if use_ripgrep:
+        # Build ripgrep command
+        cmd_parts = ["rg", "--line-number", "--with-filename"]
+
+        if not case_sensitive:
+            cmd_parts.append("--ignore-case")
+
+        # Add file type filters using glob patterns
+        for ft in file_types:
+            cmd_parts.extend(["--glob", ft])
+
+        # Limit results
+        cmd_parts.extend(["--max-count", str(max_results)])
+
+        # Escape pattern for shell
+        escaped_pattern = pattern.replace("'", "'\\''")
+        cmd_parts.append(f"'{escaped_pattern}'")
+        cmd_parts.append(path)
+
+        return " ".join(cmd_parts), "rg"
+    else:
+        # Build find + grep command for cross-platform compatibility
+        find_parts = ["find", path, "-type", "f", "("]
+
+        for i, ft in enumerate(file_types):
+            if i > 0:
+                find_parts.append("-o")
+            find_parts.extend(["-name", f"'{ft}'"])
+
+        find_parts.append(")")
+
+        # Add grep with proper flags
+        grep_flags = "-n"  # Line numbers
+        if not case_sensitive:
+            grep_flags += "i"
+
+        # Escape pattern for shell
+        escaped_pattern = pattern.replace("'", "'\\''")
+
+        # Combine with xargs for efficiency
+        cmd = f"{' '.join(find_parts)} 2>/dev/null | xargs grep -{grep_flags} '{escaped_pattern}' 2>/dev/null | head -n {max_results}"
+
+        return cmd, "grep"
+
+
+def _build_notebook_search_command(
     pattern: str,
-… (old lines 79-90 not shown in the diff source)
-    try:
-        with open(notebook_path, "r", encoding="utf-8") as f:
-            notebook = json.load(f)
-
-        cells = notebook.get("cells", [])
-        for idx, cell in enumerate(cells):
-            current_type = cell.get("cell_type", "code")
-
-            # Filter by cell type if specified
-            if cell_type and current_type != cell_type:
-                continue
-
-            source = cell.get("source", [])
-            if isinstance(source, list):
-                source = "".join(source)
-
-            if compiled.search(source):
-                # Find matching lines
-                matching_lines = []
-                for line_num, line in enumerate(source.split("\n"), 1):
-                    if compiled.search(line):
-                        matching_lines.append({
-                            "line": line_num,
-                            "content": line.strip()[:150]
-                        })
-
-                results.append({
-                    "file_path": notebook_path,
-                    "cell_index": idx,
-                    "cell_type": current_type,
-                    "content": source[:300] + "..." if len(source) > 300 else source,
-                    "matching_lines": matching_lines[:5],
-                    "match_type": "cell",
-                })
-    except Exception:
-        pass
-
-    return results
+    notebook_path: Optional[str],
+    path: str,
+    max_results: int,
+) -> str:
+    """Build a command to find notebooks for searching."""
+    if notebook_path:
+        return f"echo '{notebook_path}'"
+    else:
+        return (
+            f"find {path} -name '*.ipynb' -type f 2>/dev/null | head -n {max_results}"
+        )


 @tool(args_schema=SearchWorkspaceInput)
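For orientation, here is a minimal sketch of the command string the new _build_grep_command helper emits when ripgrep is available. It simply re-traces the rg branch added above; the function name, pattern, and file types below are illustrative and not part of the package.

# Sketch only: mirrors the rg branch of _build_grep_command shown in this hunk.
def build_rg_command(pattern, file_types, path=".", case_sensitive=False, max_results=50):
    parts = ["rg", "--line-number", "--with-filename"]
    if not case_sensitive:
        parts.append("--ignore-case")
    for ft in file_types:                      # one --glob filter per file pattern
        parts.extend(["--glob", ft])
    parts.extend(["--max-count", str(max_results)])
    escaped = pattern.replace("'", "'\\''")    # same single-quote escaping as the diff
    parts.append(f"'{escaped}'")
    parts.append(path)
    return " ".join(parts)

print(build_rg_command("load_dotenv", ["*.py", "*.ipynb"]))
# rg --line-number --with-filename --ignore-case --glob *.py --glob *.ipynb --max-count 50 'load_dotenv' .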
@@ -135,84 +146,65 @@ def search_workspace_tool(
     path: str = ".",
     max_results: int = 50,
     case_sensitive: bool = False,
-…
+    execution_result: Optional[Dict[str, Any]] = None,
+    workspace_root: str = ".",
 ) -> Dict[str, Any]:
     """
     Search for a pattern across files in the workspace.
-…
+
+    This tool is executed on the client side using subprocess (grep/ripgrep).
     Searches both regular files and Jupyter notebooks.
-…
-…
+
     Args:
         pattern: Search pattern (regex or text)
         file_types: File patterns to search (default: ['*.py', '*.ipynb'])
-        path: Directory to search in
+        path: Directory to search in (relative to workspace)
         max_results: Maximum number of results to return
         case_sensitive: Whether search is case-sensitive
-…
+
     Returns:
-        Dict with search results
+        Dict with search results or pending_execution status
     """
-    import fnmatch
-
     if file_types is None:
         file_types = ["*.py", "*.ipynb"]

-… (old lines 161-172 not shown in the diff source)
-            file_path = os.path.join(root, filename)
-            rel_path = os.path.relpath(file_path, workspace_root)
-            files_searched += 1
-
-            if filename.endswith(".ipynb"):
-                # Search in notebook
-                matches = _search_in_notebook(
-                    file_path, pattern, None, case_sensitive
-                )
-                for m in matches:
-                    m["file_path"] = rel_path
-                results.extend(matches)
-            else:
-                # Search in regular file
-                matches = _search_in_file(file_path, pattern, case_sensitive)
-                for m in matches:
-                    m["file_path"] = rel_path
-                results.extend(matches)
-
-            if len(results) >= max_results:
-                break
-
-        if len(results) >= max_results:
-            break
-
-    return {
-        "tool": "search_workspace",
-        "success": True,
+    # Build the search command
+    command, tool_used = _build_grep_command(
+        pattern=pattern,
+        file_types=file_types,
+        path=path,
+        case_sensitive=case_sensitive,
+        max_results=max_results,
+    )
+
+    response: Dict[str, Any] = {
+        "tool": "search_workspace_tool",
+        "parameters": {
             "pattern": pattern,
+            "file_types": file_types,
             "path": path,
-… (old lines 203-215 not shown in the diff source)
+            "max_results": max_results,
+            "case_sensitive": case_sensitive,
+        },
+        "command": command,
+        "tool_used": tool_used,
+        "status": "pending_execution",
+        "message": "Search queued for execution by client",
+    }
+
+    if execution_result is not None:
+        response["execution_result"] = execution_result
+        response["status"] = "complete"
+        response["message"] = "Search executed with client-reported results"
+        # Parse the execution result to extract search results
+        if isinstance(execution_result, dict):
+            response["success"] = execution_result.get("success", False)
+            response["results"] = execution_result.get("results", [])
+            response["total_results"] = execution_result.get("total_results", 0)
+            if "error" in execution_result:
+                response["error"] = execution_result["error"]
+
+    return response


 @tool(args_schema=SearchNotebookCellsInput)
@@ -222,84 +214,74 @@ def search_notebook_cells_tool(
     cell_type: Optional[str] = None,
     max_results: int = 30,
     case_sensitive: bool = False,
-…
+    execution_result: Optional[Dict[str, Any]] = None,
+    workspace_root: str = ".",
 ) -> Dict[str, Any]:
     """
     Search for a pattern in Jupyter notebook cells.
-…
+
+    This tool is executed on the client side.
     Can search a specific notebook or all notebooks in workspace.
     Optionally filter by cell type (code/markdown).
-…
+
     Args:
         pattern: Search pattern (regex or text)
         notebook_path: Specific notebook to search (None = all)
         cell_type: Filter by cell type ('code', 'markdown', or None)
         max_results: Maximum number of results
         case_sensitive: Whether search is case-sensitive
-…
+
     Returns:
-        Dict with matching cells
+        Dict with matching cells or pending_execution status
     """
-… (old lines 243-253 not shown in the diff source)
-                full_path, pattern, cell_type, case_sensitive
-            )
-            for m in matches:
-                m["file_path"] = notebook_path
-            results.extend(matches)
-            notebooks_searched = 1
-    else:
-        # Search all notebooks
-        for root, _, filenames in os.walk(workspace_root):
-            for filename in filenames:
-                if not filename.endswith(".ipynb"):
-                    continue
-
-                file_path = os.path.join(root, filename)
-                rel_path = os.path.relpath(file_path, workspace_root)
-                notebooks_searched += 1
-
-                matches = _search_in_notebook(
-                    file_path, pattern, cell_type, case_sensitive
-                )
-                for m in matches:
-                    m["file_path"] = rel_path
-                results.extend(matches)
-
-                if len(results) >= max_results:
-                    break
-
-            if len(results) >= max_results:
-                break
-
-    return {
-        "tool": "search_notebook_cells",
-        "success": True,
+    # Build find command for notebooks
+    find_command = _build_notebook_search_command(
+        pattern=pattern,
+        notebook_path=notebook_path,
+        path=".",
+        max_results=max_results,
+    )
+
+    response: Dict[str, Any] = {
+        "tool": "search_notebook_cells_tool",
+        "parameters": {
             "pattern": pattern,
             "notebook_path": notebook_path,
             "cell_type": cell_type,
-… (old lines 290-302 not shown in the diff source)
+            "max_results": max_results,
+            "case_sensitive": case_sensitive,
+        },
+        "find_command": find_command,
+        "status": "pending_execution",
+        "message": "Notebook search queued for execution by client",
+    }
+
+    if execution_result is not None:
+        response["execution_result"] = execution_result
+        response["status"] = "complete"
+        response["message"] = "Notebook search executed with client-reported results"
+        # Parse the execution result
+        if isinstance(execution_result, dict):
+            response["success"] = execution_result.get("success", False)
+            response["results"] = execution_result.get("results", [])
+            response["total_results"] = execution_result.get("total_results", 0)
+            response["notebooks_searched"] = execution_result.get(
+                "notebooks_searched", 0
+            )
+            if "error" in execution_result:
+                response["error"] = execution_result["error"]
+
+    return response
+
+
+def create_search_tools(workspace_root: str = ".") -> List:
+    """
+    Create search tools (for backward compatibility).
+
+    Note: workspace_root is not used since tools return pending_execution
+    and actual execution happens on the client side.
+    """
+    return [search_workspace_tool, search_notebook_cells_tool]


 # Export all tools
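Taken together, the rewritten search tools follow a two-phase handshake: the first call returns a pending_execution payload carrying the command for the Jupyter client, and a second call that includes execution_result marks the search complete. A usage sketch, assuming the package is importable under the module path shown in this diff and using an illustrative client payload whose keys mirror what the tool reads back (success, results, total_results, error):

from agent_server.langchain.tools.search_tools import search_workspace_tool

# Phase 1: the server only builds the grep/ripgrep command and defers execution.
pending = search_workspace_tool.invoke({"pattern": "read_csv"})
assert pending["status"] == "pending_execution"
print(pending["command"])  # command the Jupyter client is expected to run

# Phase 2: the client runs the command and reports back via execution_result.
done = search_workspace_tool.invoke({
    "pattern": "read_csv",
    "execution_result": {"success": True, "results": [], "total_results": 0},
})
assert done["status"] == "complete" and done["success"] is True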
agent_server/langchain/tools/shell_tools.py
ADDED

@@ -0,0 +1,54 @@
+"""
+Shell command tool for LangChain agent.
+
+Execution is handled by the Jupyter extension (client) after HITL approval.
+"""
+
+from typing import Any, Dict, Optional
+
+from langchain_core.tools import tool
+from pydantic import BaseModel, Field
+
+
+class ExecuteCommandInput(BaseModel):
+    """Input schema for execute_command_tool."""
+
+    command: str = Field(description="Shell command to execute")
+    stdin: Optional[str] = Field(
+        default="y\n",
+        description="Input to provide to the command for interactive prompts (default: 'y\\n' for yes/no prompts)",
+    )
+    timeout: Optional[int] = Field(
+        default=600000, description="Timeout in milliseconds"
+    )
+    execution_result: Optional[Dict[str, Any]] = Field(
+        default=None, description="Optional execution result payload from the client"
+    )
+
+
+@tool(args_schema=ExecuteCommandInput)
+def execute_command_tool(
+    command: str,
+    stdin: Optional[str] = "y\n",
+    timeout: Optional[int] = 600000,
+    execution_result: Optional[Dict[str, Any]] = None,
+) -> Dict[str, Any]:
+    """
+    Execute a shell command via the client (Jupyter extension).
+    The agent server only coordinates the request. The client performs the
+    actual execution after user approval and returns execution_result.
+
+    - Interactive prompts are auto-answered with "y" by default
+    - NEVER run long-running commands (servers, watch, dev) or endless processes
+    """
+    response: Dict[str, Any] = {
+        "tool": "execute_command_tool",
+        "parameters": {"command": command, "stdin": stdin, "timeout": timeout},
+        "status": "pending_execution",
+        "message": "Shell command queued for execution by client",
+    }
+    if execution_result is not None:
+        response["execution_result"] = execution_result
+        response["status"] = "complete"
+        response["message"] = "Shell command executed with client-reported results"
+    return response
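The new execute_command_tool applies the same handshake to shell commands, with HITL approval happening on the client before anything runs. A brief sketch under the same import-path assumption; the execution_result payload below is illustrative, since the tool stores it verbatim rather than parsing specific keys:

from agent_server.langchain.tools.shell_tools import execute_command_tool

queued = execute_command_tool.invoke({"command": "pip list"})
assert queued["status"] == "pending_execution"

# After user approval the client runs the command and echoes its outcome back.
finished = execute_command_tool.invoke({
    "command": "pip list",
    "execution_result": {"returncode": 0, "stdout": "..."},  # illustrative payload
})
assert finished["status"] == "complete"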
agent_server/main.py
CHANGED
@@ -19,6 +19,7 @@ from agent_server.routers import agent, chat, config, file_resolver, health, rag
 # Optional LangChain router (requires langchain dependencies)
 try:
     from agent_server.routers import langchain_agent
+
     LANGCHAIN_AVAILABLE = True
 except ImportError:
     LANGCHAIN_AVAILABLE = False

@@ -153,7 +154,9 @@ app.include_router(file_resolver.router, prefix="/file", tags=["File Resolution"

 # Register LangChain agent router if available
 if LANGCHAIN_AVAILABLE:
-    app.include_router(…
+    app.include_router(
+        langchain_agent.router, prefix="/agent", tags=["LangChain Agent"]
+    )
     logger.info("LangChain agent router registered")

agent_server/routers/health.py
CHANGED