hdsp-jupyter-extension 2.0.6__py3-none-any.whl → 2.0.7__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (90) hide show
  1. agent_server/core/reflection_engine.py +0 -1
  2. agent_server/knowledge/watchdog_service.py +1 -1
  3. agent_server/langchain/ARCHITECTURE.md +1193 -0
  4. agent_server/langchain/agent.py +74 -588
  5. agent_server/langchain/custom_middleware.py +636 -0
  6. agent_server/langchain/executors/__init__.py +2 -7
  7. agent_server/langchain/executors/notebook_searcher.py +46 -38
  8. agent_server/langchain/hitl_config.py +66 -0
  9. agent_server/langchain/llm_factory.py +166 -0
  10. agent_server/langchain/logging_utils.py +184 -0
  11. agent_server/langchain/prompts.py +119 -0
  12. agent_server/langchain/state.py +16 -6
  13. agent_server/langchain/tools/__init__.py +6 -0
  14. agent_server/langchain/tools/file_tools.py +91 -129
  15. agent_server/langchain/tools/jupyter_tools.py +18 -18
  16. agent_server/langchain/tools/resource_tools.py +161 -0
  17. agent_server/langchain/tools/search_tools.py +198 -216
  18. agent_server/langchain/tools/shell_tools.py +54 -0
  19. agent_server/main.py +4 -1
  20. agent_server/routers/health.py +1 -1
  21. agent_server/routers/langchain_agent.py +940 -285
  22. hdsp_agent_core/prompts/auto_agent_prompts.py +3 -3
  23. {hdsp_jupyter_extension-2.0.6.data → hdsp_jupyter_extension-2.0.7.data}/data/share/jupyter/labextensions/hdsp-agent/build_log.json +1 -1
  24. {hdsp_jupyter_extension-2.0.6.data → hdsp_jupyter_extension-2.0.7.data}/data/share/jupyter/labextensions/hdsp-agent/package.json +2 -2
  25. hdsp_jupyter_extension-2.0.6.data/data/share/jupyter/labextensions/hdsp-agent/static/frontend_styles_index_js.02d346171474a0fb2dc1.js → hdsp_jupyter_extension-2.0.7.data/data/share/jupyter/labextensions/hdsp-agent/static/frontend_styles_index_js.4770ec0fb2d173b6deb4.js +312 -6
  26. hdsp_jupyter_extension-2.0.7.data/data/share/jupyter/labextensions/hdsp-agent/static/frontend_styles_index_js.4770ec0fb2d173b6deb4.js.map +1 -0
  27. hdsp_jupyter_extension-2.0.6.data/data/share/jupyter/labextensions/hdsp-agent/static/lib_index_js.a223ea20056954479ae9.js → hdsp_jupyter_extension-2.0.7.data/data/share/jupyter/labextensions/hdsp-agent/static/lib_index_js.29cf4312af19e86f82af.js +1547 -330
  28. hdsp_jupyter_extension-2.0.7.data/data/share/jupyter/labextensions/hdsp-agent/static/lib_index_js.29cf4312af19e86f82af.js.map +1 -0
  29. hdsp_jupyter_extension-2.0.6.data/data/share/jupyter/labextensions/hdsp-agent/static/remoteEntry.addf2fa038fa60304aa2.js → hdsp_jupyter_extension-2.0.7.data/data/share/jupyter/labextensions/hdsp-agent/static/remoteEntry.61343eb4cf0577e74b50.js +8 -8
  30. hdsp_jupyter_extension-2.0.7.data/data/share/jupyter/labextensions/hdsp-agent/static/remoteEntry.61343eb4cf0577e74b50.js.map +1 -0
  31. hdsp_jupyter_extension-2.0.6.data/data/share/jupyter/labextensions/hdsp-agent/static/vendors-node_modules_emotion_cache_dist_emotion-cache_browser_development_esm_js.24edcc52a1c014a8a5f0.js → hdsp_jupyter_extension-2.0.7.data/data/share/jupyter/labextensions/hdsp-agent/static/vendors-node_modules_emotion_cache_dist_emotion-cache_browser_development_esm_js-node_modules-782ee5.d9ed8645ef1d311657d8.js +209 -2
  32. hdsp_jupyter_extension-2.0.7.data/data/share/jupyter/labextensions/hdsp-agent/static/vendors-node_modules_emotion_cache_dist_emotion-cache_browser_development_esm_js-node_modules-782ee5.d9ed8645ef1d311657d8.js.map +1 -0
  33. jupyter_ext/labextension/static/vendors-node_modules_emotion_react_dist_emotion-react_browser_development_esm_js.19ecf6babe00caff6b8a.js → hdsp_jupyter_extension-2.0.7.data/data/share/jupyter/labextensions/hdsp-agent/static/vendors-node_modules_emotion_react_dist_emotion-react_browser_development_esm_js.36b49c71871f98d4f549.js +2 -209
  34. hdsp_jupyter_extension-2.0.7.data/data/share/jupyter/labextensions/hdsp-agent/static/vendors-node_modules_emotion_react_dist_emotion-react_browser_development_esm_js.36b49c71871f98d4f549.js.map +1 -0
  35. hdsp_jupyter_extension-2.0.6.data/data/share/jupyter/labextensions/hdsp-agent/static/vendors-node_modules_mui_material_utils_createSvgIcon_js.1f5038488cdfd8b3a85d.js → hdsp_jupyter_extension-2.0.7.data/data/share/jupyter/labextensions/hdsp-agent/static/vendors-node_modules_mui_material_utils_createSvgIcon_js.2e13df4ea61496e95d45.js +3 -212
  36. hdsp_jupyter_extension-2.0.7.data/data/share/jupyter/labextensions/hdsp-agent/static/vendors-node_modules_mui_material_utils_createSvgIcon_js.2e13df4ea61496e95d45.js.map +1 -0
  37. {hdsp_jupyter_extension-2.0.6.dist-info → hdsp_jupyter_extension-2.0.7.dist-info}/METADATA +2 -1
  38. {hdsp_jupyter_extension-2.0.6.dist-info → hdsp_jupyter_extension-2.0.7.dist-info}/RECORD +71 -68
  39. jupyter_ext/_version.py +1 -1
  40. jupyter_ext/handlers.py +1176 -58
  41. jupyter_ext/labextension/build_log.json +1 -1
  42. jupyter_ext/labextension/package.json +2 -2
  43. jupyter_ext/labextension/static/{frontend_styles_index_js.02d346171474a0fb2dc1.js → frontend_styles_index_js.4770ec0fb2d173b6deb4.js} +312 -6
  44. jupyter_ext/labextension/static/frontend_styles_index_js.4770ec0fb2d173b6deb4.js.map +1 -0
  45. jupyter_ext/labextension/static/{lib_index_js.a223ea20056954479ae9.js → lib_index_js.29cf4312af19e86f82af.js} +1547 -330
  46. jupyter_ext/labextension/static/lib_index_js.29cf4312af19e86f82af.js.map +1 -0
  47. jupyter_ext/labextension/static/{remoteEntry.addf2fa038fa60304aa2.js → remoteEntry.61343eb4cf0577e74b50.js} +8 -8
  48. jupyter_ext/labextension/static/remoteEntry.61343eb4cf0577e74b50.js.map +1 -0
  49. jupyter_ext/labextension/static/{vendors-node_modules_emotion_cache_dist_emotion-cache_browser_development_esm_js.24edcc52a1c014a8a5f0.js → vendors-node_modules_emotion_cache_dist_emotion-cache_browser_development_esm_js-node_modules-782ee5.d9ed8645ef1d311657d8.js} +209 -2
  50. jupyter_ext/labextension/static/vendors-node_modules_emotion_cache_dist_emotion-cache_browser_development_esm_js-node_modules-782ee5.d9ed8645ef1d311657d8.js.map +1 -0
  51. hdsp_jupyter_extension-2.0.6.data/data/share/jupyter/labextensions/hdsp-agent/static/vendors-node_modules_emotion_react_dist_emotion-react_browser_development_esm_js.19ecf6babe00caff6b8a.js → jupyter_ext/labextension/static/vendors-node_modules_emotion_react_dist_emotion-react_browser_development_esm_js.36b49c71871f98d4f549.js +2 -209
  52. jupyter_ext/labextension/static/vendors-node_modules_emotion_react_dist_emotion-react_browser_development_esm_js.36b49c71871f98d4f549.js.map +1 -0
  53. jupyter_ext/labextension/static/{vendors-node_modules_mui_material_utils_createSvgIcon_js.1f5038488cdfd8b3a85d.js → vendors-node_modules_mui_material_utils_createSvgIcon_js.2e13df4ea61496e95d45.js} +3 -212
  54. jupyter_ext/labextension/static/vendors-node_modules_mui_material_utils_createSvgIcon_js.2e13df4ea61496e95d45.js.map +1 -0
  55. jupyter_ext/resource_usage.py +180 -0
  56. jupyter_ext/tests/test_handlers.py +58 -0
  57. agent_server/langchain/executors/jupyter_executor.py +0 -429
  58. agent_server/langchain/middleware/__init__.py +0 -36
  59. agent_server/langchain/middleware/code_search_middleware.py +0 -278
  60. agent_server/langchain/middleware/error_handling_middleware.py +0 -338
  61. agent_server/langchain/middleware/jupyter_execution_middleware.py +0 -301
  62. agent_server/langchain/middleware/rag_middleware.py +0 -227
  63. agent_server/langchain/middleware/validation_middleware.py +0 -240
  64. hdsp_jupyter_extension-2.0.6.data/data/share/jupyter/labextensions/hdsp-agent/static/frontend_styles_index_js.02d346171474a0fb2dc1.js.map +0 -1
  65. hdsp_jupyter_extension-2.0.6.data/data/share/jupyter/labextensions/hdsp-agent/static/lib_index_js.a223ea20056954479ae9.js.map +0 -1
  66. hdsp_jupyter_extension-2.0.6.data/data/share/jupyter/labextensions/hdsp-agent/static/remoteEntry.addf2fa038fa60304aa2.js.map +0 -1
  67. hdsp_jupyter_extension-2.0.6.data/data/share/jupyter/labextensions/hdsp-agent/static/vendors-node_modules_emotion_cache_dist_emotion-cache_browser_development_esm_js.24edcc52a1c014a8a5f0.js.map +0 -1
  68. hdsp_jupyter_extension-2.0.6.data/data/share/jupyter/labextensions/hdsp-agent/static/vendors-node_modules_emotion_react_dist_emotion-react_browser_development_esm_js.19ecf6babe00caff6b8a.js.map +0 -1
  69. hdsp_jupyter_extension-2.0.6.data/data/share/jupyter/labextensions/hdsp-agent/static/vendors-node_modules_mui_material_utils_createSvgIcon_js.1f5038488cdfd8b3a85d.js.map +0 -1
  70. jupyter_ext/labextension/static/frontend_styles_index_js.02d346171474a0fb2dc1.js.map +0 -1
  71. jupyter_ext/labextension/static/lib_index_js.a223ea20056954479ae9.js.map +0 -1
  72. jupyter_ext/labextension/static/remoteEntry.addf2fa038fa60304aa2.js.map +0 -1
  73. jupyter_ext/labextension/static/vendors-node_modules_emotion_cache_dist_emotion-cache_browser_development_esm_js.24edcc52a1c014a8a5f0.js.map +0 -1
  74. jupyter_ext/labextension/static/vendors-node_modules_emotion_react_dist_emotion-react_browser_development_esm_js.19ecf6babe00caff6b8a.js.map +0 -1
  75. jupyter_ext/labextension/static/vendors-node_modules_mui_material_utils_createSvgIcon_js.1f5038488cdfd8b3a85d.js.map +0 -1
  76. {hdsp_jupyter_extension-2.0.6.data → hdsp_jupyter_extension-2.0.7.data}/data/etc/jupyter/jupyter_server_config.d/hdsp_jupyter_extension.json +0 -0
  77. {hdsp_jupyter_extension-2.0.6.data → hdsp_jupyter_extension-2.0.7.data}/data/share/jupyter/labextensions/hdsp-agent/install.json +0 -0
  78. {hdsp_jupyter_extension-2.0.6.data → hdsp_jupyter_extension-2.0.7.data}/data/share/jupyter/labextensions/hdsp-agent/static/node_modules_emotion_use-insertion-effect-with-fallbacks_dist_emotion-use-insertion-effect-wi-3ba6b80.c095373419d05e6f141a.js +0 -0
  79. {hdsp_jupyter_extension-2.0.6.data → hdsp_jupyter_extension-2.0.7.data}/data/share/jupyter/labextensions/hdsp-agent/static/node_modules_emotion_use-insertion-effect-with-fallbacks_dist_emotion-use-insertion-effect-wi-3ba6b80.c095373419d05e6f141a.js.map +0 -0
  80. {hdsp_jupyter_extension-2.0.6.data → hdsp_jupyter_extension-2.0.7.data}/data/share/jupyter/labextensions/hdsp-agent/static/node_modules_emotion_use-insertion-effect-with-fallbacks_dist_emotion-use-insertion-effect-wi-3ba6b81.61e75fb98ecff46cf836.js +0 -0
  81. {hdsp_jupyter_extension-2.0.6.data → hdsp_jupyter_extension-2.0.7.data}/data/share/jupyter/labextensions/hdsp-agent/static/node_modules_emotion_use-insertion-effect-with-fallbacks_dist_emotion-use-insertion-effect-wi-3ba6b81.61e75fb98ecff46cf836.js.map +0 -0
  82. {hdsp_jupyter_extension-2.0.6.data → hdsp_jupyter_extension-2.0.7.data}/data/share/jupyter/labextensions/hdsp-agent/static/style.js +0 -0
  83. {hdsp_jupyter_extension-2.0.6.data → hdsp_jupyter_extension-2.0.7.data}/data/share/jupyter/labextensions/hdsp-agent/static/vendors-node_modules_babel_runtime_helpers_esm_extends_js-node_modules_emotion_serialize_dist-051195.e2553aab0c3963b83dd7.js +0 -0
  84. {hdsp_jupyter_extension-2.0.6.data → hdsp_jupyter_extension-2.0.7.data}/data/share/jupyter/labextensions/hdsp-agent/static/vendors-node_modules_babel_runtime_helpers_esm_extends_js-node_modules_emotion_serialize_dist-051195.e2553aab0c3963b83dd7.js.map +0 -0
  85. {hdsp_jupyter_extension-2.0.6.data → hdsp_jupyter_extension-2.0.7.data}/data/share/jupyter/labextensions/hdsp-agent/static/vendors-node_modules_emotion_styled_dist_emotion-styled_browser_development_esm_js.661fb5836f4978a7c6e1.js +0 -0
  86. {hdsp_jupyter_extension-2.0.6.data → hdsp_jupyter_extension-2.0.7.data}/data/share/jupyter/labextensions/hdsp-agent/static/vendors-node_modules_emotion_styled_dist_emotion-styled_browser_development_esm_js.661fb5836f4978a7c6e1.js.map +0 -0
  87. {hdsp_jupyter_extension-2.0.6.data → hdsp_jupyter_extension-2.0.7.data}/data/share/jupyter/labextensions/hdsp-agent/static/vendors-node_modules_mui_material_index_js.985697e0162d8d088ca2.js +0 -0
  88. {hdsp_jupyter_extension-2.0.6.data → hdsp_jupyter_extension-2.0.7.data}/data/share/jupyter/labextensions/hdsp-agent/static/vendors-node_modules_mui_material_index_js.985697e0162d8d088ca2.js.map +0 -0
  89. {hdsp_jupyter_extension-2.0.6.dist-info → hdsp_jupyter_extension-2.0.7.dist-info}/WHEEL +0 -0
  90. {hdsp_jupyter_extension-2.0.6.dist-info → hdsp_jupyter_extension-2.0.7.dist-info}/licenses/LICENSE +0 -0
jupyter_ext/handlers.py CHANGED
@@ -6,26 +6,58 @@ ServiceFactory-based handlers supporting both embedded and proxy modes:
6
6
  - Proxy mode (HDSP_AGENT_MODE=proxy): HTTP proxy to external Agent Server
7
7
  """
8
8
 
9
+ import asyncio
9
10
  import json
10
11
  import logging
11
12
  import os
12
- from typing import Any, Dict
13
+ import subprocess
14
+ import time
15
+ from typing import Any, Awaitable, Callable, Dict, Optional
13
16
 
14
17
  import httpx
15
18
  from jupyter_server.base.handlers import APIHandler
16
19
  from jupyter_server.utils import url_path_join
17
- from tornado.web import RequestHandler
20
+
21
+ from .resource_usage import get_integrated_resources
18
22
 
19
23
  logger = logging.getLogger(__name__)
20
24
 
25
+ DEFAULT_EXECUTE_COMMAND_TIMEOUT_MS = 600_000
26
+ MAX_EXECUTE_COMMAND_STREAM_BYTES = 1_000_000
27
+
28
+
29
+ def _resolve_timeout_ms(
30
+ value: Any, default: int = DEFAULT_EXECUTE_COMMAND_TIMEOUT_MS
31
+ ) -> int:
32
+ """Resolve timeout in milliseconds with a safe default."""
33
+ try:
34
+ timeout_ms = int(value)
35
+ except (TypeError, ValueError):
36
+ return default
37
+ if timeout_ms <= 0:
38
+ return default
39
+ return timeout_ms
40
+
41
+
42
def _resolve_stream_timeout_ms(
    value: Any, default: int = DEFAULT_EXECUTE_COMMAND_TIMEOUT_MS
) -> Optional[int]:
    """Coerce *value* to a timeout in milliseconds for streaming commands.

    Unlike ``_resolve_timeout_ms``, an explicit non-positive value means
    "no timeout" and yields ``None``; only unparsable values fall back to
    *default*.
    """
    try:
        parsed = int(value)
    except (TypeError, ValueError):
        return default
    return parsed if parsed > 0 else None
53
+
21
54
 
22
55
  def _resolve_workspace_root(server_root: str) -> str:
23
56
  """Resolve workspace root by walking up to the project root if needed."""
24
57
  current = os.path.abspath(server_root)
25
58
  while True:
26
- if (
27
- os.path.isdir(os.path.join(current, "extensions"))
28
- and os.path.isdir(os.path.join(current, "agent-server"))
59
+ if os.path.isdir(os.path.join(current, "extensions")) and os.path.isdir(
60
+ os.path.join(current, "agent-server")
29
61
  ):
30
62
  return current
31
63
  parent = os.path.dirname(current)
@@ -37,6 +69,7 @@ def _resolve_workspace_root(server_root: str) -> str:
37
69
def _get_service_factory():
    """Get ServiceFactory instance (lazy import to avoid circular imports)"""
    # Imported inside the function because hdsp_agent_core presumably imports
    # this module at startup — importing at top level would cycle. TODO confirm.
    from hdsp_agent_core.factory import get_service_factory

    return get_service_factory()
41
74
 
42
75
 
@@ -49,6 +82,472 @@ def _is_embedded_mode() -> bool:
49
82
  return False
50
83
 
51
84
 
85
+ def _run_shell_command(
86
+ command: str, timeout_ms: int, cwd: str, stdin_input: Optional[str] = None
87
+ ) -> Dict[str, Any]:
88
+ """Run a shell command with timeout and capture output.
89
+
90
+ Args:
91
+ command: Shell command to execute
92
+ timeout_ms: Timeout in milliseconds
93
+ cwd: Working directory
94
+ stdin_input: Optional input to provide to the command (for interactive prompts)
95
+ """
96
+ timeout_sec = max(0.1, timeout_ms / 1000)
97
+ try:
98
+ result = subprocess.run(
99
+ command,
100
+ shell=True,
101
+ capture_output=True,
102
+ text=True,
103
+ timeout=timeout_sec,
104
+ cwd=cwd,
105
+ input=stdin_input, # Provide stdin if specified
106
+ )
107
+ return {
108
+ "success": result.returncode == 0,
109
+ "stdout": result.stdout,
110
+ "stderr": result.stderr,
111
+ "returncode": result.returncode,
112
+ "cwd": cwd,
113
+ }
114
+ except subprocess.TimeoutExpired:
115
+ return {
116
+ "success": False,
117
+ "error": f"Command timed out after {timeout_sec}s. If the command requires user input, use stdin parameter or non-interactive flags.",
118
+ "cwd": cwd,
119
+ }
120
+ except Exception as exc:
121
+ return {"success": False, "error": str(exc), "cwd": cwd}
122
+
123
+
124
def _append_stream_output(
    current: str,
    chunk: str,
    max_bytes: int = MAX_EXECUTE_COMMAND_STREAM_BYTES,
) -> tuple[str, bool]:
    """Append *chunk* to *current*, capping the total UTF-8 size.

    Returns the combined string and a flag that is True when output had to
    be truncated (or was already at the cap).
    """
    if not chunk:
        return current, False

    used = len(current.encode("utf-8"))
    if used >= max_bytes:
        return current, True

    room = max_bytes - used
    encoded = chunk.encode("utf-8")
    if len(encoded) <= room:
        return current + chunk, False

    # Clip on the byte boundary; "ignore" drops any split multi-byte char.
    clipped = encoded[:room].decode("utf-8", errors="ignore")
    return current + clipped, True
141
+
142
+
143
async def _stream_subprocess_output(
    stream: Optional[asyncio.StreamReader],
    stream_name: str,
    emit: Callable[[str, Dict[str, Any]], Awaitable[None]],
    append: Callable[[str], None],
) -> None:
    """Stream subprocess output lines and collect them.

    Reads *stream* line-by-line until EOF; each decoded line (invalid
    UTF-8 replaced) is emitted as an "output" event tagged with
    *stream_name* — presumably "stdout"/"stderr", confirm at call site —
    and handed to *append* for accumulation by the caller.
    """
    if stream is None:
        # Subprocess was started without a pipe for this stream.
        return
    while True:
        chunk = await stream.readline()
        if not chunk:  # EOF: empty bytes terminates the loop
            break
        text = chunk.decode("utf-8", errors="replace")
        await emit("output", {"stream": stream_name, "text": text})
        append(text)
159
+
160
+
161
def _resolve_path_in_workspace(
    path: str, workspace_root: str, requested_cwd: Optional[str] = None
) -> str:
    """Resolve a relative path within the workspace root.

    Rejects absolute paths and any path containing "..". When
    *requested_cwd* is given, the path is resolved against it (after
    confirming the cwd itself lies inside *workspace_root*); a path that
    redundantly repeats the cwd prefix is de-duplicated first.

    Raises:
        ValueError: on absolute paths, ".." traversal, or any resolved
            location that escapes *workspace_root*.
    """
    if os.path.isabs(path):
        raise ValueError("absolute paths are not allowed")
    # NOTE(review): substring check also rejects legitimate names like
    # "a..b"; assumed intentional as a conservative filter — confirm.
    if ".." in path:
        raise ValueError("parent directory traversal is not allowed")

    normalized_path = os.path.normpath(path)
    base_dir = workspace_root
    if requested_cwd:
        if os.path.isabs(requested_cwd):
            resolved_cwd = os.path.abspath(requested_cwd)
        else:
            resolved_cwd = os.path.abspath(os.path.join(workspace_root, requested_cwd))
        # commonpath check confines the cwd to the workspace root.
        if os.path.commonpath([workspace_root, resolved_cwd]) != workspace_root:
            raise ValueError("cwd escapes workspace root")
        base_dir = resolved_cwd

        # If the caller already prefixed the path with the cwd (e.g. path
        # "sub/file" with cwd "sub"), strip that prefix to avoid "sub/sub/file".
        rel_cwd = os.path.normpath(os.path.relpath(resolved_cwd, workspace_root))
        if rel_cwd != ".":
            prefix = rel_cwd + os.sep
            if normalized_path == rel_cwd:
                normalized_path = "."
            elif normalized_path.startswith(prefix):
                normalized_path = normalized_path[len(prefix) :]

    resolved_path = os.path.abspath(os.path.join(base_dir, normalized_path))
    if os.path.commonpath([workspace_root, resolved_path]) != workspace_root:
        raise ValueError("path escapes workspace root")
    return resolved_path
193
+
194
+
195
+ def _resolve_command_cwd(
196
+ server_root: str, workspace_root: str, requested_cwd: Optional[str] = None
197
+ ) -> str:
198
+ """Resolve command cwd within workspace root."""
199
+ default_cwd = os.path.abspath(server_root)
200
+ if os.path.commonpath([workspace_root, default_cwd]) != workspace_root:
201
+ default_cwd = workspace_root
202
+
203
+ if not requested_cwd:
204
+ return default_cwd
205
+
206
+ if os.path.isabs(requested_cwd):
207
+ resolved_cwd = os.path.abspath(requested_cwd)
208
+ else:
209
+ resolved_cwd = os.path.abspath(os.path.join(default_cwd, requested_cwd))
210
+
211
+ if os.path.commonpath([workspace_root, resolved_cwd]) != workspace_root:
212
+ raise ValueError("cwd escapes workspace root")
213
+
214
+ return resolved_cwd
215
+
216
+
217
+ def _write_file(
218
+ resolved_path: str, content: str, encoding: str, overwrite: bool
219
+ ) -> Dict[str, Any]:
220
+ """Write content to a file on disk."""
221
+ print(
222
+ f"[WRITE DEBUG] resolved_path={resolved_path}, overwrite={overwrite}, type={type(overwrite)}",
223
+ flush=True,
224
+ )
225
+ try:
226
+ dir_path = os.path.dirname(resolved_path)
227
+ if dir_path:
228
+ os.makedirs(dir_path, exist_ok=True)
229
+ mode = "w" if overwrite else "x"
230
+ print(f"[WRITE DEBUG] mode={mode}", flush=True)
231
+ with open(resolved_path, mode, encoding=encoding) as f:
232
+ f.write(content)
233
+ return {
234
+ "success": True,
235
+ "size": len(content),
236
+ }
237
+ except FileExistsError:
238
+ return {
239
+ "success": False,
240
+ "error": "File already exists. Set overwrite=true to overwrite.",
241
+ }
242
+ except Exception as exc:
243
+ return {"success": False, "error": str(exc)}
244
+
245
+
246
+ def _is_ripgrep_available() -> bool:
247
+ """Check if ripgrep (rg) is installed."""
248
+ import shutil
249
+
250
+ return shutil.which("rg") is not None
251
+
252
+
253
def _build_search_command(
    pattern: str,
    file_types: list,
    path: str,
    case_sensitive: bool,
    max_results: int,
) -> tuple:
    """Build the shell command for a file-content search.

    Prefers ripgrep when installed, otherwise falls back to a
    find | xargs grep pipeline.

    Returns:
        (command_string, tool_name) where tool_name is "rg" or "grep".
    """
    # Escape single quotes so the pattern survives single-quoted shell args.
    quoted = pattern.replace("'", "'\\''")

    if _is_ripgrep_available():
        parts = ["rg", "--line-number", "--with-filename"]
        if not case_sensitive:
            parts.append("--ignore-case")
        for glob in file_types:
            parts += ["--glob", glob]
        parts += ["--max-count", str(max_results), f"'{quoted}'", path]
        return " ".join(parts), "rg"

    finder = ["find", path, "-type", "f", "\\("]
    for idx, glob in enumerate(file_types):
        if idx:
            finder.append("-o")
        finder += ["-name", f"'{glob}'"]
    finder.append("\\)")
    flags = "n" if case_sensitive else "ni"
    command = (
        f"{' '.join(finder)} 2>/dev/null | xargs grep -{flags} "
        f"'{quoted}' 2>/dev/null | head -n {max_results}"
    )
    return command, "grep"
285
+
286
+
287
+ def _parse_grep_output(output: str, workspace_root: str) -> list:
288
+ """Parse grep/ripgrep output into structured results."""
289
+ results = []
290
+ for line in output.strip().split("\n"):
291
+ if not line:
292
+ continue
293
+ parts = line.split(":", 2)
294
+ if len(parts) >= 2:
295
+ file_path = parts[0]
296
+ try:
297
+ line_num = int(parts[1])
298
+ content = parts[2] if len(parts) > 2 else ""
299
+ except ValueError:
300
+ line_num = 0
301
+ content = line
302
+ try:
303
+ rel_path = os.path.relpath(file_path, workspace_root)
304
+ except ValueError:
305
+ rel_path = file_path
306
+ results.append(
307
+ {
308
+ "file_path": rel_path,
309
+ "line_number": line_num,
310
+ "content": content.strip()[:200],
311
+ "match_type": "line",
312
+ }
313
+ )
314
+ return results
315
+
316
+
317
+ def _search_in_notebook(
318
+ notebook_path: str, pattern: str, cell_type: Optional[str], case_sensitive: bool
319
+ ) -> list:
320
+ """Search for pattern in notebook cells."""
321
+ import re
322
+
323
+ results = []
324
+ flags = 0 if case_sensitive else re.IGNORECASE
325
+
326
+ try:
327
+ compiled = re.compile(pattern, flags)
328
+ except re.error:
329
+ compiled = re.compile(re.escape(pattern), flags)
330
+
331
+ try:
332
+ with open(notebook_path, "r", encoding="utf-8") as f:
333
+ notebook = json.load(f)
334
+ cells = notebook.get("cells", [])
335
+ for idx, cell in enumerate(cells):
336
+ current_type = cell.get("cell_type", "code")
337
+ if cell_type and current_type != cell_type:
338
+ continue
339
+ source = cell.get("source", [])
340
+ if isinstance(source, list):
341
+ source = "".join(source)
342
+ if compiled.search(source):
343
+ matching_lines = []
344
+ for line_num, line in enumerate(source.split("\n"), 1):
345
+ if compiled.search(line):
346
+ matching_lines.append(
347
+ {"line": line_num, "content": line.strip()[:150]}
348
+ )
349
+ results.append(
350
+ {
351
+ "cell_index": idx,
352
+ "cell_type": current_type,
353
+ "content": source[:300] + "..."
354
+ if len(source) > 300
355
+ else source,
356
+ "matching_lines": matching_lines[:5],
357
+ "match_type": "cell",
358
+ }
359
+ )
360
+ except Exception as e:
361
+ logger.warning(f"Error searching notebook {notebook_path}: {e}")
362
+ return results
363
+
364
+
365
def _execute_search_workspace(
    pattern: str,
    file_types: list,
    path: str,
    max_results: int,
    case_sensitive: bool,
    workspace_root: str,
) -> Dict[str, Any]:
    """Search workspace files for *pattern* via ripgrep/grep plus notebooks.

    Args:
        pattern: Regex/text pattern to search for.
        file_types: Glob patterns (e.g. "*.py"); any glob containing
            "ipynb" additionally triggers a cell-level scan of notebooks.
        path: Search path relative to *workspace_root*.
        max_results: Cap on returned results.
        case_sensitive: Whether matching is case sensitive.
        workspace_root: Absolute root used as the subprocess cwd and for
            relative result paths.

    Returns:
        Dict with success flag, the command used, tool name, results
        (capped at *max_results*) and total_results; error text on failure.

    Fixes vs. previous revision: removed the "[SEARCH DEBUG]" print
    statements and the unconditional debug `find` subprocess that ran
    outside the try block (its TimeoutExpired would have propagated
    uncaught and it cost an extra process per request).
    """
    search_path = os.path.normpath(os.path.join(workspace_root, path))
    if not os.path.exists(search_path):
        return {
            "success": False,
            "error": f"Path does not exist: {path}",
            "results": [],
            "total_results": 0,
        }

    command, tool_used = _build_search_command(
        pattern, file_types, search_path, case_sensitive, max_results
    )
    logger.debug("Executing search: %s (cwd=%s)", command, workspace_root)

    try:
        result = subprocess.run(
            command,
            shell=True,
            capture_output=True,
            text=True,
            timeout=30,
            cwd=workspace_root,
        )
        results = _parse_grep_output(result.stdout, workspace_root)

        # grep only sees raw notebook JSON; also search cell sources when
        # any requested glob targets .ipynb files.
        notebook_results = []
        if any("ipynb" in ft for ft in file_types):
            find_cmd = f"find {search_path} -name '*.ipynb' -type f 2>/dev/null"
            nb_result = subprocess.run(
                find_cmd,
                shell=True,
                capture_output=True,
                text=True,
                timeout=10,
                cwd=workspace_root,
            )
            if nb_result.returncode == 0 and nb_result.stdout:
                notebooks = nb_result.stdout.strip().split("\n")
                for nb_path in notebooks[:20]:  # bound per-request work
                    if nb_path and os.path.exists(nb_path):
                        nb_matches = _search_in_notebook(
                            nb_path, pattern, None, case_sensitive
                        )
                        for m in nb_matches:
                            try:
                                m["file_path"] = os.path.relpath(
                                    nb_path, workspace_root
                                )
                            except ValueError:
                                m["file_path"] = nb_path
                        notebook_results.extend(nb_matches)
                        if len(notebook_results) >= max_results:
                            break

        all_results = results + notebook_results
        return {
            "success": True,
            "command": command,
            "tool_used": tool_used,
            "results": all_results[:max_results],
            "total_results": len(all_results),
        }
    except subprocess.TimeoutExpired:
        return {
            "success": False,
            "error": "Search timed out",
            "results": [],
            "total_results": 0,
        }
    except Exception as e:
        return {"success": False, "error": str(e), "results": [], "total_results": 0}
473
+
474
+
475
def _execute_search_notebook_cells(
    pattern: str,
    notebook_path: Optional[str],
    cell_type: Optional[str],
    max_results: int,
    case_sensitive: bool,
    workspace_root: str,
) -> Dict[str, Any]:
    """Execute notebook cell search.

    Searches a single notebook when *notebook_path* is given (interpreted
    relative to *workspace_root*), otherwise every .ipynb discovered under
    the workspace via `find`. Result dicts carry workspace-relative file
    paths where possible.

    Returns:
        Dict with success flag, results (capped at *max_results*),
        total_results and notebooks_searched; error text on failure.
    """
    results = []
    notebooks_searched = 0

    try:
        if notebook_path:
            # Single-notebook mode.
            full_path = os.path.normpath(os.path.join(workspace_root, notebook_path))
            if os.path.exists(full_path) and full_path.endswith(".ipynb"):
                matches = _search_in_notebook(
                    full_path, pattern, cell_type, case_sensitive
                )
                for m in matches:
                    # Report the caller-supplied (relative) path, not the
                    # resolved absolute one.
                    m["file_path"] = notebook_path
                results.extend(matches)
                notebooks_searched = 1
            else:
                return {
                    "success": False,
                    "error": f"Notebook not found: {notebook_path}",
                    "results": [],
                    "total_results": 0,
                    "notebooks_searched": 0,
                }
        else:
            # Workspace-wide mode: enumerate notebooks with `find`.
            find_cmd = f"find {workspace_root} -name '*.ipynb' -type f 2>/dev/null"
            find_result = subprocess.run(
                find_cmd,
                shell=True,
                capture_output=True,
                text=True,
                timeout=10,
                cwd=workspace_root,
            )
            if find_result.returncode == 0 and find_result.stdout:
                notebooks = find_result.stdout.strip().split("\n")
                for nb_full_path in notebooks:
                    if not nb_full_path or not os.path.exists(nb_full_path):
                        continue
                    notebooks_searched += 1
                    matches = _search_in_notebook(
                        nb_full_path, pattern, cell_type, case_sensitive
                    )
                    try:
                        rel_path = os.path.relpath(nb_full_path, workspace_root)
                    except ValueError:
                        # relpath fails across Windows drives; keep absolute.
                        rel_path = nb_full_path
                    for m in matches:
                        m["file_path"] = rel_path
                    results.extend(matches)
                    if len(results) >= max_results:
                        break

        return {
            "success": True,
            "results": results[:max_results],
            "total_results": len(results),
            "notebooks_searched": notebooks_searched,
        }
    except Exception as e:
        return {
            "success": False,
            "error": str(e),
            "results": [],
            "total_results": 0,
            "notebooks_searched": 0,
        }
549
+
550
+
52
551
  # ============ Service-Based Handlers ============
53
552
 
54
553
 
@@ -206,7 +705,7 @@ class ChatStreamHandler(APIHandler):
206
705
 
207
706
  except Exception as e:
208
707
  logger.error(f"Chat stream failed: {e}", exc_info=True)
209
- self.write(f'data: {json.dumps({"error": str(e)})}\n\n')
708
+ self.write(f"data: {json.dumps({'error': str(e)})}\n\n")
210
709
  self.finish()
211
710
 
212
711
 
@@ -235,6 +734,410 @@ class RAGSearchHandler(APIHandler):
235
734
  self.write({"error": str(e)})
236
735
 
237
736
 
737
class ExecuteCommandHandler(APIHandler):
    """Handler for /execute-command endpoint (runs on Jupyter server)."""

    async def post(self):
        """Run an approved shell command and return its captured output.

        JSON body: ``command`` (required), ``timeout`` (ms, optional),
        ``cwd`` (optional, must resolve inside the workspace), ``stdin``
        (optional input for interactive commands).
        """
        try:
            raw = self.request.body
            body = json.loads(raw.decode("utf-8")) if raw else {}

            command = (body.get("command") or "").strip()
            timeout_ms = _resolve_timeout_ms(body.get("timeout"))
            requested_cwd = (body.get("cwd") or "").strip()
            # Optional stdin payload for interactive commands.
            stdin_input = body.get("stdin")

            if not command:
                self.set_status(400)
                self.write({"error": "command is required"})
                return

            server_root = os.path.expanduser(
                self.settings.get("server_root_dir", os.getcwd())
            )
            workspace_root = _resolve_workspace_root(server_root)

            try:
                cwd = _resolve_command_cwd(
                    server_root, workspace_root, requested_cwd
                )
            except ValueError as exc:
                self.set_status(400)
                self.write({"error": str(exc)})
                return

            # The command itself blocks; keep it off the event loop.
            result = await asyncio.get_running_loop().run_in_executor(
                None,
                lambda: _run_shell_command(command, timeout_ms, cwd, stdin_input),
            )

            self.set_header("Content-Type", "application/json")
            self.write(result)

        except Exception as e:
            logger.error(f"Execute command failed: {e}", exc_info=True)
            self.set_status(500)
            self.write({"error": str(e)})
+
784
class ExecuteCommandStreamHandler(APIHandler):
    """Handler for /execute-command/stream endpoint (runs on Jupyter server).

    Streams a shell command's stdout/stderr to the client as Server-Sent
    Events ("start", per-chunk output events emitted by the drain helper,
    then a final "result" event with the aggregate outcome).
    """

    async def post(self):
        """Execute shell command and stream output."""
        try:
            body = (
                json.loads(self.request.body.decode("utf-8"))
                if self.request.body
                else {}
            )
            command = (body.get("command") or "").strip()
            timeout_ms = _resolve_stream_timeout_ms(body.get("timeout"))
            requested_cwd = (body.get("cwd") or "").strip()
            stdin_input = body.get("stdin")  # Optional stdin for interactive commands

            if not command:
                self.set_status(400)
                self.write({"error": "command is required"})
                return

            server_root = self.settings.get("server_root_dir", os.getcwd())
            server_root = os.path.expanduser(server_root)
            workspace_root = _resolve_workspace_root(server_root)

            # cwd must resolve inside the workspace; reject otherwise.
            try:
                cwd = _resolve_command_cwd(server_root, workspace_root, requested_cwd)
            except ValueError as exc:
                self.set_status(400)
                self.write({"error": str(exc)})
                return

            # SSE headers; X-Accel-Buffering disables proxy buffering so
            # chunks reach the client immediately.
            self.set_header("Content-Type", "text/event-stream")
            self.set_header("Cache-Control", "no-cache")
            self.set_header("Connection", "keep-alive")
            self.set_header("X-Accel-Buffering", "no")

            async def emit(event: str, payload: Dict[str, Any]) -> None:
                # Write one SSE frame and flush so it is delivered now.
                self.write(f"event: {event}\n")
                self.write(f"data: {json.dumps(payload)}\n\n")
                await self.flush()

            start_time = time.monotonic()
            await emit("start", {"command": command, "cwd": cwd})

            # Use PIPE for stdin if input is provided
            stdin_pipe = asyncio.subprocess.PIPE if stdin_input else None
            process = await asyncio.create_subprocess_shell(
                command,
                stdin=stdin_pipe,
                stdout=asyncio.subprocess.PIPE,
                stderr=asyncio.subprocess.PIPE,
                cwd=cwd,
            )

            # Write stdin if provided
            if stdin_input and process.stdin:
                process.stdin.write(stdin_input.encode())
                await process.stdin.drain()
                process.stdin.close()
                await process.stdin.wait_closed()

            # Aggregated output, capped by _append_stream_output (which
            # reports whether it had to truncate).
            stdout_text = ""
            stderr_text = ""
            stdout_truncated = False
            stderr_truncated = False

            def append_stdout(text: str) -> None:
                nonlocal stdout_text, stdout_truncated
                stdout_text, truncated = _append_stream_output(stdout_text, text)
                stdout_truncated = stdout_truncated or truncated

            def append_stderr(text: str) -> None:
                nonlocal stderr_text, stderr_truncated
                stderr_text, truncated = _append_stream_output(stderr_text, text)
                stderr_truncated = stderr_truncated or truncated

            # Drain both pipes concurrently; each task both emits SSE chunks
            # and accumulates text via the callbacks above.
            stdout_task = asyncio.create_task(
                _stream_subprocess_output(process.stdout, "stdout", emit, append_stdout)
            )
            stderr_task = asyncio.create_task(
                _stream_subprocess_output(process.stderr, "stderr", emit, append_stderr)
            )

            timed_out = False
            # None timeout means "wait forever"; otherwise clamp to >= 100 ms.
            timeout_sec = None if timeout_ms is None else max(0.1, timeout_ms / 1000)
            try:
                if timeout_sec is None:
                    await process.wait()
                else:
                    await asyncio.wait_for(process.wait(), timeout=timeout_sec)
            except asyncio.TimeoutError:
                timed_out = True
                process.kill()
                await process.wait()

            # Give the drain tasks a short grace period, then cancel them so
            # a stuck pipe cannot hang the response.
            try:
                await asyncio.wait_for(
                    asyncio.gather(stdout_task, stderr_task),
                    timeout=0.5,
                )
            except asyncio.TimeoutError:
                stdout_task.cancel()
                stderr_task.cancel()
                await asyncio.gather(stdout_task, stderr_task, return_exceptions=True)

            duration_ms = int((time.monotonic() - start_time) * 1000)
            truncated = stdout_truncated or stderr_truncated
            if timed_out:
                # timeout_sec is always non-None here: timed_out can only be
                # set when a finite timeout was applied.
                result = {
                    "success": False,
                    "stdout": stdout_text,
                    "stderr": stderr_text,
                    "returncode": process.returncode,
                    "error": f"Command timed out after {timeout_sec}s",
                    "truncated": truncated,
                    "cwd": cwd,
                    "duration_ms": duration_ms,
                }
            else:
                result = {
                    "success": process.returncode == 0,
                    "stdout": stdout_text,
                    "stderr": stderr_text,
                    "returncode": process.returncode,
                    "truncated": truncated,
                    "cwd": cwd,
                    "duration_ms": duration_ms,
                }

            await emit("result", result)
            self.finish()

        except Exception as e:
            logger.error(f"Execute command stream failed: {e}", exc_info=True)
            self.set_header("Content-Type", "text/event-stream")
            self.write(f"event: error\ndata: {json.dumps({'error': str(e)})}\n\n")
            self.finish()
+
924
class ResourceUsageHandler(APIHandler):
    """Handler for /resource-usage endpoint (runs on Jupyter server)."""

    async def get(self):
        """Return resource usage summary for the client host."""
        try:
            root = os.path.expanduser(
                self.settings.get("server_root_dir", os.getcwd())
            )
            usage = get_integrated_resources(
                workspace_root=_resolve_workspace_root(root)
            )

            self.set_header("Content-Type", "application/json")
            self.write({"resource": usage})

        except Exception as e:
            logger.error(f"Resource usage collection failed: {e}", exc_info=True)
            self.set_status(500)
            self.write({"error": str(e)})
+
945
class CheckResourceHandler(APIHandler):
    """Handler for /check-resource endpoint (runs on Jupyter server).

    Checks system resources, file sizes, and DataFrame shapes for resource-aware
    code generation.
    """

    async def post(self):
        """Check resources for the requested files and dataframes.

        JSON body fields:
            files (list[str]): paths to stat, absolute or relative to the
                Jupyter server root.
            dataframes (list[str]): DataFrame variable names to inspect.
            file_size_command (str): accepted for API compatibility; not used
                by this handler (file sizes are read via os.stat instead).
            dataframe_check_code (str): Python snippet executed silently in
                the first running kernel; expected to print a JSON list
                describing the requested frames.
        """
        try:
            body = (
                json.loads(self.request.body.decode("utf-8"))
                if self.request.body
                else {}
            )
            files = body.get("files", [])
            dataframes = body.get("dataframes", [])
            dataframe_check_code = body.get("dataframe_check_code", "")

            server_root = self.settings.get("server_root_dir", os.getcwd())
            server_root = os.path.expanduser(server_root)
            workspace_root = _resolve_workspace_root(server_root)

            result: Dict[str, Any] = {"success": True}

            # 1. System resources (RAM converted from GB to MB for the client).
            resource = get_integrated_resources(workspace_root=workspace_root)
            memory = resource.get("memory", {})
            result["system"] = {
                "ram_available_mb": round(
                    (memory.get("available_gb") or 0) * 1024, 2
                ),
                "ram_total_mb": round((memory.get("total_gb") or 0) * 1024, 2),
                "cpu_cores": resource.get("cpu", {}).get("cores"),
                "environment": resource.get("environment"),
            }

            # 2. File sizes.
            # Relative paths resolve against server_root (Jupyter's working
            # directory), NOT workspace_root, to match list_files_tool.
            file_info = []
            for file_path in files:
                abs_path = (
                    file_path
                    if os.path.isabs(file_path)
                    else os.path.join(server_root, file_path)
                )
                try:
                    # Named "st" to avoid shadowing the stdlib "stat" module.
                    st = os.stat(abs_path)
                    file_info.append(
                        {
                            "name": os.path.basename(file_path),
                            "path": file_path,
                            "size_bytes": st.st_size,
                            "size_mb": round(st.st_size / (1024 * 1024), 2),
                            "exists": True,
                        }
                    )
                except (OSError, IOError) as e:
                    file_info.append(
                        {
                            "name": os.path.basename(file_path),
                            "path": file_path,
                            "exists": False,
                            "error": str(e),
                        }
                    )
            result["files"] = file_info

            # 3. DataFrame info (best-effort, via the first running kernel).
            df_info = []
            if dataframe_check_code and dataframes:
                try:
                    kernel_manager = self.settings.get("kernel_manager")
                    if kernel_manager:
                        kernel_ids = list(kernel_manager.list_kernel_ids())
                        if kernel_ids:
                            kernel = kernel_manager.get_kernel(kernel_ids[0])
                            if kernel and hasattr(kernel, "client"):
                                client = kernel.client()
                                # silent=True: do not pollute notebook output.
                                msg_id = client.execute(
                                    dataframe_check_code, silent=True
                                )
                                try:
                                    # get_shell_msg blocks, so run it in a
                                    # worker thread with an overall deadline.
                                    reply = await asyncio.wait_for(
                                        asyncio.to_thread(
                                            client.get_shell_msg, msg_id, timeout=5
                                        ),
                                        timeout=10,
                                    )
                                    if reply.get("content", {}).get("status") == "ok":
                                        # Drain iopub until the printed JSON
                                        # payload arrives (or the queue dries
                                        # up / parsing fails).
                                        while True:
                                            try:
                                                iopub_msg = client.get_iopub_msg(
                                                    timeout=1
                                                )
                                                if (
                                                    iopub_msg.get("msg_type")
                                                    == "stream"
                                                ):
                                                    text = iopub_msg.get(
                                                        "content", {}
                                                    ).get("text", "")
                                                    if text:
                                                        df_info = json.loads(text)
                                                        break
                                            except Exception:
                                                break
                                except asyncio.TimeoutError:
                                    logger.warning("DataFrame check timed out")
                except Exception as e:
                    logger.warning(f"DataFrame check failed: {e}")
                    # Return placeholder entries for the requested dataframes.
                    for df_name in dataframes:
                        df_info.append(
                            {
                                "name": df_name,
                                "exists": False,
                                "error": "Kernel execution failed",
                            }
                        )
            result["dataframes"] = df_info

            self.set_header("Content-Type", "application/json")
            self.write(result)

        except Exception as e:
            logger.error(f"Resource check failed: {e}", exc_info=True)
            self.set_status(500)
            self.write({"success": False, "error": str(e)})
+
1071
class WriteFileHandler(APIHandler):
    """Handler for /write-file endpoint (runs on Jupyter server)."""

    async def post(self):
        """Write file content after user approval.

        JSON body fields:
            path (str, required): target path, absolute or relative.
            content (str): file content to write (default "").
            encoding (str): text encoding (default "utf-8").
            overwrite (bool): allow replacing an existing file (default False).
            cwd (str): base directory for relative paths (optional).
        """
        try:
            body = (
                json.loads(self.request.body.decode("utf-8"))
                if self.request.body
                else {}
            )
            path = (body.get("path") or "").strip()
            content = body.get("content") or ""
            encoding = body.get("encoding") or "utf-8"
            overwrite = bool(body.get("overwrite", False))
            requested_cwd = (body.get("cwd") or "").strip()

            if not path:
                self.set_status(400)
                self.write({"error": "path is required"})
                return

            server_root = self.settings.get("server_root_dir", os.getcwd())
            server_root = os.path.expanduser(server_root)
            workspace_root = _resolve_workspace_root(server_root)

            # If no cwd requested, use server_root as default (notebook directory)
            # so files are saved relative to where Jupyter was started.
            effective_cwd = requested_cwd
            if not effective_cwd:
                abs_server_root = os.path.abspath(server_root)
                # Use server_root if it's within workspace_root, otherwise use
                # workspace_root. os.path.commonpath raises ValueError for
                # paths on different drives (Windows); treat that case as
                # "outside the workspace" instead of surfacing a 500.
                try:
                    inside_workspace = (
                        os.path.commonpath([workspace_root, abs_server_root])
                        == workspace_root
                    )
                except ValueError:
                    inside_workspace = False
                effective_cwd = abs_server_root if inside_workspace else workspace_root

            try:
                resolved_path = _resolve_path_in_workspace(
                    path, workspace_root, effective_cwd
                )
            except ValueError as exc:
                self.set_status(400)
                self.write({"error": str(exc)})
                return

            # File I/O is blocking; run it in the default executor.
            loop = asyncio.get_running_loop()
            result = await loop.run_in_executor(
                None, _write_file, resolved_path, content, encoding, overwrite
            )

            # Echo back what was requested and where it actually landed.
            result.update(
                {
                    "path": path,
                    "resolved_path": resolved_path,
                    "overwrite": overwrite,
                }
            )
            self.set_header("Content-Type", "application/json")
            self.write(result)

        except Exception as e:
            logger.error(f"Write file failed: {e}", exc_info=True)
            self.set_status(500)
            self.write({"error": str(e)})
+
238
1141
  class RAGStatusHandler(APIHandler):
239
1142
  """Handler for /rag/status endpoint using ServiceFactory."""
240
1143
 
@@ -265,6 +1168,7 @@ class BaseProxyHandler(APIHandler):
265
1168
  def agent_server_url(self) -> str:
266
1169
  """Get the Agent Server base URL."""
267
1170
  from .config import get_agent_server_config
1171
+
268
1172
  config = get_agent_server_config()
269
1173
  return config.base_url
270
1174
 
@@ -272,6 +1176,7 @@ class BaseProxyHandler(APIHandler):
272
1176
  def timeout(self) -> float:
273
1177
  """Get request timeout."""
274
1178
  from .config import get_agent_server_config
1179
+
275
1180
  config = get_agent_server_config()
276
1181
  return config.timeout
277
1182
 
@@ -281,7 +1186,7 @@ class BaseProxyHandler(APIHandler):
281
1186
  base_url = self.settings.get("base_url", "/")
282
1187
  prefix = url_path_join(base_url, "hdsp-agent")
283
1188
  if request_path.startswith(prefix):
284
- return request_path[len(prefix):]
1189
+ return request_path[len(prefix) :]
285
1190
  return request_path
286
1191
 
287
1192
  async def proxy_request(self, method: str = "GET", body: bytes = None):
@@ -300,9 +1205,13 @@ class BaseProxyHandler(APIHandler):
300
1205
  if method == "GET":
301
1206
  response = await client.get(target_url, headers=headers)
302
1207
  elif method == "POST":
303
- response = await client.post(target_url, headers=headers, content=body)
1208
+ response = await client.post(
1209
+ target_url, headers=headers, content=body
1210
+ )
304
1211
  elif method == "PUT":
305
- response = await client.put(target_url, headers=headers, content=body)
1212
+ response = await client.put(
1213
+ target_url, headers=headers, content=body
1214
+ )
306
1215
  elif method == "DELETE":
307
1216
  response = await client.delete(target_url, headers=headers)
308
1217
  else:
@@ -312,22 +1221,30 @@ class BaseProxyHandler(APIHandler):
312
1221
 
313
1222
  self.set_status(response.status_code)
314
1223
  for name, value in response.headers.items():
315
- if name.lower() not in ("content-encoding", "transfer-encoding", "content-length"):
1224
+ if name.lower() not in (
1225
+ "content-encoding",
1226
+ "transfer-encoding",
1227
+ "content-length",
1228
+ ):
316
1229
  self.set_header(name, value)
317
1230
  self.write(response.content)
318
1231
 
319
1232
  except httpx.ConnectError:
320
1233
  self.set_status(503)
321
- self.write({
322
- "error": "Agent Server is not available",
323
- "detail": f"Could not connect to {self.agent_server_url}",
324
- })
1234
+ self.write(
1235
+ {
1236
+ "error": "Agent Server is not available",
1237
+ "detail": f"Could not connect to {self.agent_server_url}",
1238
+ }
1239
+ )
325
1240
  except httpx.TimeoutException:
326
1241
  self.set_status(504)
327
- self.write({
328
- "error": "Agent Server timeout",
329
- "detail": f"Request to {target_url} timed out after {self.timeout}s",
330
- })
1242
+ self.write(
1243
+ {
1244
+ "error": "Agent Server timeout",
1245
+ "detail": f"Request to {target_url} timed out after {self.timeout}s",
1246
+ }
1247
+ )
331
1248
  except Exception as e:
332
1249
  self.set_status(500)
333
1250
  self.write({"error": "Proxy error", "detail": str(e)})
@@ -351,12 +1268,14 @@ class StreamProxyHandler(APIHandler):
351
1268
  @property
352
1269
  def agent_server_url(self) -> str:
353
1270
  from .config import get_agent_server_config
1271
+
354
1272
  config = get_agent_server_config()
355
1273
  return config.base_url
356
1274
 
357
1275
  @property
358
1276
  def timeout(self) -> float:
359
1277
  from .config import get_agent_server_config
1278
+
360
1279
  config = get_agent_server_config()
361
1280
  return config.timeout
362
1281
 
@@ -365,7 +1284,7 @@ class StreamProxyHandler(APIHandler):
365
1284
  base_url = self.settings.get("base_url", "/")
366
1285
  prefix = url_path_join(base_url, "hdsp-agent")
367
1286
  if request_path.startswith(prefix):
368
- return request_path[len(prefix):]
1287
+ return request_path[len(prefix) :]
369
1288
  return request_path
370
1289
 
371
1290
  async def post(self, *args, **kwargs):
@@ -391,11 +1310,13 @@ class StreamProxyHandler(APIHandler):
391
1310
  await self.flush()
392
1311
 
393
1312
  except httpx.ConnectError:
394
- self.write(f'data: {json.dumps({"error": "Agent Server is not available"})}\n\n')
1313
+ self.write(
1314
+ f"data: {json.dumps({'error': 'Agent Server is not available'})}\n\n"
1315
+ )
395
1316
  except httpx.TimeoutException:
396
- self.write(f'data: {json.dumps({"error": "Agent Server timeout"})}\n\n')
1317
+ self.write(f"data: {json.dumps({'error': 'Agent Server timeout'})}\n\n")
397
1318
  except Exception as e:
398
- self.write(f'data: {json.dumps({"error": str(e)})}\n\n')
1319
+ self.write(f"data: {json.dumps({'error': str(e)})}\n\n")
399
1320
  finally:
400
1321
  self.finish()
401
1322
 
@@ -427,6 +1348,7 @@ class HealthHandler(APIHandler):
427
1348
  else:
428
1349
  # In proxy mode, check agent server connectivity
429
1350
  from .config import get_agent_server_config
1351
+
430
1352
  config = get_agent_server_config()
431
1353
 
432
1354
  agent_server_healthy = False
@@ -449,10 +1371,12 @@ class HealthHandler(APIHandler):
449
1371
 
450
1372
  except Exception as e:
451
1373
  logger.error(f"Health check failed: {e}")
452
- self.write({
453
- "status": "degraded",
454
- "error": str(e),
455
- })
1374
+ self.write(
1375
+ {
1376
+ "status": "degraded",
1377
+ "error": str(e),
1378
+ }
1379
+ )
456
1380
 
457
1381
 
458
1382
  class ConfigProxyHandler(BaseProxyHandler):
@@ -583,7 +1507,25 @@ class LangChainStreamProxyHandler(StreamProxyHandler):
583
1507
  async def post(self, *args, **kwargs):
584
1508
  """Inject workspaceRoot based on Jupyter server root."""
585
1509
  try:
586
- body = json.loads(self.request.body.decode("utf-8")) if self.request.body else {}
1510
+ # Log request info for debugging
1511
+ body_len = len(self.request.body) if self.request.body else 0
1512
+ logger.info(
1513
+ "LangChainStreamProxy: Received request, body size=%d bytes",
1514
+ body_len,
1515
+ )
1516
+
1517
+ body = (
1518
+ json.loads(self.request.body.decode("utf-8"))
1519
+ if self.request.body
1520
+ else {}
1521
+ )
1522
+
1523
+ # Log parsed request info
1524
+ request_text = body.get("request", "")
1525
+ logger.info(
1526
+ "LangChainStreamProxy: Parsed request, message length=%d chars",
1527
+ len(request_text),
1528
+ )
587
1529
  server_root = self.settings.get("server_root_dir", os.getcwd())
588
1530
  server_root = os.path.expanduser(server_root)
589
1531
  resolved_root = _resolve_workspace_root(server_root)
@@ -613,12 +1555,14 @@ class LangChainStreamProxyHandler(StreamProxyHandler):
613
1555
  self.write(chunk)
614
1556
  await self.flush()
615
1557
  except httpx.ConnectError:
616
- self.write(f'data: {json.dumps({"error": "Agent Server is not available"})}\n\n')
1558
+ self.write(
1559
+ f"data: {json.dumps({'error': 'Agent Server is not available'})}\n\n"
1560
+ )
617
1561
  except httpx.TimeoutException:
618
- self.write(f'data: {json.dumps({"error": "Agent Server timeout"})}\n\n')
1562
+ self.write(f"data: {json.dumps({'error': 'Agent Server timeout'})}\n\n")
619
1563
  except Exception as e:
620
1564
  logger.error(f"LangChainStreamProxy error: {e}", exc_info=True)
621
- self.write(f'data: {json.dumps({"error": str(e)})}\n\n')
1565
+ self.write(f"data: {json.dumps({'error': str(e)})}\n\n")
622
1566
  finally:
623
1567
  self.finish()
624
1568
 
@@ -632,7 +1576,11 @@ class LangChainResumeProxyHandler(StreamProxyHandler):
632
1576
  async def post(self, *args, **kwargs):
633
1577
  """Inject workspaceRoot based on Jupyter server root."""
634
1578
  try:
635
- body = json.loads(self.request.body.decode("utf-8")) if self.request.body else {}
1579
+ body = (
1580
+ json.loads(self.request.body.decode("utf-8"))
1581
+ if self.request.body
1582
+ else {}
1583
+ )
636
1584
  server_root = self.settings.get("server_root_dir", os.getcwd())
637
1585
  server_root = os.path.expanduser(server_root)
638
1586
  resolved_root = _resolve_workspace_root(server_root)
@@ -662,12 +1610,14 @@ class LangChainResumeProxyHandler(StreamProxyHandler):
662
1610
  self.write(chunk)
663
1611
  await self.flush()
664
1612
  except httpx.ConnectError:
665
- self.write(f'data: {json.dumps({"error": "Agent Server is not available"})}\n\n')
1613
+ self.write(
1614
+ f"data: {json.dumps({'error': 'Agent Server is not available'})}\n\n"
1615
+ )
666
1616
  except httpx.TimeoutException:
667
- self.write(f'data: {json.dumps({"error": "Agent Server timeout"})}\n\n')
1617
+ self.write(f"data: {json.dumps({'error': 'Agent Server timeout'})}\n\n")
668
1618
  except Exception as e:
669
1619
  logger.error(f"LangChainResumeProxy error: {e}", exc_info=True)
670
- self.write(f'data: {json.dumps({"error": str(e)})}\n\n')
1620
+ self.write(f"data: {json.dumps({'error': str(e)})}\n\n")
671
1621
  finally:
672
1622
  self.finish()
673
1623
 
@@ -679,6 +1629,100 @@ class LangChainHealthProxyHandler(BaseProxyHandler):
679
1629
  return "/agent/langchain/health"
680
1630
 
681
1631
 
1632
class SearchWorkspaceHandler(APIHandler):
    """Handler for /search-workspace endpoint (runs on Jupyter server)."""

    async def post(self):
        """Execute a workspace-wide text search and return matches as JSON."""
        try:
            payload = (
                json.loads(self.request.body.decode("utf-8"))
                if self.request.body
                else {}
            )

            pattern = (payload.get("pattern") or "").strip()
            file_types = payload.get("file_types") or ["*.py", "*.ipynb"]
            search_path = payload.get("path") or "."
            max_results = payload.get("max_results", 50)
            case_sensitive = bool(payload.get("case_sensitive", False))

            if not pattern:
                self.set_status(400)
                self.write({"error": "pattern is required"})
                return

            root = os.path.expanduser(
                self.settings.get("server_root_dir", os.getcwd())
            )
            workspace_root = _resolve_workspace_root(root)

            # The search walks the filesystem; keep it off the event loop.
            result = await asyncio.get_running_loop().run_in_executor(
                None,
                _execute_search_workspace,
                pattern,
                file_types,
                search_path,
                max_results,
                case_sensitive,
                workspace_root,
            )

            self.set_header("Content-Type", "application/json")
            self.write(result)

        except Exception as e:
            logger.error(f"Search workspace failed: {e}", exc_info=True)
            self.set_status(500)
            self.write({"error": str(e)})
1678
+
1679
class SearchNotebookCellsHandler(APIHandler):
    """Handler for /search-notebook-cells endpoint (runs on Jupyter server)."""

    async def post(self):
        """Search notebook cells for a pattern and return matches as JSON."""
        try:
            payload = (
                json.loads(self.request.body.decode("utf-8"))
                if self.request.body
                else {}
            )

            pattern = (payload.get("pattern") or "").strip()
            notebook_path = payload.get("notebook_path")
            cell_type = payload.get("cell_type")
            max_results = payload.get("max_results", 30)
            case_sensitive = bool(payload.get("case_sensitive", False))

            if not pattern:
                self.set_status(400)
                self.write({"error": "pattern is required"})
                return

            root = os.path.expanduser(
                self.settings.get("server_root_dir", os.getcwd())
            )
            workspace_root = _resolve_workspace_root(root)

            # Notebook scanning reads the filesystem; keep it off the event loop.
            result = await asyncio.get_running_loop().run_in_executor(
                None,
                _execute_search_notebook_cells,
                pattern,
                notebook_path,
                cell_type,
                max_results,
                case_sensitive,
                workspace_root,
            )

            self.set_header("Content-Type", "application/json")
            self.write(result)

        except Exception as e:
            logger.error(f"Search notebook cells failed: {e}", exc_info=True)
            self.set_status(500)
            self.write({"error": str(e)})
1725
+
682
1726
  class RAGReindexHandler(APIHandler):
683
1727
  """Handler for /rag/reindex endpoint using ServiceFactory."""
684
1728
 
@@ -688,7 +1732,11 @@ class RAGReindexHandler(APIHandler):
688
1732
  factory = _get_service_factory()
689
1733
  rag_service = factory.get_rag_service()
690
1734
 
691
- body = json.loads(self.request.body.decode("utf-8")) if self.request.body else {}
1735
+ body = (
1736
+ json.loads(self.request.body.decode("utf-8"))
1737
+ if self.request.body
1738
+ else {}
1739
+ )
692
1740
  force = body.get("force", False)
693
1741
 
694
1742
  response = await rag_service.trigger_reindex(force=force)
@@ -715,43 +1763,113 @@ def setup_handlers(web_app):
715
1763
  (url_path_join(base_url, "hdsp-agent", "health"), HealthHandler),
716
1764
  # Config endpoint (still proxied)
717
1765
  (url_path_join(base_url, "hdsp-agent", "config"), ConfigProxyHandler),
718
-
719
1766
  # ===== ServiceFactory-based handlers =====
720
1767
  # Agent endpoints
721
1768
  (url_path_join(base_url, "hdsp-agent", "auto-agent", "plan"), AgentPlanHandler),
722
- (url_path_join(base_url, "hdsp-agent", "auto-agent", "refine"), AgentRefineHandler),
723
- (url_path_join(base_url, "hdsp-agent", "auto-agent", "replan"), AgentReplanHandler),
724
- (url_path_join(base_url, "hdsp-agent", "auto-agent", "validate"), AgentValidateHandler),
725
-
1769
+ (
1770
+ url_path_join(base_url, "hdsp-agent", "auto-agent", "refine"),
1771
+ AgentRefineHandler,
1772
+ ),
1773
+ (
1774
+ url_path_join(base_url, "hdsp-agent", "auto-agent", "replan"),
1775
+ AgentReplanHandler,
1776
+ ),
1777
+ (
1778
+ url_path_join(base_url, "hdsp-agent", "auto-agent", "validate"),
1779
+ AgentValidateHandler,
1780
+ ),
726
1781
  # Chat endpoints
727
1782
  (url_path_join(base_url, "hdsp-agent", "chat", "message"), ChatMessageHandler),
728
1783
  (url_path_join(base_url, "hdsp-agent", "chat", "stream"), ChatStreamHandler),
729
-
730
1784
  # LangChain agent endpoints (proxy to agent-server)
731
- (url_path_join(base_url, "hdsp-agent", "agent", "langchain", "stream"), LangChainStreamProxyHandler),
732
- (url_path_join(base_url, "hdsp-agent", "agent", "langchain", "resume"), LangChainResumeProxyHandler),
733
- (url_path_join(base_url, "hdsp-agent", "agent", "langchain", "health"), LangChainHealthProxyHandler),
734
-
1785
+ (
1786
+ url_path_join(base_url, "hdsp-agent", "agent", "langchain", "stream"),
1787
+ LangChainStreamProxyHandler,
1788
+ ),
1789
+ (
1790
+ url_path_join(base_url, "hdsp-agent", "agent", "langchain", "resume"),
1791
+ LangChainResumeProxyHandler,
1792
+ ),
1793
+ (
1794
+ url_path_join(base_url, "hdsp-agent", "agent", "langchain", "health"),
1795
+ LangChainHealthProxyHandler,
1796
+ ),
1797
+ # Shell command execution (server-side, approval required)
1798
+ (
1799
+ url_path_join(base_url, "hdsp-agent", "execute-command"),
1800
+ ExecuteCommandHandler,
1801
+ ),
1802
+ (
1803
+ url_path_join(base_url, "hdsp-agent", "execute-command", "stream"),
1804
+ ExecuteCommandStreamHandler,
1805
+ ),
1806
+ (
1807
+ url_path_join(base_url, "hdsp-agent", "resource-usage"),
1808
+ ResourceUsageHandler,
1809
+ ),
1810
+ # Resource check for data processing (server-side, auto-approved)
1811
+ (
1812
+ url_path_join(base_url, "hdsp-agent", "check-resource"),
1813
+ CheckResourceHandler,
1814
+ ),
1815
+ # File write execution (server-side, approval required)
1816
+ (url_path_join(base_url, "hdsp-agent", "write-file"), WriteFileHandler),
1817
+ # Search endpoints (server-side, no approval required)
1818
+ (
1819
+ url_path_join(base_url, "hdsp-agent", "search-workspace"),
1820
+ SearchWorkspaceHandler,
1821
+ ),
1822
+ (
1823
+ url_path_join(base_url, "hdsp-agent", "search-notebook-cells"),
1824
+ SearchNotebookCellsHandler,
1825
+ ),
735
1826
  # RAG endpoints
736
1827
  (url_path_join(base_url, "hdsp-agent", "rag", "search"), RAGSearchHandler),
737
1828
  (url_path_join(base_url, "hdsp-agent", "rag", "status"), RAGStatusHandler),
738
1829
  (url_path_join(base_url, "hdsp-agent", "rag", "reindex"), RAGReindexHandler),
739
-
740
1830
  # ===== Proxy-only handlers (not yet migrated to ServiceFactory) =====
741
- (url_path_join(base_url, "hdsp-agent", "auto-agent", "reflect"), AgentReflectProxyHandler),
742
- (url_path_join(base_url, "hdsp-agent", "auto-agent", "verify-state"), AgentVerifyStateProxyHandler),
743
- (url_path_join(base_url, "hdsp-agent", "auto-agent", "plan", "stream"), AgentPlanStreamProxyHandler),
744
-
1831
+ (
1832
+ url_path_join(base_url, "hdsp-agent", "auto-agent", "reflect"),
1833
+ AgentReflectProxyHandler,
1834
+ ),
1835
+ (
1836
+ url_path_join(base_url, "hdsp-agent", "auto-agent", "verify-state"),
1837
+ AgentVerifyStateProxyHandler,
1838
+ ),
1839
+ (
1840
+ url_path_join(base_url, "hdsp-agent", "auto-agent", "plan", "stream"),
1841
+ AgentPlanStreamProxyHandler,
1842
+ ),
745
1843
  # Cell/File action endpoints
746
- (url_path_join(base_url, "hdsp-agent", "cell", "action"), CellActionProxyHandler),
747
- (url_path_join(base_url, "hdsp-agent", "file", "action"), FileActionProxyHandler),
748
- (url_path_join(base_url, "hdsp-agent", "file", "resolve"), FileResolveProxyHandler),
749
- (url_path_join(base_url, "hdsp-agent", "file", "select"), FileSelectProxyHandler),
750
-
1844
+ (
1845
+ url_path_join(base_url, "hdsp-agent", "cell", "action"),
1846
+ CellActionProxyHandler,
1847
+ ),
1848
+ (
1849
+ url_path_join(base_url, "hdsp-agent", "file", "action"),
1850
+ FileActionProxyHandler,
1851
+ ),
1852
+ (
1853
+ url_path_join(base_url, "hdsp-agent", "file", "resolve"),
1854
+ FileResolveProxyHandler,
1855
+ ),
1856
+ (
1857
+ url_path_join(base_url, "hdsp-agent", "file", "select"),
1858
+ FileSelectProxyHandler,
1859
+ ),
751
1860
  # Task endpoints
752
- (url_path_join(base_url, "hdsp-agent", "task", r"([^/]+)", "status"), TaskStatusProxyHandler),
753
- (url_path_join(base_url, "hdsp-agent", "task", r"([^/]+)", "stream"), TaskStreamProxyHandler),
754
- (url_path_join(base_url, "hdsp-agent", "task", r"([^/]+)", "cancel"), TaskCancelProxyHandler),
1861
+ (
1862
+ url_path_join(base_url, "hdsp-agent", "task", r"([^/]+)", "status"),
1863
+ TaskStatusProxyHandler,
1864
+ ),
1865
+ (
1866
+ url_path_join(base_url, "hdsp-agent", "task", r"([^/]+)", "stream"),
1867
+ TaskStreamProxyHandler,
1868
+ ),
1869
+ (
1870
+ url_path_join(base_url, "hdsp-agent", "task", r"([^/]+)", "cancel"),
1871
+ TaskCancelProxyHandler,
1872
+ ),
755
1873
  ]
756
1874
 
757
1875
  web_app.add_handlers(host_pattern, handlers)