hdsp-jupyter-extension 2.0.6__py3-none-any.whl → 2.0.7__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (90)
  1. agent_server/core/reflection_engine.py +0 -1
  2. agent_server/knowledge/watchdog_service.py +1 -1
  3. agent_server/langchain/ARCHITECTURE.md +1193 -0
  4. agent_server/langchain/agent.py +74 -588
  5. agent_server/langchain/custom_middleware.py +636 -0
  6. agent_server/langchain/executors/__init__.py +2 -7
  7. agent_server/langchain/executors/notebook_searcher.py +46 -38
  8. agent_server/langchain/hitl_config.py +66 -0
  9. agent_server/langchain/llm_factory.py +166 -0
  10. agent_server/langchain/logging_utils.py +184 -0
  11. agent_server/langchain/prompts.py +119 -0
  12. agent_server/langchain/state.py +16 -6
  13. agent_server/langchain/tools/__init__.py +6 -0
  14. agent_server/langchain/tools/file_tools.py +91 -129
  15. agent_server/langchain/tools/jupyter_tools.py +18 -18
  16. agent_server/langchain/tools/resource_tools.py +161 -0
  17. agent_server/langchain/tools/search_tools.py +198 -216
  18. agent_server/langchain/tools/shell_tools.py +54 -0
  19. agent_server/main.py +4 -1
  20. agent_server/routers/health.py +1 -1
  21. agent_server/routers/langchain_agent.py +940 -285
  22. hdsp_agent_core/prompts/auto_agent_prompts.py +3 -3
  23. {hdsp_jupyter_extension-2.0.6.data → hdsp_jupyter_extension-2.0.7.data}/data/share/jupyter/labextensions/hdsp-agent/build_log.json +1 -1
  24. {hdsp_jupyter_extension-2.0.6.data → hdsp_jupyter_extension-2.0.7.data}/data/share/jupyter/labextensions/hdsp-agent/package.json +2 -2
  25. hdsp_jupyter_extension-2.0.6.data/data/share/jupyter/labextensions/hdsp-agent/static/frontend_styles_index_js.02d346171474a0fb2dc1.js → hdsp_jupyter_extension-2.0.7.data/data/share/jupyter/labextensions/hdsp-agent/static/frontend_styles_index_js.4770ec0fb2d173b6deb4.js +312 -6
  26. hdsp_jupyter_extension-2.0.7.data/data/share/jupyter/labextensions/hdsp-agent/static/frontend_styles_index_js.4770ec0fb2d173b6deb4.js.map +1 -0
  27. hdsp_jupyter_extension-2.0.6.data/data/share/jupyter/labextensions/hdsp-agent/static/lib_index_js.a223ea20056954479ae9.js → hdsp_jupyter_extension-2.0.7.data/data/share/jupyter/labextensions/hdsp-agent/static/lib_index_js.29cf4312af19e86f82af.js +1547 -330
  28. hdsp_jupyter_extension-2.0.7.data/data/share/jupyter/labextensions/hdsp-agent/static/lib_index_js.29cf4312af19e86f82af.js.map +1 -0
  29. hdsp_jupyter_extension-2.0.6.data/data/share/jupyter/labextensions/hdsp-agent/static/remoteEntry.addf2fa038fa60304aa2.js → hdsp_jupyter_extension-2.0.7.data/data/share/jupyter/labextensions/hdsp-agent/static/remoteEntry.61343eb4cf0577e74b50.js +8 -8
  30. hdsp_jupyter_extension-2.0.7.data/data/share/jupyter/labextensions/hdsp-agent/static/remoteEntry.61343eb4cf0577e74b50.js.map +1 -0
  31. hdsp_jupyter_extension-2.0.6.data/data/share/jupyter/labextensions/hdsp-agent/static/vendors-node_modules_emotion_cache_dist_emotion-cache_browser_development_esm_js.24edcc52a1c014a8a5f0.js → hdsp_jupyter_extension-2.0.7.data/data/share/jupyter/labextensions/hdsp-agent/static/vendors-node_modules_emotion_cache_dist_emotion-cache_browser_development_esm_js-node_modules-782ee5.d9ed8645ef1d311657d8.js +209 -2
  32. hdsp_jupyter_extension-2.0.7.data/data/share/jupyter/labextensions/hdsp-agent/static/vendors-node_modules_emotion_cache_dist_emotion-cache_browser_development_esm_js-node_modules-782ee5.d9ed8645ef1d311657d8.js.map +1 -0
  33. jupyter_ext/labextension/static/vendors-node_modules_emotion_react_dist_emotion-react_browser_development_esm_js.19ecf6babe00caff6b8a.js → hdsp_jupyter_extension-2.0.7.data/data/share/jupyter/labextensions/hdsp-agent/static/vendors-node_modules_emotion_react_dist_emotion-react_browser_development_esm_js.36b49c71871f98d4f549.js +2 -209
  34. hdsp_jupyter_extension-2.0.7.data/data/share/jupyter/labextensions/hdsp-agent/static/vendors-node_modules_emotion_react_dist_emotion-react_browser_development_esm_js.36b49c71871f98d4f549.js.map +1 -0
  35. hdsp_jupyter_extension-2.0.6.data/data/share/jupyter/labextensions/hdsp-agent/static/vendors-node_modules_mui_material_utils_createSvgIcon_js.1f5038488cdfd8b3a85d.js → hdsp_jupyter_extension-2.0.7.data/data/share/jupyter/labextensions/hdsp-agent/static/vendors-node_modules_mui_material_utils_createSvgIcon_js.2e13df4ea61496e95d45.js +3 -212
  36. hdsp_jupyter_extension-2.0.7.data/data/share/jupyter/labextensions/hdsp-agent/static/vendors-node_modules_mui_material_utils_createSvgIcon_js.2e13df4ea61496e95d45.js.map +1 -0
  37. {hdsp_jupyter_extension-2.0.6.dist-info → hdsp_jupyter_extension-2.0.7.dist-info}/METADATA +2 -1
  38. {hdsp_jupyter_extension-2.0.6.dist-info → hdsp_jupyter_extension-2.0.7.dist-info}/RECORD +71 -68
  39. jupyter_ext/_version.py +1 -1
  40. jupyter_ext/handlers.py +1176 -58
  41. jupyter_ext/labextension/build_log.json +1 -1
  42. jupyter_ext/labextension/package.json +2 -2
  43. jupyter_ext/labextension/static/{frontend_styles_index_js.02d346171474a0fb2dc1.js → frontend_styles_index_js.4770ec0fb2d173b6deb4.js} +312 -6
  44. jupyter_ext/labextension/static/frontend_styles_index_js.4770ec0fb2d173b6deb4.js.map +1 -0
  45. jupyter_ext/labextension/static/{lib_index_js.a223ea20056954479ae9.js → lib_index_js.29cf4312af19e86f82af.js} +1547 -330
  46. jupyter_ext/labextension/static/lib_index_js.29cf4312af19e86f82af.js.map +1 -0
  47. jupyter_ext/labextension/static/{remoteEntry.addf2fa038fa60304aa2.js → remoteEntry.61343eb4cf0577e74b50.js} +8 -8
  48. jupyter_ext/labextension/static/remoteEntry.61343eb4cf0577e74b50.js.map +1 -0
  49. jupyter_ext/labextension/static/{vendors-node_modules_emotion_cache_dist_emotion-cache_browser_development_esm_js.24edcc52a1c014a8a5f0.js → vendors-node_modules_emotion_cache_dist_emotion-cache_browser_development_esm_js-node_modules-782ee5.d9ed8645ef1d311657d8.js} +209 -2
  50. jupyter_ext/labextension/static/vendors-node_modules_emotion_cache_dist_emotion-cache_browser_development_esm_js-node_modules-782ee5.d9ed8645ef1d311657d8.js.map +1 -0
  51. hdsp_jupyter_extension-2.0.6.data/data/share/jupyter/labextensions/hdsp-agent/static/vendors-node_modules_emotion_react_dist_emotion-react_browser_development_esm_js.19ecf6babe00caff6b8a.js → jupyter_ext/labextension/static/vendors-node_modules_emotion_react_dist_emotion-react_browser_development_esm_js.36b49c71871f98d4f549.js +2 -209
  52. jupyter_ext/labextension/static/vendors-node_modules_emotion_react_dist_emotion-react_browser_development_esm_js.36b49c71871f98d4f549.js.map +1 -0
  53. jupyter_ext/labextension/static/{vendors-node_modules_mui_material_utils_createSvgIcon_js.1f5038488cdfd8b3a85d.js → vendors-node_modules_mui_material_utils_createSvgIcon_js.2e13df4ea61496e95d45.js} +3 -212
  54. jupyter_ext/labextension/static/vendors-node_modules_mui_material_utils_createSvgIcon_js.2e13df4ea61496e95d45.js.map +1 -0
  55. jupyter_ext/resource_usage.py +180 -0
  56. jupyter_ext/tests/test_handlers.py +58 -0
  57. agent_server/langchain/executors/jupyter_executor.py +0 -429
  58. agent_server/langchain/middleware/__init__.py +0 -36
  59. agent_server/langchain/middleware/code_search_middleware.py +0 -278
  60. agent_server/langchain/middleware/error_handling_middleware.py +0 -338
  61. agent_server/langchain/middleware/jupyter_execution_middleware.py +0 -301
  62. agent_server/langchain/middleware/rag_middleware.py +0 -227
  63. agent_server/langchain/middleware/validation_middleware.py +0 -240
  64. hdsp_jupyter_extension-2.0.6.data/data/share/jupyter/labextensions/hdsp-agent/static/frontend_styles_index_js.02d346171474a0fb2dc1.js.map +0 -1
  65. hdsp_jupyter_extension-2.0.6.data/data/share/jupyter/labextensions/hdsp-agent/static/lib_index_js.a223ea20056954479ae9.js.map +0 -1
  66. hdsp_jupyter_extension-2.0.6.data/data/share/jupyter/labextensions/hdsp-agent/static/remoteEntry.addf2fa038fa60304aa2.js.map +0 -1
  67. hdsp_jupyter_extension-2.0.6.data/data/share/jupyter/labextensions/hdsp-agent/static/vendors-node_modules_emotion_cache_dist_emotion-cache_browser_development_esm_js.24edcc52a1c014a8a5f0.js.map +0 -1
  68. hdsp_jupyter_extension-2.0.6.data/data/share/jupyter/labextensions/hdsp-agent/static/vendors-node_modules_emotion_react_dist_emotion-react_browser_development_esm_js.19ecf6babe00caff6b8a.js.map +0 -1
  69. hdsp_jupyter_extension-2.0.6.data/data/share/jupyter/labextensions/hdsp-agent/static/vendors-node_modules_mui_material_utils_createSvgIcon_js.1f5038488cdfd8b3a85d.js.map +0 -1
  70. jupyter_ext/labextension/static/frontend_styles_index_js.02d346171474a0fb2dc1.js.map +0 -1
  71. jupyter_ext/labextension/static/lib_index_js.a223ea20056954479ae9.js.map +0 -1
  72. jupyter_ext/labextension/static/remoteEntry.addf2fa038fa60304aa2.js.map +0 -1
  73. jupyter_ext/labextension/static/vendors-node_modules_emotion_cache_dist_emotion-cache_browser_development_esm_js.24edcc52a1c014a8a5f0.js.map +0 -1
  74. jupyter_ext/labextension/static/vendors-node_modules_emotion_react_dist_emotion-react_browser_development_esm_js.19ecf6babe00caff6b8a.js.map +0 -1
  75. jupyter_ext/labextension/static/vendors-node_modules_mui_material_utils_createSvgIcon_js.1f5038488cdfd8b3a85d.js.map +0 -1
  76. {hdsp_jupyter_extension-2.0.6.data → hdsp_jupyter_extension-2.0.7.data}/data/etc/jupyter/jupyter_server_config.d/hdsp_jupyter_extension.json +0 -0
  77. {hdsp_jupyter_extension-2.0.6.data → hdsp_jupyter_extension-2.0.7.data}/data/share/jupyter/labextensions/hdsp-agent/install.json +0 -0
  78. {hdsp_jupyter_extension-2.0.6.data → hdsp_jupyter_extension-2.0.7.data}/data/share/jupyter/labextensions/hdsp-agent/static/node_modules_emotion_use-insertion-effect-with-fallbacks_dist_emotion-use-insertion-effect-wi-3ba6b80.c095373419d05e6f141a.js +0 -0
  79. {hdsp_jupyter_extension-2.0.6.data → hdsp_jupyter_extension-2.0.7.data}/data/share/jupyter/labextensions/hdsp-agent/static/node_modules_emotion_use-insertion-effect-with-fallbacks_dist_emotion-use-insertion-effect-wi-3ba6b80.c095373419d05e6f141a.js.map +0 -0
  80. {hdsp_jupyter_extension-2.0.6.data → hdsp_jupyter_extension-2.0.7.data}/data/share/jupyter/labextensions/hdsp-agent/static/node_modules_emotion_use-insertion-effect-with-fallbacks_dist_emotion-use-insertion-effect-wi-3ba6b81.61e75fb98ecff46cf836.js +0 -0
  81. {hdsp_jupyter_extension-2.0.6.data → hdsp_jupyter_extension-2.0.7.data}/data/share/jupyter/labextensions/hdsp-agent/static/node_modules_emotion_use-insertion-effect-with-fallbacks_dist_emotion-use-insertion-effect-wi-3ba6b81.61e75fb98ecff46cf836.js.map +0 -0
  82. {hdsp_jupyter_extension-2.0.6.data → hdsp_jupyter_extension-2.0.7.data}/data/share/jupyter/labextensions/hdsp-agent/static/style.js +0 -0
  83. {hdsp_jupyter_extension-2.0.6.data → hdsp_jupyter_extension-2.0.7.data}/data/share/jupyter/labextensions/hdsp-agent/static/vendors-node_modules_babel_runtime_helpers_esm_extends_js-node_modules_emotion_serialize_dist-051195.e2553aab0c3963b83dd7.js +0 -0
  84. {hdsp_jupyter_extension-2.0.6.data → hdsp_jupyter_extension-2.0.7.data}/data/share/jupyter/labextensions/hdsp-agent/static/vendors-node_modules_babel_runtime_helpers_esm_extends_js-node_modules_emotion_serialize_dist-051195.e2553aab0c3963b83dd7.js.map +0 -0
  85. {hdsp_jupyter_extension-2.0.6.data → hdsp_jupyter_extension-2.0.7.data}/data/share/jupyter/labextensions/hdsp-agent/static/vendors-node_modules_emotion_styled_dist_emotion-styled_browser_development_esm_js.661fb5836f4978a7c6e1.js +0 -0
  86. {hdsp_jupyter_extension-2.0.6.data → hdsp_jupyter_extension-2.0.7.data}/data/share/jupyter/labextensions/hdsp-agent/static/vendors-node_modules_emotion_styled_dist_emotion-styled_browser_development_esm_js.661fb5836f4978a7c6e1.js.map +0 -0
  87. {hdsp_jupyter_extension-2.0.6.data → hdsp_jupyter_extension-2.0.7.data}/data/share/jupyter/labextensions/hdsp-agent/static/vendors-node_modules_mui_material_index_js.985697e0162d8d088ca2.js +0 -0
  88. {hdsp_jupyter_extension-2.0.6.data → hdsp_jupyter_extension-2.0.7.data}/data/share/jupyter/labextensions/hdsp-agent/static/vendors-node_modules_mui_material_index_js.985697e0162d8d088ca2.js.map +0 -0
  89. {hdsp_jupyter_extension-2.0.6.dist-info → hdsp_jupyter_extension-2.0.7.dist-info}/WHEEL +0 -0
  90. {hdsp_jupyter_extension-2.0.6.dist-info → hdsp_jupyter_extension-2.0.7.dist-info}/licenses/LICENSE +0 -0
jupyter_ext/resource_usage.py (new file)
@@ -0,0 +1,180 @@
+ """
+ Resource Usage Utilities for the Jupyter server host.
+
+ Collects CPU, memory, disk, and GPU usage to guide client-side execution.
+ """
+
+ from __future__ import annotations
+
+ import os
+ import shutil
+ import subprocess
+ from typing import Optional
+
+ try:
+     import psutil
+ except ImportError:  # pragma: no cover - optional dependency fallback
+     psutil = None
+
+
+ def _read_cgroup_value(path: str) -> Optional[str]:
+     try:
+         with open(path, "r", encoding="utf-8") as handle:
+             return handle.read().strip()
+     except Exception:
+         return None
+
+
+ def _format_gb(value_bytes: float) -> float:
+     return round(value_bytes / (1024**3), 2)
+
+
+ def _safe_float(value: Optional[str]) -> Optional[float]:
+     if value is None:
+         return None
+     try:
+         return float(value)
+     except (TypeError, ValueError):
+         return None
+
+
+ def _safe_cpu_count() -> Optional[int]:
+     count = os.cpu_count()
+     return int(count) if count is not None else None
+
+
+ def get_integrated_resources(
+     env_type: str = "auto", workspace_root: str = "."
+ ) -> dict[str, object]:
+     """
+     Collect resource usage snapshot for the Jupyter server host.
+
+     Args:
+         env_type: "auto", "pod", or "host" (auto detects Kubernetes)
+         workspace_root: Path used to compute disk usage
+
+     Returns:
+         JSON-serializable resource snapshot dict.
+     """
+     is_pod = False
+     if env_type == "auto":
+         is_pod = os.path.exists("/var/run/secrets/kubernetes.io") or bool(
+             os.environ.get("KUBERNETES_SERVICE_HOST")
+         )
+     else:
+         is_pod = env_type.lower() == "pod"
+
+     environment = "Kubernetes Pod" if is_pod else "Host/VM"
+     cpu: dict[str, Optional[float]] = {"cores": None, "usage_percent": None}
+     memory: dict[str, Optional[float]] = {
+         "total_gb": None,
+         "available_gb": None,
+         "used_gb": None,
+     }
+     disk: dict[str, Optional[object]] = {
+         "path": None,
+         "total_gb": None,
+         "free_gb": None,
+         "used_gb": None,
+     }
+     gpus: list[dict[str, Optional[object]]] = []
+     gpu_status = "not_detected"
+
+     if is_pod:
+         try:
+             cpu_max = _read_cgroup_value("/sys/fs/cgroup/cpu.max")
+             if cpu_max:
+                 quota, period = cpu_max.split()
+                 if quota != "max":
+                     cpu_limit = float(quota) / float(period)
+                 else:
+                     cpu_limit = psutil.cpu_count() if psutil else _safe_cpu_count() or 0
+             else:
+                 cpu_limit = psutil.cpu_count() if psutil else _safe_cpu_count() or 0
+
+             mem_limit_raw = _read_cgroup_value("/sys/fs/cgroup/memory.max")
+             mem_current_raw = _read_cgroup_value("/sys/fs/cgroup/memory.current")
+
+             if mem_limit_raw and mem_limit_raw != "max":
+                 mem_limit_gb = _format_gb(float(mem_limit_raw))
+             else:
+                 mem_limit_gb = (
+                     _format_gb(psutil.virtual_memory().total) if psutil else 0.0
+                 )
+
+             mem_used_gb = _format_gb(float(mem_current_raw)) if mem_current_raw else 0.0
+
+             cpu["cores"] = round(cpu_limit, 2)
+             memory["total_gb"] = mem_limit_gb
+             memory["used_gb"] = mem_used_gb
+             if mem_limit_gb is not None and mem_used_gb is not None:
+                 memory["available_gb"] = round(mem_limit_gb - mem_used_gb, 2)
+         except Exception:
+             cpu_count = psutil.cpu_count() if psutil else _safe_cpu_count()
+             cpu["cores"] = float(cpu_count) if cpu_count is not None else None
+             if psutil:
+                 vm = psutil.virtual_memory()
+                 memory["total_gb"] = _format_gb(vm.total)
+                 memory["available_gb"] = _format_gb(vm.available)
+                 memory["used_gb"] = _format_gb(vm.total - vm.available)
+     else:
+         cpu_count = psutil.cpu_count() if psutil else _safe_cpu_count()
+         cpu_percent = psutil.cpu_percent() if psutil else None
+         cpu["cores"] = float(cpu_count) if cpu_count is not None else None
+         cpu["usage_percent"] = cpu_percent
+         if psutil:
+             vm = psutil.virtual_memory()
+             memory["total_gb"] = _format_gb(vm.total)
+             memory["available_gb"] = _format_gb(vm.available)
+             memory["used_gb"] = _format_gb(vm.total - vm.available)
+
+     disk_path = workspace_root if workspace_root else "."
+     if not os.path.exists(disk_path):
+         disk_path = "."
+     disk["path"] = os.path.abspath(disk_path)
+     try:
+         total, used, free = shutil.disk_usage(disk_path)
+         disk["total_gb"] = _format_gb(total)
+         disk["free_gb"] = _format_gb(free)
+         disk["used_gb"] = _format_gb(used)
+     except Exception:
+         pass
+
+     if shutil.which("nvidia-smi"):
+         gpu_status = "available"
+         try:
+             result = subprocess.check_output(
+                 [
+                     "nvidia-smi",
+                     "--query-gpu=name,utilization.gpu,memory.used,memory.total",
+                     "--format=csv,noheader,nounits",
+                 ],
+                 encoding="utf-8",
+                 timeout=2,
+             ).strip()
+             if result:
+                 for line in result.split("\n"):
+                     name, gpu_util, mem_used, mem_total = [
+                         value.strip() for value in line.split(",")
+                     ]
+                     gpus.append(
+                         {
+                             "name": name,
+                             "utilization_percent": _safe_float(gpu_util),
+                             "memory_used_mb": _safe_float(mem_used),
+                             "memory_total_mb": _safe_float(mem_total),
+                         }
+                     )
+         except Exception:
+             gpu_status = "unavailable"
+     else:
+         gpu_status = "not_detected"
+
+     return {
+         "environment": environment,
+         "cpu": cpu,
+         "memory": memory,
+         "disk": disk,
+         "gpus": gpus,
+         "gpu_status": gpu_status,
+     }
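
Note: the new module exposes a single entry point, get_integrated_resources(), presumably consumed by the reworked jupyter_ext/handlers.py (not reproduced in this hunk). A minimal, hypothetical usage sketch based only on the code above:

    from jupyter_ext.resource_usage import get_integrated_resources

    # Returns a JSON-serializable dict with "environment", "cpu", "memory",
    # "disk", "gpus", and "gpu_status" keys, as built in the function above.
    snapshot = get_integrated_resources(env_type="auto", workspace_root=".")
    print(snapshot["environment"], snapshot["cpu"], snapshot["gpu_status"])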
jupyter_ext/tests/test_handlers.py (new file)
@@ -0,0 +1,58 @@
+ import os
+
+ from jupyter_ext.handlers import (
+     DEFAULT_EXECUTE_COMMAND_TIMEOUT_MS,
+     _append_stream_output,
+     _resolve_path_in_workspace,
+     _resolve_stream_timeout_ms,
+     _resolve_timeout_ms,
+ )
+
+
+ def test_resolve_path_strips_duplicate_cwd_prefix() -> None:
+     workspace_root = "/workspace"
+     requested_cwd = os.path.join(workspace_root, "extensions", "jupyter")
+     path = os.path.join("extensions", "jupyter", "cal.py")
+     resolved = _resolve_path_in_workspace(path, workspace_root, requested_cwd)
+     assert resolved == os.path.join(requested_cwd, "cal.py")
+
+
+ def test_resolve_path_relative_to_cwd() -> None:
+     workspace_root = "/workspace"
+     requested_cwd = os.path.join(workspace_root, "extensions", "jupyter")
+     resolved = _resolve_path_in_workspace("cal.py", workspace_root, requested_cwd)
+     assert resolved == os.path.join(requested_cwd, "cal.py")
+
+
+ def test_resolve_timeout_ms_uses_default_on_invalid() -> None:
+     assert _resolve_timeout_ms(None) == DEFAULT_EXECUTE_COMMAND_TIMEOUT_MS
+     assert _resolve_timeout_ms("not-a-number") == DEFAULT_EXECUTE_COMMAND_TIMEOUT_MS
+     assert _resolve_timeout_ms(-1) == DEFAULT_EXECUTE_COMMAND_TIMEOUT_MS
+
+
+ def test_resolve_timeout_ms_accepts_value() -> None:
+     assert _resolve_timeout_ms(120000) == 120000
+
+
+ def test_resolve_stream_timeout_ms_uses_default_on_invalid() -> None:
+     assert _resolve_stream_timeout_ms(None) == DEFAULT_EXECUTE_COMMAND_TIMEOUT_MS
+     assert (
+         _resolve_stream_timeout_ms("not-a-number") == DEFAULT_EXECUTE_COMMAND_TIMEOUT_MS
+     )
+
+
+ def test_resolve_stream_timeout_ms_disables_on_non_positive() -> None:
+     assert _resolve_stream_timeout_ms(0) is None
+     assert _resolve_stream_timeout_ms(-1) is None
+
+
+ def test_append_stream_output_appends() -> None:
+     output, truncated = _append_stream_output("abc", "def", max_bytes=10)
+     assert output == "abcdef"
+     assert truncated is False
+
+
+ def test_append_stream_output_truncates() -> None:
+     output, truncated = _append_stream_output("abc", "def", max_bytes=5)
+     assert output == "abcde"
+     assert truncated is True
agent_server/langchain/executors/jupyter_executor.py (deleted file)
@@ -1,429 +0,0 @@
- """
- Jupyter Executor (Embedded Mode)
-
- Provides direct access to Jupyter kernel for code execution
- when running inside JupyterLab server.
-
- This executor uses the Jupyter server's kernel manager to:
- - Execute code in the current notebook's kernel
- - Add cells to the notebook
- - Retrieve execution results
- """
-
- import asyncio
- import logging
- from dataclasses import dataclass, field
- from typing import Any, Dict, List, Optional
-
- logger = logging.getLogger(__name__)
-
-
- @dataclass
- class ExecutionResult:
-     """Result of code execution in Jupyter kernel"""
-     success: bool
-     output: str = ""
-     error_type: Optional[str] = None
-     error_message: Optional[str] = None
-     traceback: Optional[List[str]] = None
-     execution_count: int = 0
-     cell_index: int = -1
-     display_data: List[Dict[str, Any]] = field(default_factory=list)
-
-     def to_dict(self) -> Dict[str, Any]:
-         return {
-             "success": self.success,
-             "output": self.output,
-             "error_type": self.error_type,
-             "error_message": self.error_message,
-             "traceback": self.traceback,
-             "execution_count": self.execution_count,
-             "cell_index": self.cell_index,
-             "display_data": self.display_data,
-         }
-
-
- class JupyterExecutor:
-     """
-     Executes code in Jupyter kernel (Embedded Mode).
-
-     In Embedded Mode, this class directly accesses the Jupyter server's
-     kernel manager and contents manager to execute code and modify notebooks.
-
-     Usage:
-         executor = JupyterExecutor()
-         await executor.initialize(kernel_id, notebook_path)
-         result = await executor.execute_code("print('hello')")
-     """
-
-     def __init__(self):
-         self._kernel_manager = None
-         self._contents_manager = None
-         self._kernel_id: Optional[str] = None
-         self._notebook_path: Optional[str] = None
-         self._kernel_client = None
-         self._initialized = False
-
-     @property
-     def is_initialized(self) -> bool:
-         return self._initialized
-
-     async def initialize(
-         self,
-         kernel_id: str,
-         notebook_path: str,
-         kernel_manager: Any = None,
-         contents_manager: Any = None,
-     ) -> bool:
-         """
-         Initialize the executor with kernel and notebook information.
-
-         Args:
-             kernel_id: ID of the kernel to use
-             notebook_path: Path to the notebook file
-             kernel_manager: Jupyter's MappingKernelManager (optional, auto-detect)
-             contents_manager: Jupyter's ContentsManager (optional, auto-detect)
-
-         Returns:
-             True if initialization successful
-         """
-         self._kernel_id = kernel_id
-         self._notebook_path = notebook_path
-
-         # Try to get kernel manager from Jupyter server if not provided
-         if kernel_manager is None:
-             kernel_manager = self._get_kernel_manager()
-
-         if contents_manager is None:
-             contents_manager = self._get_contents_manager()
-
-         self._kernel_manager = kernel_manager
-         self._contents_manager = contents_manager
-
-         if self._kernel_manager is None:
-             logger.warning("Kernel manager not available. Running in mock mode.")
-             self._initialized = True
-             return True
-
-         # Get kernel client
-         try:
-             self._kernel_client = self._kernel_manager.get_kernel(kernel_id)
-             self._initialized = True
-             logger.info(f"JupyterExecutor initialized with kernel {kernel_id}")
-             return True
-         except Exception as e:
-             logger.error(f"Failed to get kernel client: {e}")
-             self._initialized = False
-             return False
-
-     def _get_kernel_manager(self) -> Optional[Any]:
-         """Try to get kernel manager from Jupyter server app"""
-         try:
-             from jupyter_server.serverapp import ServerApp
-             app = ServerApp.instance()
-             return app.kernel_manager
-         except Exception:
-             try:
-                 # Fallback for older versions
-                 from notebook.notebookapp import NotebookApp
-                 app = NotebookApp.instance()
-                 return app.kernel_manager
-             except Exception:
-                 return None
-
-     def _get_contents_manager(self) -> Optional[Any]:
-         """Try to get contents manager from Jupyter server app"""
-         try:
-             from jupyter_server.serverapp import ServerApp
-             app = ServerApp.instance()
-             return app.contents_manager
-         except Exception:
-             try:
-                 from notebook.notebookapp import NotebookApp
-                 app = NotebookApp.instance()
-                 return app.contents_manager
-             except Exception:
-                 return None
-
-     async def execute_code(
-         self,
-         code: str,
-         timeout: float = 60.0,
-         add_to_notebook: bool = True,
-     ) -> ExecutionResult:
-         """
-         Execute Python code in the Jupyter kernel.
-
-         Args:
-             code: Python code to execute
-             timeout: Execution timeout in seconds
-             add_to_notebook: Whether to add the code as a new cell
-
-         Returns:
-             ExecutionResult with output or error
-         """
-         if not self._initialized:
-             return ExecutionResult(
-                 success=False,
-                 error_type="NotInitialized",
-                 error_message="Executor not initialized. Call initialize() first."
-             )
-
-         # If no kernel manager, use mock execution
-         if self._kernel_manager is None:
-             return await self._mock_execute(code)
-
-         try:
-             # Add cell to notebook if requested
-             cell_index = -1
-             if add_to_notebook and self._contents_manager:
-                 cell_index = await self._add_cell_to_notebook(code)
-
-             # Execute code in kernel
-             result = await self._execute_in_kernel(code, timeout)
-             result.cell_index = cell_index
-
-             return result
-
-         except asyncio.TimeoutError:
-             return ExecutionResult(
-                 success=False,
-                 error_type="TimeoutError",
-                 error_message=f"Execution timed out after {timeout} seconds"
-             )
-         except Exception as e:
-             logger.error(f"Execution failed: {e}")
-             return ExecutionResult(
-                 success=False,
-                 error_type=type(e).__name__,
-                 error_message=str(e)
-             )
-
-     async def _execute_in_kernel(
-         self,
-         code: str,
-         timeout: float
-     ) -> ExecutionResult:
-         """Execute code using kernel client"""
-         # This is a simplified implementation
-         # In production, you would use jupyter_client's async API
-
-         try:
-             from jupyter_client import KernelClient
-
-             # Get connection info from kernel manager
-             km = self._kernel_manager
-             kernel = km.get_kernel(self._kernel_id)
-
-             # Create a client and execute
-             client = kernel.client()
-             client.start_channels()
-
-             try:
-                 # Send execute request
-                 msg_id = client.execute(code)
-
-                 # Wait for results
-                 output_parts = []
-                 error_info = None
-                 execution_count = 0
-                 display_data = []
-
-                 deadline = asyncio.get_event_loop().time() + timeout
-
-                 while True:
-                     remaining = deadline - asyncio.get_event_loop().time()
-                     if remaining <= 0:
-                         raise asyncio.TimeoutError()
-
-                     try:
-                         msg = client.get_iopub_msg(timeout=min(remaining, 1.0))
-                     except Exception:
-                         continue
-
-                     if msg["parent_header"].get("msg_id") != msg_id:
-                         continue
-
-                     msg_type = msg["msg_type"]
-                     content = msg["content"]
-
-                     if msg_type == "stream":
-                         output_parts.append(content.get("text", ""))
-                     elif msg_type == "execute_result":
-                         output_parts.append(str(content.get("data", {}).get("text/plain", "")))
-                         execution_count = content.get("execution_count", 0)
-                     elif msg_type == "display_data":
-                         display_data.append(content.get("data", {}))
-                     elif msg_type == "error":
-                         error_info = {
-                             "ename": content.get("ename", "Error"),
-                             "evalue": content.get("evalue", ""),
-                             "traceback": content.get("traceback", []),
-                         }
-                     elif msg_type == "status" and content.get("execution_state") == "idle":
-                         break
-
-                 if error_info:
-                     return ExecutionResult(
-                         success=False,
-                         output="".join(output_parts),
-                         error_type=error_info["ename"],
-                         error_message=error_info["evalue"],
-                         traceback=error_info["traceback"],
-                         execution_count=execution_count,
-                         display_data=display_data,
-                     )
-
-                 return ExecutionResult(
-                     success=True,
-                     output="".join(output_parts),
-                     execution_count=execution_count,
-                     display_data=display_data,
-                 )
-
-             finally:
-                 client.stop_channels()
-
-         except ImportError:
-             logger.warning("jupyter_client not available, using mock execution")
-             return await self._mock_execute(code)
-         except Exception as e:
-             return ExecutionResult(
-                 success=False,
-                 error_type=type(e).__name__,
-                 error_message=str(e),
-             )
-
-     async def _add_cell_to_notebook(self, code: str) -> int:
-         """Add a new code cell to the notebook"""
-         if not self._contents_manager or not self._notebook_path:
-             return -1
-
-         try:
-             # Read current notebook
-             model = self._contents_manager.get(self._notebook_path, content=True)
-             notebook = model["content"]
-
-             # Create new cell
-             new_cell = {
-                 "cell_type": "code",
-                 "execution_count": None,
-                 "metadata": {},
-                 "outputs": [],
-                 "source": code,
-             }
-
-             # Add cell
-             notebook["cells"].append(new_cell)
-             cell_index = len(notebook["cells"]) - 1
-
-             # Save notebook
-             self._contents_manager.save(model, self._notebook_path)
-
-             return cell_index
-
-         except Exception as e:
-             logger.error(f"Failed to add cell to notebook: {e}")
-             return -1
-
-     async def add_markdown_cell(self, content: str) -> int:
-         """Add a markdown cell to the notebook"""
-         if not self._contents_manager or not self._notebook_path:
-             return -1
-
-         try:
-             model = self._contents_manager.get(self._notebook_path, content=True)
-             notebook = model["content"]
-
-             new_cell = {
-                 "cell_type": "markdown",
-                 "metadata": {},
-                 "source": content,
-             }
-
-             notebook["cells"].append(new_cell)
-             cell_index = len(notebook["cells"]) - 1
-
-             self._contents_manager.save(model, self._notebook_path)
-
-             return cell_index
-
-         except Exception as e:
-             logger.error(f"Failed to add markdown cell: {e}")
-             return -1
-
-     async def _mock_execute(self, code: str) -> ExecutionResult:
-         """Mock execution for testing or when kernel is not available"""
-         logger.info(f"Mock executing code: {code[:100]}...")
-
-         # Simple mock that just returns success
-         return ExecutionResult(
-             success=True,
-             output=f"[Mock] Code executed successfully:\n{code[:200]}",
-             execution_count=1,
-         )
-
-     async def get_notebook_state(self) -> Dict[str, Any]:
-         """Get current notebook state"""
-         if not self._contents_manager or not self._notebook_path:
-             return {
-                 "cell_count": 0,
-                 "imported_libraries": [],
-                 "defined_variables": [],
-             }
-
-         try:
-             model = self._contents_manager.get(self._notebook_path, content=True)
-             notebook = model["content"]
-             cells = notebook.get("cells", [])
-
-             # Extract imports and variables from code cells
-             imported_libraries = set()
-             defined_variables = set()
-
-             import re
-             import_pattern = re.compile(r'^(?:import|from)\s+([\w.]+)', re.MULTILINE)
-             var_pattern = re.compile(r'^(\w+)\s*=', re.MULTILINE)
-
-             for cell in cells:
-                 if cell.get("cell_type") != "code":
-                     continue
-
-                 source = cell.get("source", "")
-                 if isinstance(source, list):
-                     source = "".join(source)
-
-                 # Find imports
-                 for match in import_pattern.finditer(source):
-                     lib = match.group(1).split(".")[0]
-                     imported_libraries.add(lib)
-
-                 # Find variable definitions
-                 for match in var_pattern.finditer(source):
-                     defined_variables.add(match.group(1))
-
-             return {
-                 "cell_count": len(cells),
-                 "imported_libraries": list(imported_libraries),
-                 "defined_variables": list(defined_variables),
-             }
-
-         except Exception as e:
-             logger.error(f"Failed to get notebook state: {e}")
-             return {
-                 "cell_count": 0,
-                 "imported_libraries": [],
-                 "defined_variables": [],
-             }
-
-
- # Singleton instance
- _executor_instance: Optional[JupyterExecutor] = None
-
-
- def get_jupyter_executor() -> JupyterExecutor:
-     """Get or create the JupyterExecutor singleton"""
-     global _executor_instance
-     if _executor_instance is None:
-         _executor_instance = JupyterExecutor()
-     return _executor_instance
agent_server/langchain/middleware/__init__.py (deleted file)
@@ -1,36 +0,0 @@
- """
- LangChain Middleware for Jupyter Agent
-
- Middleware stack (execution order):
- 1. RAGMiddleware: Inject RAG context before model calls
- 2. CodeSearchMiddleware: Search workspace/notebook for relevant code
- 3. ValidationMiddleware: Validate code before execution
- 4. JupyterExecutionMiddleware: Execute code in Jupyter kernel
- 5. ErrorHandlingMiddleware: Classify errors and decide recovery strategy
-
- Built-in middleware used:
- - SummarizationMiddleware: Compress long conversations
- - ModelRetryMiddleware: Retry on rate limits
- - ToolRetryMiddleware: Retry failed tool calls
- - ModelCallLimitMiddleware: Prevent infinite loops
- """
-
- from agent_server.langchain.middleware.code_search_middleware import (
-     CodeSearchMiddleware,
- )
- from agent_server.langchain.middleware.error_handling_middleware import (
-     ErrorHandlingMiddleware,
- )
- from agent_server.langchain.middleware.jupyter_execution_middleware import (
-     JupyterExecutionMiddleware,
- )
- from agent_server.langchain.middleware.rag_middleware import RAGMiddleware
- from agent_server.langchain.middleware.validation_middleware import ValidationMiddleware
-
- __all__ = [
-     "RAGMiddleware",
-     "CodeSearchMiddleware",
-     "ValidationMiddleware",
-     "JupyterExecutionMiddleware",
-     "ErrorHandlingMiddleware",
- ]