ms-enclave 0.0.0__py3-none-any.whl → 0.0.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of ms-enclave might be problematic.
- ms_enclave/__init__.py +2 -2
- ms_enclave/cli/__init__.py +1 -0
- ms_enclave/cli/base.py +20 -0
- ms_enclave/cli/cli.py +27 -0
- ms_enclave/cli/start_server.py +84 -0
- ms_enclave/sandbox/__init__.py +27 -0
- ms_enclave/sandbox/boxes/__init__.py +16 -0
- ms_enclave/sandbox/boxes/base.py +267 -0
- ms_enclave/sandbox/boxes/docker_notebook.py +216 -0
- ms_enclave/sandbox/boxes/docker_sandbox.py +252 -0
- ms_enclave/sandbox/manager/__init__.py +11 -0
- ms_enclave/sandbox/manager/base.py +155 -0
- ms_enclave/sandbox/manager/http_manager.py +405 -0
- ms_enclave/sandbox/manager/local_manager.py +295 -0
- ms_enclave/sandbox/model/__init__.py +21 -0
- ms_enclave/sandbox/model/base.py +36 -0
- ms_enclave/sandbox/model/config.py +97 -0
- ms_enclave/sandbox/model/requests.py +57 -0
- ms_enclave/sandbox/model/responses.py +57 -0
- ms_enclave/sandbox/server/__init__.py +0 -0
- ms_enclave/sandbox/server/server.py +195 -0
- ms_enclave/sandbox/tools/__init__.py +4 -0
- ms_enclave/sandbox/tools/base.py +95 -0
- ms_enclave/sandbox/tools/sandbox_tool.py +46 -0
- ms_enclave/sandbox/tools/sandbox_tools/__init__.py +4 -0
- ms_enclave/sandbox/tools/sandbox_tools/file_operation.py +215 -0
- ms_enclave/sandbox/tools/sandbox_tools/notebook_executor.py +167 -0
- ms_enclave/sandbox/tools/sandbox_tools/python_executor.py +87 -0
- ms_enclave/sandbox/tools/sandbox_tools/shell_executor.py +63 -0
- ms_enclave/sandbox/tools/tool_info.py +141 -0
- ms_enclave/utils/__init__.py +1 -0
- ms_enclave/utils/json_schema.py +208 -0
- ms_enclave/utils/logger.py +106 -0
- ms_enclave/version.py +2 -2
- ms_enclave-0.0.1.dist-info/METADATA +314 -0
- ms_enclave-0.0.1.dist-info/RECORD +40 -0
- {ms_enclave-0.0.0.dist-info → ms_enclave-0.0.1.dist-info}/WHEEL +1 -1
- ms_enclave-0.0.1.dist-info/entry_points.txt +2 -0
- ms_enclave/run_server.py +0 -21
- ms_enclave-0.0.0.dist-info/METADATA +0 -329
- ms_enclave-0.0.0.dist-info/RECORD +0 -8
- {ms_enclave-0.0.0.dist-info → ms_enclave-0.0.1.dist-info}/licenses/LICENSE +0 -0
- {ms_enclave-0.0.0.dist-info → ms_enclave-0.0.1.dist-info}/top_level.txt +0 -0
ms_enclave/__init__.py
CHANGED
@@ -1,3 +1,3 @@
-from .version import __version__
+from .version import __release_date__, __version__

-__all__ = ['__version__']
+__all__ = ['__version__', '__release_date__']

ms_enclave/cli/__init__.py
ADDED
@@ -0,0 +1 @@
+# Copyright (c) Alibaba, Inc. and its affiliates.

ms_enclave/cli/base.py
ADDED
@@ -0,0 +1,20 @@
+# Copyright (c) Alibaba, Inc. and its affiliates.
+
+from abc import ABC, abstractmethod
+from argparse import ArgumentParser
+
+
+class CLICommand(ABC):
+    """
+    Base class for command line tool.
+
+    """
+
+    @staticmethod
+    @abstractmethod
+    def define_args(parsers: ArgumentParser):
+        raise NotImplementedError()
+
+    @abstractmethod
+    def execute(self):
+        raise NotImplementedError()

ms_enclave/cli/cli.py
ADDED
@@ -0,0 +1,27 @@
+# Copyright (c) Alibaba, Inc. and its affiliates.
+
+import argparse
+
+from ms_enclave import __version__
+from ms_enclave.cli.start_server import ServerCMD
+
+
+def run_cmd():
+    parser = argparse.ArgumentParser('MS-Enclave Command Line tool', usage='ms-enclave <command> [<args>]')
+    parser.add_argument('-v', '--version', action='version', version=f'ms-enclave {__version__}')
+    subparsers = parser.add_subparsers(help='MS-Enclave command line helper.')
+
+    ServerCMD.define_args(subparsers)
+
+    args = parser.parse_args()
+
+    if not hasattr(args, 'func'):
+        parser.print_help()
+        exit(1)
+
+    cmd = args.func(args)
+    cmd.execute()
+
+
+if __name__ == '__main__':
+    run_cmd()

ms_enclave/cli/start_server.py
ADDED
@@ -0,0 +1,84 @@
+# Copyright (c) Alibaba, Inc. and its affiliates.
+from argparse import ArgumentParser
+from typing import Optional
+
+from ms_enclave.cli.base import CLICommand
+from ms_enclave.sandbox import create_server
+from ms_enclave.utils import get_logger
+
+logger = get_logger()
+
+
+def subparser_func(args):
+    """ Function which will be called for a specific sub parser.
+    """
+    return ServerCMD(args)
+
+
+class ServerCMD(CLICommand):
+    name = 'server'
+
+    def __init__(self, args):
+        self.args = args
+
+    @staticmethod
+    def define_args(parsers: ArgumentParser):
+        """Define args for the server command.
+        """
+        parser = parsers.add_parser(ServerCMD.name, help='Start the MS-Enclave sandbox HTTP server')
+        add_argument(parser)
+        parser.set_defaults(func=subparser_func)
+
+    def execute(self):
+        """Start the sandbox server using provided CLI arguments."""
+        cleanup_interval: int = getattr(self.args, 'cleanup_interval', 300)
+        host: str = getattr(self.args, 'host', '0.0.0.0')
+        port: int = getattr(self.args, 'port', 8000)
+        log_level: str = getattr(self.args, 'log_level', 'info')
+        api_key: Optional[str] = getattr(self.args, 'api_key', None)
+
+        server = create_server(cleanup_interval=cleanup_interval, api_key=api_key)
+
+        logger.info('Starting Sandbox Server...')
+        logger.info('API docs: http://%s:%d/docs', host, port)
+        logger.info('Health check: http://%s:%d/health', host, port)
+
+        try:
+            server.run(host=host, port=port, log_level=log_level)
+        except KeyboardInterrupt:
+            logger.info('Server interrupted by user, shutting down...')
+        except Exception as e:  # pragma: no cover - runtime dependent
+            logger.error('Failed to start server: %s', e)
+            raise
+
+
+def add_argument(parser: ArgumentParser) -> None:
+    """Register command line arguments for the server command.
+
+    Args:
+        parser: The argparse parser to add arguments to.
+    """
+    parser.add_argument(
+        '--host', type=str, default='0.0.0.0', help='Host interface to bind the server (default: 0.0.0.0)'
+    )
+    parser.add_argument('--port', type=int, default=8000, help='Port for the HTTP server (default: 8000)')
+    parser.add_argument(
+        '--log-level',
+        type=str,
+        choices=['critical', 'error', 'warning', 'info', 'debug'],
+        default='info',
+        help='Log level for the server (default: info)'
+    )
+    parser.add_argument(
+        '--cleanup-interval',
+        type=int,
+        default=300,
+        metavar='SECONDS',
+        help='Background cleanup interval in seconds (default: 300)'
+    )
+    parser.add_argument(
+        '--api-key',
+        type=str,
+        default=None,
+        help='Optional API key to protect endpoints. If omitted, no authentication is enforced.'
+    )
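
For orientation, the `server` subcommand above is a thin wrapper: `ServerCMD.execute` just reads the parsed arguments and hands them to `create_server` and `server.run`. A minimal programmatic sketch of the same startup path, using only the calls shown above (the values are the CLI defaults and purely illustrative):

# Sketch: start the sandbox server without the CLI, mirroring ServerCMD.execute above.
# api_key=None means no authentication is enforced, per the --api-key help text.
from ms_enclave.sandbox import create_server

server = create_server(cleanup_interval=300, api_key=None)
server.run(host='0.0.0.0', port=8000, log_level='info')  # blocks until interrupted
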

ms_enclave/sandbox/__init__.py
ADDED
@@ -0,0 +1,27 @@
+# Copyright (c) Alibaba, Inc. and its affiliates.
+"""Modern agent sandbox system.
+
+A modular, extensible sandbox system for safe code execution with Docker isolation,
+FastAPI-based client/server architecture, and comprehensive tool support.
+"""
+
+from .boxes import DockerSandbox, Sandbox, SandboxFactory
+from .manager import HttpSandboxManager, LocalSandboxManager
+
+# Import main components
+from .model import (
+    DockerSandboxConfig,
+    ExecuteCodeRequest,
+    ExecuteCommandRequest,
+    ExecutionStatus,
+    HealthCheckResult,
+    ReadFileRequest,
+    SandboxConfig,
+    SandboxInfo,
+    SandboxStatus,
+    ToolExecutionRequest,
+    ToolResult,
+    WriteFileRequest,
+)
+from .server.server import SandboxServer, create_server
+from .tools import PythonExecutor, Tool, ToolFactory
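
Because the package `__init__` above re-exports the core pieces, client code can import them from `ms_enclave.sandbox` rather than the individual submodules; a small illustrative import using only names exported above:

# Illustrative: all of these names are re-exported by ms_enclave.sandbox above.
from ms_enclave.sandbox import (
    DockerSandbox,
    DockerSandboxConfig,
    LocalSandboxManager,
    SandboxFactory,
    create_server,
)
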

ms_enclave/sandbox/boxes/__init__.py
ADDED
@@ -0,0 +1,16 @@
+"""Sandbox implementations."""
+
+from .base import Sandbox, SandboxFactory, register_sandbox
+from .docker_notebook import DockerNotebookSandbox
+from .docker_sandbox import DockerSandbox
+
+__all__ = [
+    # Base interfaces
+    'Sandbox',
+    'SandboxFactory',
+    'register_sandbox',
+
+    # Implementations
+    'DockerSandbox',
+    'DockerNotebookSandbox',
+]

ms_enclave/sandbox/boxes/base.py
ADDED
@@ -0,0 +1,267 @@
+"""Base sandbox interface and factory."""
+
+import abc
+from datetime import datetime
+from typing import Any, Dict, List, Optional, Type, Union
+
+import shortuuid as uuid
+
+from ms_enclave.utils import get_logger
+
+from ..model import (
+    CommandResult,
+    DockerNotebookConfig,
+    DockerSandboxConfig,
+    SandboxConfig,
+    SandboxInfo,
+    SandboxStatus,
+    SandboxType,
+    ToolResult,
+)
+from ..tools import Tool, ToolFactory
+
+logger = get_logger()
+
+
+class Sandbox(abc.ABC):
+    """Abstract base class for all sandbox implementations."""
+
+    def __init__(self, config: SandboxConfig, sandbox_id: Optional[str] = None):
+        """Initialize sandbox.
+
+        Args:
+            config: Sandbox configuration
+            sandbox_id: Optional sandbox ID (will be generated if not provided)
+        """
+        self.id = sandbox_id or str(uuid.uuid())
+        self.config = config
+        self.status = SandboxStatus.INITIALIZING
+        self.created_at = datetime.now()
+        self.updated_at = datetime.now()
+        self.metadata: Dict[str, Any] = {}
+        self._tools: Dict[str, Tool] = {}
+
+    @property
+    @abc.abstractmethod
+    def sandbox_type(self) -> SandboxType:
+        """Return the sandbox type identifier."""
+        pass
+
+    @abc.abstractmethod
+    async def start(self) -> None:
+        """Start the sandbox environment."""
+        pass
+
+    @abc.abstractmethod
+    async def stop(self) -> None:
+        """Stop the sandbox environment."""
+        pass
+
+    @abc.abstractmethod
+    async def cleanup(self) -> None:
+        """Clean up sandbox resources."""
+        pass
+
+    async def initialize_tools(self) -> None:
+        """Initialize sandbox tools."""
+        for tool_name, config in self.config.tools_config.items():
+            try:
+                tool = ToolFactory.create_tool(tool_name, **config)
+                if tool.enabled:
+                    # Check if tool is compatible with this sandbox
+                    if (tool.required_sandbox_type is None or tool.required_sandbox_type == self.sandbox_type):
+                        self._tools[tool_name] = tool
+                    else:
+                        logger.warning(
+                            f'Tool {tool_name} requires {tool.required_sandbox_type} but sandbox is {self.sandbox_type}'
+                        )
+            except Exception as e:
+                logger.error(f'Failed to initialize tool {tool_name}: {e}')
+
+    def get_available_tools(self) -> Dict[str, Any]:
+        """Get list of available tools."""
+        return {tool.name: tool.schema for tool in self._tools.values() if tool.enabled}
+
+    def get_tool(self, tool_name: str) -> Optional[Tool]:
+        """Get tool instance by type.
+
+        Args:
+            tool_name: Tool name
+
+        Returns:
+            Tool instance or None if not available
+        """
+        return self._tools.get(tool_name)
+
+    def add_tool(self, tool: Tool) -> None:
+        """Add a tool to the sandbox.
+
+        Args:
+            tool: Tool instance to add
+        """
+        if tool.name in self._tools:
+            logger.warning(f'Tool {tool.name} is already added to the sandbox')
+            return
+        if tool.enabled:
+            if (tool.required_sandbox_type is None or tool.required_sandbox_type == self.sandbox_type):
+                self._tools[tool.name] = tool
+            else:
+                logger.warning(
+                    f'Tool {tool.name} requires {tool.required_sandbox_type} but sandbox is {self.sandbox_type}'
+                )
+        else:
+            logger.warning(f'Tool {tool.name} is not enabled and cannot be added')
+
+    async def execute_tool(self, tool_name: str, parameters: Dict[str, Any]) -> ToolResult:
+        """Execute a tool with given parameters.
+
+        Args:
+            tool_name: Tool name
+            parameters: Tool parameters
+
+        Returns:
+            Tool execution result
+
+        Raises:
+            ValueError: If tool is not found or not enabled
+            TimeoutError: If tool execution exceeds timeout
+            Exception: For other execution errors
+        """
+        tool = self.get_tool(tool_name)
+        if not tool:
+            raise ValueError(f'Tool {tool_name} is not available')
+        if not tool.enabled:
+            raise ValueError(f'Tool {tool_name} is not enabled')
+
+        result = await tool.execute(sandbox_context=self, **parameters)
+        return result
+
+    async def execute_command(self, command: Union[str, List[str]], timeout: Optional[int] = None) -> CommandResult:
+        """Execute a command in the sandbox environment.
+
+        Args:
+            command: Command to execute
+            timeout: Optional execution timeout in seconds
+        """
+        raise NotImplementedError('execute_command must be implemented by subclasses')
+
+    @abc.abstractmethod
+    async def get_execution_context(self) -> Any:
+        """Get the execution context for tools (e.g., container, process, etc.)."""
+        pass
+
+    def update_status(self, status: SandboxStatus) -> None:
+        """Update sandbox status.
+
+        Args:
+            status: New status
+        """
+        self.status = status
+        self.updated_at = datetime.now()
+
+    def get_info(self) -> SandboxInfo:
+        """Get sandbox information.
+
+        Returns:
+            Sandbox information
+        """
+        return SandboxInfo(
+            id=self.id,
+            status=self.status,
+            type=self.sandbox_type,
+            config=self.config.model_dump(exclude_none=True),
+            created_at=self.created_at,
+            updated_at=self.updated_at,
+            metadata=self.metadata,
+            available_tools=self.get_available_tools()
+        )
+
+    async def __aenter__(self):
+        """Async context manager entry."""
+        await self.start()
+        return self
+
+    async def __aexit__(self, exc_type, exc_val, exc_tb):
+        """Async context manager exit."""
+        await self.stop()
+
+
+class SandboxFactory:
+    """Factory for creating sandbox instances."""
+
+    _sandboxes: Dict[SandboxType, Type[Sandbox]] = {}
+
+    @classmethod
+    def register_sandbox(cls, sandbox_type: SandboxType, sandbox_class: Type[Sandbox]):
+        """Register a sandbox class.
+
+        Args:
+            sandbox_type: Sandbox type identifier
+            sandbox_class: Sandbox class
+        """
+        cls._sandboxes[sandbox_type] = sandbox_class
+
+    @classmethod
+    def create_sandbox(
+        cls,
+        sandbox_type: SandboxType,
+        config: Optional[Union[SandboxConfig, Dict]] = None,
+        sandbox_id: Optional[str] = None
+    ) -> Sandbox:
+        """Create a sandbox instance.
+
+        Args:
+            sandbox_type: Sandbox type
+            config: Sandbox configuration
+            sandbox_id: Optional sandbox ID
+
+        Returns:
+            Sandbox instance
+
+        Raises:
+            ValueError: If sandbox type is not registered
+        """
+        if sandbox_type not in cls._sandboxes:
+            raise ValueError(f'Sandbox type {sandbox_type} is not registered')
+
+        # Parse config based on sandbox type
+        if not config:
+            if sandbox_type == SandboxType.DOCKER:
+                config = DockerSandboxConfig()
+            elif sandbox_type == SandboxType.DOCKER_NOTEBOOK:
+                config = DockerNotebookConfig()
+            else:
+                config = SandboxConfig()
+        elif isinstance(config, dict):
+            if sandbox_type == SandboxType.DOCKER:
+                config = DockerSandboxConfig(**config)
+            elif sandbox_type == SandboxType.DOCKER_NOTEBOOK:
+                config = DockerNotebookConfig(**config)
+            else:
+                config = SandboxConfig(**config)
+
+        sandbox_class = cls._sandboxes[sandbox_type]
+        return sandbox_class(config, sandbox_id)
+
+    @classmethod
+    def get_available_types(cls) -> List[SandboxType]:
+        """Get list of available sandbox types.
+
+        Returns:
+            List of available sandbox types
+        """
+        return list(cls._sandboxes.keys())
+
+
+def register_sandbox(sandbox_type: SandboxType):
+    """Decorator for registering sandboxes.
+
+    Args:
+        sandbox_type: Sandbox type identifier
+    """
+
+    def decorator(sandbox_class: Type[Sandbox]):
+        SandboxFactory.register_sandbox(sandbox_type, sandbox_class)
+        return sandbox_class
+
+    return decorator
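
To make the factory and lifecycle mechanics above concrete, here is a hedged usage sketch. It assumes a sandbox implementation (e.g. DockerSandbox) has been imported so its @register_sandbox decorator has run, that SandboxType is importable from ms_enclave.sandbox.model, and that the Python executor tool registers under the name 'python_executor' with a 'code' parameter; none of those details are confirmed by this excerpt alone.

# Hedged sketch of SandboxFactory plus the async context manager protocol defined above.
import asyncio

from ms_enclave.sandbox import SandboxFactory  # importing the package registers the built-in sandboxes
from ms_enclave.sandbox.model import SandboxType  # assumed import path for SandboxType


async def main():
    # With no config, create_sandbox falls back to DockerSandboxConfig() for SandboxType.DOCKER.
    sandbox = SandboxFactory.create_sandbox(SandboxType.DOCKER)

    # __aenter__/__aexit__ call start()/stop(), scoping the container lifecycle to this block.
    async with sandbox:
        await sandbox.initialize_tools()  # builds tools from config.tools_config
        print(sandbox.get_info())         # SandboxInfo: id, status, type, available tools

        # 'python_executor' and its 'code' parameter are assumed names, for illustration only.
        result = await sandbox.execute_tool('python_executor', {'code': 'print(1 + 1)'})
        print(result)


asyncio.run(main())
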

ms_enclave/sandbox/boxes/docker_notebook.py
ADDED
@@ -0,0 +1,216 @@
+# flake8: noqa E501
+import asyncio
+import json
+import tempfile
+from pathlib import Path
+from textwrap import dedent
+from typing import Optional
+
+from docker import DockerClient
+
+from ms_enclave.utils import get_logger
+
+from ..model import DockerNotebookConfig, SandboxStatus, SandboxType
+from .base import register_sandbox
+from .docker_sandbox import DockerSandbox
+
+logger = get_logger()
+
+
+@register_sandbox(SandboxType.DOCKER_NOTEBOOK)
+class DockerNotebookSandbox(DockerSandbox):
+    """
+    Docker sandbox that executes Python code using Jupyter Kernel Gateway.
+    """
+
+    def __init__(
+        self,
+        config: DockerNotebookConfig,
+        sandbox_id: Optional[str] = None,
+    ):
+        """
+        Initialize the Docker-based Jupyter Kernel Gateway executor.
+
+        Args:
+            config: Docker sandbox configuration
+            sandbox_id: Optional sandbox ID
+            host: Host to bind to.
+            port: Port to bind to.
+        """
+        super().__init__(config, sandbox_id)
+
+        self.config: DockerNotebookConfig = config
+        self.host = self.config.host
+        self.port = self.config.port
+        self.kernel_id = None
+        self.ws = None
+        self.base_url = None
+        self.config.ports['8888/tcp'] = (self.host, self.port)
+        self.config.network_enabled = True  # Ensure network is enabled for Jupyter
+
+    @property
+    def sandbox_type(self) -> SandboxType:
+        """Return sandbox type."""
+        return SandboxType.DOCKER_NOTEBOOK
+
+    async def start(self) -> None:
+        """Start the Docker container with Jupyter Kernel Gateway."""
+        try:
+            self.update_status(SandboxStatus.INITIALIZING)
+
+            # Initialize Docker client first
+            import docker
+            self.client = docker.from_env()
+
+            # Build Jupyter image if needed before creating container
+            await self._build_jupyter_image()
+
+            # Now start the base container with the Jupyter image
+            await super().start()
+
+            # Setup Jupyter kernel gateway services
+            await self._setup_jupyter()
+
+            self.update_status(SandboxStatus.RUNNING)
+
+        except Exception as e:
+            self.update_status(SandboxStatus.ERROR)
+            self.metadata['error'] = str(e)
+            logger.error(f'Failed to start Jupyter Docker sandbox: {e}')
+            raise RuntimeError(f'Failed to start Jupyter Docker sandbox: {e}')
+
+    async def _setup_jupyter(self) -> None:
+        """Setup Jupyter Kernel Gateway services in the container."""
+        try:
+            # Wait for Jupyter Kernel Gateway to be ready
+            await self._wait_for_jupyter_ready()
+
+            # Create kernel and establish websocket connection
+            await self._create_kernel()
+
+        except Exception as e:
+            logger.error(f'Failed to setup Jupyter: {e}')
+            raise
+
+    async def _wait_for_jupyter_ready(self) -> None:
+        """Wait for Jupyter Kernel Gateway to be ready."""
+        import requests
+
+        self.base_url = f'http://{self.host}:{self.port}'
+        max_retries = 10  # Wait up to 30 seconds
+        retry_interval = 3  # Check every 3 second
+
+        for attempt in range(max_retries):
+            try:
+                # Try to get the API status
+                response = requests.get(f'{self.base_url}/api', timeout=5)
+                if response.status_code == 200:
+                    logger.info(f'Jupyter Kernel Gateway is ready at {self.base_url}')
+                    return
+            except requests.exceptions.RequestException:
+                # Connection failed, Jupyter not ready yet
+                pass
+
+            if attempt < max_retries - 1:
+                logger.info(f'Waiting for Jupyter Kernel Gateway to be ready... (attempt {attempt + 1}/{max_retries})')
+                await asyncio.sleep(retry_interval)
+
+        raise RuntimeError(f'Jupyter Kernel Gateway failed to become ready within {max_retries} seconds')
+
+    async def _build_jupyter_image(self) -> None:
+        """Build or ensure Jupyter image exists."""
+        try:
+            # Check if image exists
+            self.client.images.get(self.config.image)
+            logger.info(f'Using existing Docker image: {self.config.image}')
+        except Exception:
+            logger.info(f'Building Docker image {self.config.image}...')
+
+            # Create Dockerfile
+            dockerfile_content = dedent(
+                """\
+                FROM python:3.12-slim
+
+                RUN pip install jupyter_kernel_gateway jupyter_client ipykernel
+
+                # Install and register the Python kernel
+                RUN python -m ipykernel install --sys-prefix --name python3 --display-name "Python 3"
+
+                EXPOSE 8888
+                CMD ["jupyter", "kernelgateway", "--KernelGatewayApp.ip=0.0.0.0", "--KernelGatewayApp.port=8888", "--KernelGatewayApp.allow_origin=*"]
+                """
+            )
+
+            with tempfile.TemporaryDirectory() as tmpdir:
+                dockerfile_path = Path(tmpdir) / 'Dockerfile'
+                dockerfile_path.write_text(dockerfile_content)
+
+                # Build image with output
+                def build_image():
+                    build_logs = self.client.images.build(
+                        path=tmpdir, dockerfile='Dockerfile', tag=self.config.image, rm=True
+                    )
+                    # Process and log build output
+                    for log in build_logs[1]:  # build_logs[1] contains the build log generator
+                        if 'stream' in log:
+                            logger.info(f"Docker build: {log['stream'].strip()}")
+                        elif 'error' in log:
+                            logger.error(f"Docker build error: {log['error']}")
+                    return build_logs[0]  # Return the built image
+
+                await asyncio.get_event_loop().run_in_executor(None, build_image)
+
+    async def _create_kernel(self) -> None:
+        """Create a new kernel and establish websocket connection."""
+        import requests
+
+        # Create new kernel via HTTP
+        response = requests.post(f'{self.base_url}/api/kernels')
+        if response.status_code != 201:
+            error_details = {
+                'status_code': response.status_code,
+                'headers': dict(response.headers),
+                'url': response.url,
+                'body': response.text,
+                'request_method': response.request.method,
+                'request_headers': dict(response.request.headers),
+                'request_body': response.request.body,
+            }
+            raise RuntimeError(f'Failed to create kernel: {json.dumps(error_details, indent=2)}')
+
+        self.kernel_id = response.json()['id']
+
+        # Establish websocket connection
+        try:
+            from websocket import create_connection
+            ws_url = f'ws://{self.host}:{self.port}/api/kernels/{self.kernel_id}/channels'
+            self.ws = create_connection(ws_url)
+            logger.info(f'Kernel {self.kernel_id} created and connected')
+        except ImportError:
+            raise RuntimeError('websocket-client package is required. Install with: pip install websocket-client')
+
+    async def cleanup(self) -> None:
+        """Clean up Jupyter resources and Docker container."""
+        try:
+            # Close websocket connection
+            if self.ws:
+                try:
+                    self.ws.close()
+                except Exception:
+                    pass
+                self.ws = None
+
+            # Delete kernel
+            if self.kernel_id and self.base_url:
+                try:
+                    import requests
+                    requests.delete(f'{self.base_url}/api/kernels/{self.kernel_id}')
+                except Exception:
+                    pass
+                self.kernel_id = None
+
+        except Exception as e:
+            logger.error(f'Error during Jupyter cleanup: {e}')
+
+        # Call parent cleanup
+        await super().cleanup()
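
Stripped of the class machinery, the Kernel Gateway interaction above reduces to a readiness probe, a kernel-creation POST, a websocket to the kernel's channels endpoint, and a DELETE on teardown. A standalone sketch of that protocol, assuming a gateway is already listening on localhost:8888 (the code-execution traffic itself is handled by the notebook executor tool, which is not part of this excerpt):

# Sketch of the gateway endpoints used by DockerNotebookSandbox, against an assumed
# gateway at localhost:8888; not part of the package diff above.
import requests
from websocket import create_connection  # websocket-client, as required above

base_url = 'http://localhost:8888'

requests.get(f'{base_url}/api', timeout=5).raise_for_status()      # readiness probe
kernel_id = requests.post(f'{base_url}/api/kernels').json()['id']  # expects HTTP 201

ws = create_connection(f'ws://localhost:8888/api/kernels/{kernel_id}/channels')
# ... Jupyter protocol messages would be exchanged over this websocket ...
ws.close()

requests.delete(f'{base_url}/api/kernels/{kernel_id}')             # delete the kernel
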