ms-enclave 0.0.1-py3-none-any.whl → 0.0.2-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of ms-enclave might be problematic.
- ms_enclave/sandbox/boxes/base.py +5 -2
- ms_enclave/sandbox/boxes/docker_notebook.py +3 -5
- ms_enclave/sandbox/boxes/docker_sandbox.py +87 -22
- ms_enclave/sandbox/model/config.py +2 -2
- ms_enclave/sandbox/tools/sandbox_tools/file_operation.py +133 -17
- ms_enclave/utils/logger.py +115 -51
- ms_enclave/version.py +2 -2
- {ms_enclave-0.0.1.dist-info → ms_enclave-0.0.2.dist-info}/METADATA +56 -4
- {ms_enclave-0.0.1.dist-info → ms_enclave-0.0.2.dist-info}/RECORD +13 -13
- {ms_enclave-0.0.1.dist-info → ms_enclave-0.0.2.dist-info}/WHEEL +0 -0
- {ms_enclave-0.0.1.dist-info → ms_enclave-0.0.2.dist-info}/entry_points.txt +0 -0
- {ms_enclave-0.0.1.dist-info → ms_enclave-0.0.2.dist-info}/licenses/LICENSE +0 -0
- {ms_enclave-0.0.1.dist-info → ms_enclave-0.0.2.dist-info}/top_level.txt +0 -0
ms_enclave/sandbox/boxes/base.py
CHANGED
@@ -33,7 +33,7 @@ class Sandbox(abc.ABC):
             config: Sandbox configuration
             sandbox_id: Optional sandbox ID (will be generated if not provided)
         """
-        self.id = sandbox_id or
+        self.id = sandbox_id or uuid.ShortUUID(alphabet='23456789abcdefghijkmnopqrstuvwxyz').random(length=8)
         self.config = config
         self.status = SandboxStatus.INITIALIZING
         self.created_at = datetime.now()

@@ -136,12 +136,15 @@ class Sandbox(abc.ABC):
         result = await tool.execute(sandbox_context=self, **parameters)
         return result

-    async def execute_command(
+    async def execute_command(
+        self, command: Union[str, List[str]], timeout: Optional[int] = None, stream: bool = True
+    ) -> CommandResult:
         """Execute a command in the sandbox environment.

         Args:
             command: Command to execute
             timeout: Optional execution timeout in seconds
+            stream: Whether to stream output (if supported)
         """
         raise NotImplementedError('execute_command must be implemented by subclasses')

ms_enclave/sandbox/boxes/docker_notebook.py
CHANGED

@@ -6,8 +6,6 @@ from pathlib import Path
 from textwrap import dedent
 from typing import Optional

-from docker import DockerClient
-
 from ms_enclave.utils import get_logger

 from ..model import DockerNotebookConfig, SandboxStatus, SandboxType

@@ -45,7 +43,7 @@ class DockerNotebookSandbox(DockerSandbox):
         self.kernel_id = None
         self.ws = None
         self.base_url = None
-        self.config.ports['8888/tcp'] =
+        self.config.ports['8888/tcp'] = self.port
         self.config.network_enabled = True  # Ensure network is enabled for Jupyter

     @property

@@ -153,9 +151,9 @@ class DockerNotebookSandbox(DockerSandbox):
             # Process and log build output
             for log in build_logs[1]:  # build_logs[1] contains the build log generator
                 if 'stream' in log:
-                    logger.info(f"
+                    logger.info(f"[📦 {self.id}] {log['stream'].strip()}")
                 elif 'error' in log:
-                    logger.error(f"
+                    logger.error(f"[📦 {self.id}] {log['error']}")
             return build_logs[0]  # Return the built image

         await asyncio.get_event_loop().run_in_executor(None, build_image)

ms_enclave/sandbox/boxes/docker_sandbox.py
CHANGED

@@ -132,40 +132,105 @@ class DockerSandbox(Sandbox):
         """Return the container for tool execution."""
         return self.container

-
-        """Execute
-
+    def _run_streaming(self, command: Union[str, List[str]]) -> tuple[int, str, str]:
+        """Execute command with streaming logs using low-level API.
+
+        Returns:
+            A tuple of (exit_code, stdout, stderr)
+        """
+        if not self.client or not self.container:
             raise RuntimeError('Container is not running')

+        # Use low-level API for precise control over streaming and exit code.
+        exec_id = self.client.api.exec_create(
+            container=self.container.id,
+            cmd=command,
+            tty=False,
+        )['Id']
+
+        stdout_parts: List[str] = []
+        stderr_parts: List[str] = []
+
         try:
-
-
-
-
-
-
-
-
-
-
+            for chunk in self.client.api.exec_start(exec_id, stream=True, demux=True):
+                if not chunk:
+                    continue
+                out, err = chunk  # each is Optional[bytes]
+                if out:
+                    text = out.decode('utf-8', errors='replace')
+                    stdout_parts.append(text)
+                    for line in text.splitlines():
+                        logger.info(f'[📦 {self.id}] {line}')
+                if err:
+                    text = err.decode('utf-8', errors='replace')
+                    stderr_parts.append(text)
+                    for line in text.splitlines():
+                        logger.error(f'[📦 {self.id}] {line}')
+        finally:
+            inspect = self.client.api.exec_inspect(exec_id)
+            exit_code = inspect.get('ExitCode')
+            if exit_code is None:
+                exit_code = -1
+
+        return exit_code, ''.join(stdout_parts), ''.join(stderr_parts)
+
+    def _run_buffered(self, command: Union[str, List[str]]) -> tuple[int, str, str]:
+        """Execute command and return buffered output using high-level API.
+
+        Returns:
+            A tuple of (exit_code, stdout, stderr)
+        """
+        if not self.container:
+            raise RuntimeError('Container is not running')

-
-
+        res = self.container.exec_run(command, tty=False, stream=False, demux=True)
+        out_tuple = res.output
+        if isinstance(out_tuple, tuple):
+            out_bytes, err_bytes = out_tuple
+        else:
+            # Fallback: when demux was not honored, treat all as stdout
+            out_bytes, err_bytes = out_tuple, b''

-
-
-
-
-
-
+        stdout = out_bytes.decode('utf-8', errors='replace') if out_bytes else ''
+        stderr = err_bytes.decode('utf-8', errors='replace') if err_bytes else ''
+        return res.exit_code, stdout, stderr
+
+    async def execute_command(
+        self, command: Union[str, List[str]], timeout: Optional[int] = None, stream: bool = True
+    ) -> CommandResult:
+        """Execute a command in the container.
+
+        When stream=True (default), logs are printed in real-time through the logger,
+        while stdout/stderr are still accumulated and returned in the result.
+        When stream=False, the command is executed and buffered, returning the full output at once.
+
+        Args:
+            command: Command to run (str or list)
+            timeout: Optional timeout in seconds
+            stream: Whether to stream logs in real time
+
+        Returns:
+            CommandResult with status, exit_code, stdout and stderr
+        """
+        if not self.container or not self.client:
+            raise RuntimeError('Container is not running')
+
+        loop = asyncio.get_running_loop()
+
+        run_func = self._run_streaming if stream else self._run_buffered
+        try:
+            exit_code, stdout, stderr = await asyncio.wait_for(
+                loop.run_in_executor(None, lambda: run_func(command)), timeout=timeout
             )
+            status = ExecutionStatus.SUCCESS if exit_code == 0 else ExecutionStatus.ERROR
+            return CommandResult(command=command, status=status, exit_code=exit_code, stdout=stdout, stderr=stderr)
         except asyncio.TimeoutError:
             return CommandResult(
                 command=command,
                 status=ExecutionStatus.TIMEOUT,
                 exit_code=-1,
                 stdout='',
-                stderr=f'Command timed out after {
+                stderr=f'Command timed out after {timeout} seconds'
             )
         except Exception as e:
             return CommandResult(command=command, status=ExecutionStatus.ERROR, exit_code=-1, stdout='', stderr=str(e))

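For orientation, here is a minimal usage sketch of the reworked `execute_command` API; the `sandbox` object and the commands are illustrative and assume a running `DockerSandbox` created elsewhere:

```python
import asyncio


async def demo(sandbox):
    # stream=True (the default): each stdout/stderr line is echoed through the
    # logger as it arrives, and the full output is still returned in the result.
    streamed = await sandbox.execute_command('ls -la /tmp', timeout=30)
    print(streamed.exit_code, streamed.stdout)

    # stream=False: nothing is logged while the command runs; output is buffered
    # and returned all at once.
    buffered = await sandbox.execute_command(['python', '-c', 'print("hi")'], stream=False)
    print(buffered.status, buffered.stderr)

# asyncio.run(demo(sandbox)) would drive this once a sandbox instance exists.
```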
ms_enclave/sandbox/model/config.py
CHANGED

@@ -1,6 +1,6 @@
 """Configuration data models."""

-from typing import Any, Dict, List, Optional, Union
+from typing import Any, Dict, List, Optional, Tuple, Union

 from pydantic import BaseModel, Field, field_validator

@@ -25,7 +25,7 @@ class DockerSandboxConfig(SandboxConfig):
         default_factory=dict,
         description="Volume mounts. Format: { host_path: {'bind': container_path, 'mode': 'rw|ro'} }"
     )
-    ports: Dict[str, str] = Field(default_factory=dict, description='Port mappings')
+    ports: Dict[str, Union[int, str, Tuple[str, int]]] = Field(default_factory=dict, description='Port mappings')
     network: Optional[str] = Field('bridge', description='Network name')
     memory_limit: str = Field(default='1g', description='Memory limit')
     cpu_limit: float = Field(default=1.0, description='CPU limit')

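The widened `ports` type mirrors the value shapes docker-py accepts for a port-mapping dict. A short sketch of what the field can now hold (the import path and the `image` argument follow the README examples and are assumptions, not part of this diff):

```python
from ms_enclave.sandbox.model import DockerSandboxConfig  # import path assumed

config = DockerSandboxConfig(image='python:3.11-slim')

# int: publish container port 8888 on host port 8888
config.ports['8888/tcp'] = 8888
# str: host port given as a string (the only shape the old Dict[str, str] allowed)
config.ports['8000/tcp'] = '8080'
# tuple: bind to a specific host interface and port
config.ports['5432/tcp'] = ('127.0.0.1', 5433)
```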
ms_enclave/sandbox/tools/sandbox_tools/file_operation.py
CHANGED

@@ -3,6 +3,7 @@
 import io
 import os
 import tarfile
+import uuid
 from typing import TYPE_CHECKING, Literal, Optional

 from ms_enclave.sandbox.model import ExecutionStatus, SandboxType, ToolResult

@@ -16,6 +17,12 @@ if TYPE_CHECKING:

 @register_tool('file_operation')
 class FileOperation(SandboxTool):
+    """Tool for performing file operations within Docker containers.
+
+    Supports read, write, create, delete, list, and exists operations.
+    Uses temporary files and copy strategy to avoid permission issues
+    with mounted directories.
+    """

     _name = 'file_operation'
     _sandbox_type = SandboxType.DOCKER

@@ -53,17 +60,31 @@ class FileOperation(SandboxTool):
         content: Optional[str] = None,
         encoding: str = 'utf-8'
     ) -> ToolResult:
-        """Perform file operations in the Docker container.
+        """Perform file operations in the Docker container.
+
+        Args:
+            sandbox_context: The sandbox instance to execute operations in
+            operation: Type of operation (read, write, create, delete, list, exists)
+            file_path: Path to the file or directory
+            content: Content to write (required for write/create operations)
+            encoding: File encoding for read/write operations

+        Returns:
+            ToolResult with operation status and output/error information
+        """
+
+        # Validate file path is provided and not empty
         if not file_path.strip():
             return ToolResult(
                 tool_name=self.name, status=ExecutionStatus.ERROR, output='', error='No file path provided'
             )

         try:
+            # Route to appropriate operation handler
             if operation == 'read':
                 return await self._read_file(sandbox_context, file_path, encoding)
             elif operation == 'write':
+                # Write operation requires content parameter
                 if content is None:
                     return ToolResult(
                         tool_name=self.name,

@@ -79,6 +100,7 @@ class FileOperation(SandboxTool):
             elif operation == 'exists':
                 return await self._check_exists(sandbox_context, file_path)
             elif operation == 'create':
+                # Create operation with empty content if none provided
                 if content is None:
                     content = ''
                 return await self._write_file(sandbox_context, file_path, content, encoding)

@@ -91,14 +113,24 @@ class FileOperation(SandboxTool):
             )

         except Exception as e:
+            # Catch-all error handler for unexpected exceptions
             return ToolResult(
                 tool_name=self.name, status=ExecutionStatus.ERROR, output='', error=f'Operation failed: {str(e)}'
             )

     async def _read_file(self, sandbox_context: 'Sandbox', file_path: str, encoding: str) -> ToolResult:
-        """Read file content from the container.
+        """Read file content from the container using cat command.
+
+        Args:
+            sandbox_context: Sandbox instance
+            file_path: Path to file to read
+            encoding: File encoding (currently not used by cat command)
+
+        Returns:
+            ToolResult with file content or error message
+        """
         try:
-            # Use cat command to read file content
+            # Use cat command to read file content - handles most file types well
             result = await sandbox_context.execute_command(f'cat "{file_path}"')

             if result.exit_code == 0:

@@ -116,15 +148,49 @@ class FileOperation(SandboxTool):
             )

     async def _write_file(self, sandbox_context: 'Sandbox', file_path: str, content: str, encoding: str) -> ToolResult:
-        """Write content to a file in the container.
+        """Write content to a file in the container using temp file strategy.
+
+        This method uses a two-step process to avoid permission issues:
+        1. Write content to a temporary file in /tmp (always writable)
+        2. Copy the temp file to the target location using cp command
+        3. Clean up the temporary file
+
+        Args:
+            sandbox_context: Sandbox instance
+            file_path: Target file path to write to
+            content: Content to write to file
+            encoding: File encoding for content
+
+        Returns:
+            ToolResult indicating success or failure
+        """
         try:
-            # Create directory if it doesn't exist
+            # Create target directory structure if it doesn't exist
             dir_path = os.path.dirname(file_path)
             if dir_path:
                 await sandbox_context.execute_command(f'mkdir -p "{dir_path}"')

-            #
-
+            # Generate unique temporary file name to avoid conflicts
+            temp_file = f'/tmp/file_op_{uuid.uuid4().hex}'
+
+            # Step 1: Write content to temporary location using tar archive
+            await self._write_file_to_container(sandbox_context, temp_file, content, encoding)
+
+            # Step 2: Copy from temp to target location using cp command
+            # This handles permission issues better than direct tar extraction
+            copy_result = await sandbox_context.execute_command(f'cp "{temp_file}" "{file_path}"')
+
+            # Step 3: Clean up temporary file regardless of copy result
+            await sandbox_context.execute_command(f'rm -f "{temp_file}"')
+
+            # Check if copy operation succeeded
+            if copy_result.exit_code != 0:
+                return ToolResult(
+                    tool_name=self.name,
+                    status=ExecutionStatus.ERROR,
+                    output='',
+                    error=f'Failed to copy file to target location: {copy_result.stderr or "Unknown error"}'
+                )

             return ToolResult(
                 tool_name=self.name,

@@ -138,8 +204,19 @@ class FileOperation(SandboxTool):
             )

     async def _delete_file(self, sandbox_context: 'Sandbox', file_path: str) -> ToolResult:
-        """Delete a file or directory from the container.
+        """Delete a file or directory from the container.
+
+        Uses 'rm -rf' to handle both files and directories recursively.
+
+        Args:
+            sandbox_context: Sandbox instance
+            file_path: Path to file or directory to delete
+
+        Returns:
+            ToolResult indicating success or failure
+        """
         try:
+            # Use rm -rf to handle both files and directories
             result = await sandbox_context.execute_command(f'rm -rf "{file_path}"')

             if result.exit_code == 0:

@@ -162,8 +239,19 @@ class FileOperation(SandboxTool):
             )

     async def _list_directory(self, sandbox_context: 'Sandbox', dir_path: str) -> ToolResult:
-        """List contents of a directory.
+        """List contents of a directory with detailed information.
+
+        Uses 'ls -la' to show permissions, ownership, size, and timestamps.
+
+        Args:
+            sandbox_context: Sandbox instance
+            dir_path: Path to directory to list
+
+        Returns:
+            ToolResult with directory listing or error message
+        """
         try:
+            # Use ls -la for detailed directory listing
             result = await sandbox_context.execute_command(f'ls -la "{dir_path}"')

             if result.exit_code == 0:

@@ -181,10 +269,22 @@ class FileOperation(SandboxTool):
             )

     async def _check_exists(self, sandbox_context: 'Sandbox', file_path: str) -> ToolResult:
-        """Check if a file or directory exists.
+        """Check if a file or directory exists.
+
+        Uses 'test -e' command which returns exit code 0 if path exists.
+
+        Args:
+            sandbox_context: Sandbox instance
+            file_path: Path to check for existence
+
+        Returns:
+            ToolResult with existence status message
+        """
         try:
+            # Use test -e to check existence (works for files and directories)
             result = await sandbox_context.execute_command(f'test -e "{file_path}"')

+            # test command returns 0 if path exists, non-zero otherwise
             exists = result.exit_code == 0
             return ToolResult(
                 tool_name=self.name,

@@ -197,19 +297,35 @@ class FileOperation(SandboxTool):
                 tool_name=self.name, status=ExecutionStatus.ERROR, output='', error=f'Exists check failed: {str(e)}'
             )

-    async def _write_file_to_container(
-
-
+    async def _write_file_to_container(
+        self, sandbox_context: 'Sandbox', file_path: str, content: str, encoding: str
+    ) -> None:
+        """Write content to a file in the container using tar archive method.
+
+        This is a low-level method that creates a tar archive containing the file
+        and extracts it to the container. Used internally by _write_file.
+
+        Args:
+            sandbox_context: Sandbox instance with container access
+            file_path: Target file path in container
+            content: File content as string
+            encoding: Text encoding for content conversion
+
+        Raises:
+            Exception: If tar creation or container extraction fails
+        """
+        # Create a tar archive in memory to transfer file content
         tar_stream = io.BytesIO()
         tar = tarfile.TarFile(fileobj=tar_stream, mode='w')

-        #
-        file_data = content.encode(
+        # Encode content using specified encoding and create tar entry
+        file_data = content.encode(encoding)
         tarinfo = tarfile.TarInfo(name=os.path.basename(file_path))
         tarinfo.size = len(file_data)
         tar.addfile(tarinfo, io.BytesIO(file_data))
         tar.close()

-        #
+        # Extract tar archive to container filesystem
+        # Note: This writes to the directory containing the target file
         tar_stream.seek(0)
-        sandbox_context.container.put_archive(os.path.dirname(file_path), tar_stream.getvalue())
+        sandbox_context.container.put_archive(os.path.dirname(file_path) or '/', tar_stream.getvalue())

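The `or '/'` appended to the `put_archive` call is the subtle part of this change: `os.path.dirname` returns an empty string for a bare filename, and docker-py's `put_archive` needs an actual directory path. A standard-library-only illustration of that edge case:

```python
import os

print(os.path.dirname('/sandbox/data/hello.txt'))  # '/sandbox/data'
print(os.path.dirname('hello.txt'))                # ''  (no directory component)
print(os.path.dirname('hello.txt') or '/')         # '/' - the fallback the new code uses
```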
ms_enclave/utils/logger.py
CHANGED
@@ -2,76 +2,127 @@
 import importlib.util
 import logging
 import os
-
+import sys
+import threading
 from types import MethodType
 from typing import Optional

 init_loggers = {}

+# ANSI color helpers for levelname coloring in TTY streams
+RESET = '\033[0m'
+LEVEL_COLORS = {
+    'DEBUG': '\033[34m',  # Blue
+    'INFO': '\033[32m',  # Green
+    'WARNING': '\033[33m',  # Yellow
+    'ERROR': '\033[31m',  # Red
+    'CRITICAL': '\033[35m',  # Magenta
+}
+
 logger_format = logging.Formatter('[%(levelname)s:%(name)s] %(message)s')

 info_set = set()
 warning_set = set()
+_once_lock = threading.Lock()
+
+
+class ColorFormatter(logging.Formatter):
+    """Formatter that colors only the levelname for TTY streams."""
+
+    def __init__(self, fmt: str, datefmt: Optional[str] = None, style: str = '%', use_color: bool = True) -> None:
+        super().__init__(fmt=fmt, datefmt=datefmt, style=style)
+        self.use_color = use_color
+
+    def format(self, record: logging.LogRecord) -> str:
+        original_levelname = record.levelname
+        try:
+            if self.use_color:
+                color = LEVEL_COLORS.get(record.levelname, '')
+                if color:
+                    record.levelname = f'{color}{record.levelname}{RESET}'
+            return super().format(record)
+        finally:
+            record.levelname = original_levelname
+
+
+def _should_use_color(stream) -> bool:
+    """Decide if we should use colors for a given stream based on TTY and env."""
+    # Respect NO_COLOR to disable, FORCE_COLOR or LOG_COLOR=1 to force enable
+    if os.getenv('NO_COLOR'):
+        return False
+    if os.getenv('FORCE_COLOR') or os.getenv('LOG_COLOR') == '1':
+        return True
+    try:
+        return hasattr(stream, 'isatty') and stream.isatty()
+    except Exception:
+        return False
+
+
+def info_once(self: logging.Logger, msg: str, *args, **kwargs) -> None:
+    hash_id = kwargs.pop('hash_id', msg)
+    with _once_lock:
+        if hash_id in info_set:
+            return
+        info_set.add(hash_id)
+    self.info(msg, *args, **kwargs)


-def
-    hash_id = kwargs.
-
-
-
-
+def warning_once(self: logging.Logger, msg: str, *args, **kwargs) -> None:
+    hash_id = kwargs.pop('hash_id', msg)
+    with _once_lock:
+        if hash_id in warning_set:
+            return
+        warning_set.add(hash_id)
+    self.warning(msg, *args, **kwargs)


-def
-
-
-
-    warning_set.add(hash_id)
-    self.warning(msg)
+def _update_handler_levels(logger: logging.Logger, log_level: int) -> None:
+    """Set all handler levels to the given log level."""
+    for handler in logger.handlers:
+        handler.setLevel(log_level)


 def get_logger(log_file: Optional[str] = None, log_level: Optional[int] = None, file_mode: str = 'w'):
-    """
+    """Get project logger configured with colored console output and optional file output.

     Args:
-        log_file: Log filename
-
-
-        file_mode: Specifies the mode to open the file, if filename is
-            specified (if filemode is unspecified, it defaults to 'w').
+        log_file: Log filename. If specified, a FileHandler will be added to the logger.
+        log_level: Logging level. If None, resolve from env LOG_LEVEL (default INFO).
+        file_mode: Mode to open the log file if log_file is provided (default 'w').
     """
     if log_level is None:
-
-        log_level = getattr(logging,
+        env_level = os.getenv('LOG_LEVEL', 'INFO').upper()
+        log_level = getattr(logging, env_level, logging.INFO)
+
     logger_name = __name__.split('.')[0]
     logger = logging.getLogger(logger_name)
     logger.propagate = False
+
+    # If logger is already initialized, just ensure file handler and update handler levels.
     if logger_name in init_loggers:
         add_file_handler_if_needed(logger, log_file, file_mode, log_level)
+        _update_handler_levels(logger, log_level)
         return logger

-    #
-    # Starting in 1.8.0, PyTorch DDP attaches a StreamHandler <stderr> (NOTSET)
-    # to the root logger. As logger.propagate is True by default, this root
-    # level handler causes logging messages from rank>0 processes to
-    # unexpectedly show up on the console, creating much unwanted clutter.
-    # To fix this issue, we set the root logger's StreamHandler, if any, to log
-    # at the ERROR level.
+    # Handle duplicate logs to the console (PyTorch DDP root StreamHandler quirk)
     for handler in logger.root.handlers:
-        if
+        if isinstance(handler, logging.StreamHandler):
             handler.setLevel(logging.ERROR)

-
-
+    # Console handler with colorized levelname when appropriate
+    stream_handler = logging.StreamHandler(stream=sys.stderr)
+    use_color = _should_use_color(getattr(stream_handler, 'stream', sys.stderr))
+    color_fmt = ColorFormatter('[%(levelname)s:%(name)s] %(message)s', use_color=use_color)
+    stream_handler.setFormatter(color_fmt)
+    stream_handler.setLevel(log_level)
+    logger.addHandler(stream_handler)

+    # Optional file handler (no color)
     if log_file is not None:
         file_handler = logging.FileHandler(log_file, file_mode)
-
-
-
-        handler.setFormatter(logger_format)
-        handler.setLevel(log_level)
-        logger.addHandler(handler)
+        file_handler.setFormatter(logger_format)
+        file_handler.setLevel(log_level)
+        logger.addHandler(file_handler)

     logger.setLevel(log_level)
     init_loggers[logger_name] = True

@@ -81,26 +132,39 @@ def get_logger(log_file: Optional[str] = None, log_level: Optional[int] = None,


 logger = get_logger()
-# ms_logger = get_ms_logger()

-logger.handlers[0].setFormatter(logger_format)
-# ms_logger.handlers[0].setFormatter(logger_format)
-log_level = os.getenv('LOG_LEVEL', 'INFO').upper()
-# ms_logger.setLevel(log_level)

+def add_file_handler_if_needed(logger: logging.Logger, log_file: Optional[str], file_mode: str, log_level: int) -> None:
+    """Attach a FileHandler for the given log_file if not already present.

-
-
-
-
+    Ensures:
+    - Only one FileHandler per log file path.
+    - FileHandler uses the standard, uncolored formatter.
+    - FileHandler level matches the requested log_level.
+    """
+    if log_file is None:
+        return

+    # Only worker 0 writes logs when torch DDP is present
     if importlib.util.find_spec('torch') is not None:
         is_worker0 = int(os.getenv('LOCAL_RANK', -1)) in {-1, 0}
     else:
         is_worker0 = True

-    if
-
-
-
-
+    if not is_worker0:
+        return
+
+    abs_path = os.path.abspath(log_file)
+    for handler in logger.handlers:
+        if isinstance(handler, logging.FileHandler):
+            # If a handler is already logging to the same file, just update it
+            if getattr(handler, 'baseFilename', None) == abs_path:
+                handler.setFormatter(logger_format)
+                handler.setLevel(log_level)
+                return
+
+    # Add a new file handler for this log file
+    file_handler = logging.FileHandler(abs_path, file_mode)
+    file_handler.setFormatter(logger_format)
+    file_handler.setLevel(log_level)
+    logger.addHandler(file_handler)

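A minimal sketch of how the reworked logger is meant to be used; the log file name and messages are illustrative, and level/color resolution follows the environment variables referenced in the diff (LOG_LEVEL, NO_COLOR, FORCE_COLOR, LOG_COLOR):

```python
# Run with, for example:  LOG_LEVEL=DEBUG NO_COLOR=1 python my_script.py
from ms_enclave.utils import get_logger

logger = get_logger(log_file='run.log')  # stderr handler plus an uncolored file handler
logger.info('goes to stderr and run.log as [INFO:ms_enclave] ...')
logger.warning('levelname is colorized on a TTY unless NO_COLOR is set')

# Repeated calls reuse the same logger; a FileHandler is only added for a path
# that is not already attached.
assert get_logger(log_file='run.log') is logger
```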
ms_enclave/version.py
CHANGED
@@ -1,2 +1,2 @@
-__version__ = '0.0.
-__release_date__ = '2025-10-
+__version__ = '0.0.2'
+__release_date__ = '2025-10-30 12:00:00'

{ms_enclave-0.0.1.dist-info → ms_enclave-0.0.2.dist-info}/METADATA
CHANGED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: ms-enclave
-Version: 0.0.
+Version: 0.0.2
 Summary: Modularized and Stable Sandbox runtime environment.
 Author: ModelScope team
 Author-email: contact@modelscope.cn

@@ -29,13 +29,26 @@ Requires-Dist: docker>=7.1.0; extra == "docker"
 Requires-Dist: websocket-client; extra == "docker"
 Dynamic: license-file

-
+<p align="center">
+    <br>
+    <img src="doc/asset/image/logo.png"/>
+    <br>
+<p>

-
+<p align="center">
+<a href="README_zh.md">中文</a>   |   English  
+</p>
+
+<p align="center">
+<img src="https://img.shields.io/badge/python-%E2%89%A53.10-5be.svg">
+<a href="https://badge.fury.io/py/ms-enclave"><img src="https://badge.fury.io/py/ms-enclave.svg" alt="PyPI version" height="18"></a>
+<a href="https://pypi.org/project/ms-enclave"><img alt="PyPI - Downloads" src="https://static.pepy.tech/badge/ms-enclave"></a>
+<a href="https://github.com/modelscope/ms-enclave/pulls"><img src="https://img.shields.io/badge/PR-welcome-55EB99.svg"></a>
+<p>

 ## Overview

-ms-enclave is a modular and stable sandbox runtime environment that provides a secure isolated execution environment for applications. It achieves strong isolation through Docker containers, with accompanying local/HTTP managers and an extensible tool system, enabling you to safely and efficiently execute code in a controlled environment.
+ms-enclave is a modular and stable agent sandbox runtime environment that provides a secure isolated execution environment for applications. It achieves strong isolation through Docker containers, with accompanying local/HTTP managers and an extensible tool system, enabling you to safely and efficiently execute code in a controlled environment.

 - 🔒 Secure Isolation: Full isolation and resource limitation based on Docker
 - 🧩 Modular: Extensible sandbox and tools (registration factory)

@@ -246,6 +259,45 @@ DockerNotebookConfig(tools_config={'notebook_executor': {}})
 - `network_enabled`: Enable network (Notebook sandbox requires True)
 - `remove_on_exit`: Automatically remove container on exit (default True)

+**Example of Installing Additional Dependencies in Sandbox**
+
+```python
+async with SandboxFactory.create_sandbox(SandboxType.DOCKER, config) as sandbox:
+    # 1) Write a file
+    requirements_file = '/sandbox/requirements.txt'
+    await sandbox.execute_tool('file_operation', {
+        'operation': 'write', 'file_path': f'{requirements_file}', 'content': 'numpy\npandas\nmodelscope\n'
+    })
+    # 2) Execute Python code
+    result = await sandbox.execute_tool('python_executor', {
+        'code': f"print('Hello from sandbox!')\nprint(open(f'{requirements_file}').read())"
+    })
+    print(result.output)
+
+    # 3) Execute CLI
+    result_cli = await sandbox.execute_command(f'pip install -r {requirements_file}')
+    print(result_cli.stdout, flush=True)
+```
+
+**Example of Reading and Writing Host Files in Sandbox**
+
+```python
+async with LocalSandboxManager() as manager:
+    # Create sandbox
+    config = DockerSandboxConfig(
+        # image='python-sandbox',
+        image='python:3.11-slim',
+        tools_config={'python_executor': {}, 'file_operation': {}},
+        volumes={'~/Code/ms-enclave/output': {'bind': '/sandbox/data', 'mode': 'rw'}}
+    )
+    sandbox_id = await manager.create_sandbox(SandboxType.DOCKER, config)
+
+    # Write file
+    result = await manager.execute_tool(
+        sandbox_id, 'file_operation', {'operation': 'write', 'file_path': '/sandbox/data/hello.txt', 'content': 'Hello, Sandbox!'}
+    )
+    print(result.model_dump())
+```
 ---

 ## Error Handling and Debugging

{ms_enclave-0.0.1.dist-info → ms_enclave-0.0.2.dist-info}/RECORD
CHANGED

@@ -1,21 +1,21 @@
 ms_enclave/__init__.py,sha256=IKXP5d9APyqXs14IU1mBKSr8tGxAwxeCtCnAWTOGhAU,98
-ms_enclave/version.py,sha256=
+ms_enclave/version.py,sha256=WqqLptwhThO-50eN9GEdqDPEtMsOV0svfTEXxTezYvE,63
 ms_enclave/cli/__init__.py,sha256=I_ANdxdcIHpkIzIXc1yKOlWwzb4oY0FwTPq1kYtgzQw,50
 ms_enclave/cli/base.py,sha256=m1DFlF16L0Lyrn0YNuFj8ByGjVJIoI0jKzAoodIXjRk,404
 ms_enclave/cli/cli.py,sha256=AoSPw65_7OBMT8qgv5vPz1S3Fo91Y6yluaDGGHbUDj0,693
 ms_enclave/cli/start_server.py,sha256=FPmZ97MhWgsyXkl0y32D4XjhAak5Ogrt0Am3izRdi74,2858
 ms_enclave/sandbox/__init__.py,sha256=OPU_W5fZE98IQ8_pAaYkN66R8TZw435Co8uw9oAyZo4,783
 ms_enclave/sandbox/boxes/__init__.py,sha256=it3KgV8cAU4a1TQbiRz-kg7GgI5xA0nqhEAwEgUnpnQ,356
-ms_enclave/sandbox/boxes/base.py,sha256=
-ms_enclave/sandbox/boxes/docker_notebook.py,sha256
-ms_enclave/sandbox/boxes/docker_sandbox.py,sha256=
+ms_enclave/sandbox/boxes/base.py,sha256=lLjktdh4YQABr7X-lTLne2VN2Uv6Rcc5TN5LYx_NLxc,8532
+ms_enclave/sandbox/boxes/docker_notebook.py,sha256=-6O54MRNpiGAaQTiI9_1LCl0zzvB2se2_M1Bi8FuZ3s,7989
+ms_enclave/sandbox/boxes/docker_sandbox.py,sha256=ouFPtJyoSeZrQKD61Qyhq13QSuZl1Ut2gD1KgBbKPUs,11755
 ms_enclave/sandbox/manager/__init__.py,sha256=juYJsmpRoCThcILPemx6VGU-brpNF4cbu1FPV-d-tNc,255
 ms_enclave/sandbox/manager/base.py,sha256=nXznN8Yysgkgk8i-yfL29gAh2gBi3sdxPlQciTXnN9g,3848
 ms_enclave/sandbox/manager/http_manager.py,sha256=heVehKDi_VMJ5g9BllzmK5o68c2WIagsl8e231SKWtc,15731
 ms_enclave/sandbox/manager/local_manager.py,sha256=lv7vF9sNOThOPnCz9RzNTcD0pZHoNTVldVanfF7c5Io,9402
 ms_enclave/sandbox/model/__init__.py,sha256=3Sbj5Id77MWOZ1IXHGbu2q5U_p5KMLKvwZEr1QwjUqg,550
 ms_enclave/sandbox/model/base.py,sha256=1X5PaCUC5B5GXVCTzQMybqF7t0QEhWUkYFC-DyYXdhY,709
-ms_enclave/sandbox/model/config.py,sha256=
+ms_enclave/sandbox/model/config.py,sha256=afiSu5QcaZsd36lSoWMDmzz6o6kxTGebHzAqgLQOG_w,4616
 ms_enclave/sandbox/model/requests.py,sha256=JDcANsACotKscWrLs0H_-J8BKEhALUNgmJnoy6_JZuA,2336
 ms_enclave/sandbox/model/responses.py,sha256=AZ_BspRHzlqIP7xjI9XnoeQasIzJqMV8Qmdw23-KAwA,2502
 ms_enclave/sandbox/server/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0

@@ -25,16 +25,16 @@ ms_enclave/sandbox/tools/base.py,sha256=vLTJMFOQnfKyTyMMcgeq5f7KBFEFV5y7PFZmb4Ss
 ms_enclave/sandbox/tools/sandbox_tool.py,sha256=iATkvNax7aae53ZnmMwmQDuDb5qcXEUI-aPXD5eVfzo,1370
 ms_enclave/sandbox/tools/tool_info.py,sha256=A3RZLETWG9834J7r2KZfG6_JdV4UWnrFKSrEBfeCLHE,5223
 ms_enclave/sandbox/tools/sandbox_tools/__init__.py,sha256=Mtm2jQTrztLbRVDBiitogIB3GdIrwF3Hwk5KlruyntQ,176
-ms_enclave/sandbox/tools/sandbox_tools/file_operation.py,sha256=
+ms_enclave/sandbox/tools/sandbox_tools/file_operation.py,sha256=3yqkMdx_yrHUM4oyhd5WQm_tSV3o1-JwmBeVoIwaLfE,13348
 ms_enclave/sandbox/tools/sandbox_tools/notebook_executor.py,sha256=kZf9QA0yk-aFOJjtAbzYdKfNwjcoBSrscT9mWsv906w,6029
 ms_enclave/sandbox/tools/sandbox_tools/python_executor.py,sha256=SlhenlO09B_eVfcnrqSZAVYC_JCMws0eXj-caR2BTmw,3259
 ms_enclave/sandbox/tools/sandbox_tools/shell_executor.py,sha256=d5qPv6O1K1V91fsGAf9MiArcRuLtjFWEthi4iwtmAEE,2263
 ms_enclave/utils/__init__.py,sha256=KYWYfti4m8TcWzjOfmollEfEArTgTasq2Zuaz9AkzZI,31
 ms_enclave/utils/json_schema.py,sha256=hBhdMilb9_7JZaFZBb_THPrq8N4m_rJPD43mfwnTx14,7431
-ms_enclave/utils/logger.py,sha256=
-ms_enclave-0.0.
-ms_enclave-0.0.
-ms_enclave-0.0.
-ms_enclave-0.0.
-ms_enclave-0.0.
-ms_enclave-0.0.
+ms_enclave/utils/logger.py,sha256=Hg_iVhgUX39LlWiQsvCyDx9NwQt3wQUukbOSCiLYEQU,5974
+ms_enclave-0.0.2.dist-info/licenses/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+ms_enclave-0.0.2.dist-info/METADATA,sha256=Pqe2xTBcURL8-edgOc9wBSo7SAUb8L2gjcURrqqYvyU,12016
+ms_enclave-0.0.2.dist-info/WHEEL,sha256=SmOxYU7pzNKBqASvQJ7DjX3XGUF92lrGhMb3R6_iiqI,91
+ms_enclave-0.0.2.dist-info/entry_points.txt,sha256=Av3oIAE91Jj-742sTPA90ktrrq8lDowFC2RfXLwM8JM,58
+ms_enclave-0.0.2.dist-info/top_level.txt,sha256=V_Q9rBOF-RGwACDP9ppukoyjaOtpjdht7dhe7StS86A,11
+ms_enclave-0.0.2.dist-info/RECORD,,

The remaining dist-info files (WHEEL, entry_points.txt, licenses/LICENSE, top_level.txt) are unchanged between 0.0.1 and 0.0.2.