acontext 0.1.2__py3-none-any.whl → 0.1.4__py3-none-any.whl
This diff compares the contents of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the versions as they appear in their respective public registries.
- acontext/__init__.py +0 -8
- acontext/agent/__init__.py +2 -0
- acontext/agent/base.py +2 -1
- acontext/agent/disk.py +25 -18
- acontext/agent/prompts.py +96 -0
- acontext/agent/sandbox.py +532 -0
- acontext/agent/skill.py +35 -44
- acontext/agent/text_editor.py +436 -0
- acontext/async_client.py +6 -5
- acontext/client.py +6 -5
- acontext/client_types.py +2 -0
- acontext/resources/__init__.py +4 -8
- acontext/resources/async_disks.py +92 -0
- acontext/resources/async_sandboxes.py +85 -0
- acontext/resources/async_sessions.py +0 -41
- acontext/resources/async_skills.py +40 -0
- acontext/resources/async_users.py +2 -2
- acontext/resources/disks.py +131 -33
- acontext/resources/sandboxes.py +85 -0
- acontext/resources/sessions.py +0 -41
- acontext/resources/skills.py +40 -0
- acontext/resources/users.py +2 -2
- acontext/types/__init__.py +15 -22
- acontext/types/disk.py +6 -3
- acontext/types/sandbox.py +47 -0
- acontext/types/session.py +0 -16
- acontext/types/skill.py +11 -0
- acontext/types/tool.py +0 -6
- acontext/types/user.py +0 -1
- {acontext-0.1.2.dist-info → acontext-0.1.4.dist-info}/METADATA +1 -1
- acontext-0.1.4.dist-info/RECORD +41 -0
- acontext/resources/async_blocks.py +0 -164
- acontext/resources/async_spaces.py +0 -200
- acontext/resources/blocks.py +0 -163
- acontext/resources/spaces.py +0 -198
- acontext/types/block.py +0 -26
- acontext/types/space.py +0 -70
- acontext-0.1.2.dist-info/RECORD +0 -41
- {acontext-0.1.2.dist-info → acontext-0.1.4.dist-info}/WHEEL +0 -0
acontext/agent/text_editor.py
ADDED
@@ -0,0 +1,436 @@
+"""Text editor file operations for sandbox environments."""
+
+import base64
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from .sandbox import AsyncSandboxContext, SandboxContext
+
+
+def escape_for_shell(s: str) -> str:
+    """Escape a string for safe use in shell commands."""
+    # Use single quotes and escape any single quotes in the string
+    return "'" + s.replace("'", "'\"'\"'") + "'"
+
+
+# ============================================================================
+# Sync Operations
+# ============================================================================
+
+
+def view_file(
+    ctx: "SandboxContext", path: str, view_range: list | None, timeout: float | None
+) -> dict:
+    """View file content with line numbers.
+
+    Args:
+        ctx: The sandbox context.
+        path: The file path to view.
+        view_range: Optional [start_line, end_line] to view specific lines.
+        timeout: Optional timeout for command execution.
+
+    Returns:
+        A dict with file content and metadata, or error information.
+    """
+    # First check if file exists and get total lines
+    check_cmd = f"wc -l < {escape_for_shell(path)} 2>/dev/null || echo 'FILE_NOT_FOUND'"
+    result = ctx.client.sandboxes.exec_command(
+        sandbox_id=ctx.sandbox_id,
+        command=check_cmd,
+        timeout=timeout,
+    )
+
+    if "FILE_NOT_FOUND" in result.stdout or result.exit_code != 0:
+        return {
+            "error": f"File not found: {path}",
+            "stderr": result.stderr,
+        }
+
+    total_lines = int(result.stdout.strip()) if result.stdout.strip().isdigit() else 0
+
+    # Build the view command with line numbers
+    if view_range and len(view_range) == 2:
+        start_line, end_line = view_range
+        cmd = f"sed -n '{start_line},{end_line}p' {escape_for_shell(path)} | nl -ba -v {start_line}"
+    else:
+        cmd = f"nl -ba {escape_for_shell(path)}"
+        start_line = 1
+
+    result = ctx.client.sandboxes.exec_command(
+        sandbox_id=ctx.sandbox_id,
+        command=cmd,
+        timeout=timeout,
+    )
+
+    if result.exit_code != 0:
+        return {
+            "error": f"Failed to view file: {path}",
+            "stderr": result.stderr,
+        }
+
+    # Count lines in output
+    content_lines = (
+        result.stdout.rstrip("\n").split("\n") if result.stdout.strip() else []
+    )
+    num_lines = len(content_lines)
+
+    return {
+        "file_type": "text",
+        "content": result.stdout,
+        "numLines": num_lines,
+        "startLine": start_line if view_range else 1,
+        "totalLines": total_lines + 1,  # wc -l doesn't count last line without newline
+    }
+
+
+def create_file(
+    ctx: "SandboxContext", path: str, file_text: str, timeout: float | None
+) -> dict:
+    """Create a new file with content.
+
+    Args:
+        ctx: The sandbox context.
+        path: The file path to create.
+        file_text: The content to write to the file.
+        timeout: Optional timeout for command execution.
+
+    Returns:
+        A dict with creation status or error information.
+    """
+    # Check if file already exists
+    check_cmd = f"test -f {escape_for_shell(path)} && echo 'EXISTS' || echo 'NEW'"
+    check_result = ctx.client.sandboxes.exec_command(
+        sandbox_id=ctx.sandbox_id,
+        command=check_cmd,
+        timeout=timeout,
+    )
+    is_update = "EXISTS" in check_result.stdout
+
+    # Create directory if needed
+    dir_path = "/".join(path.split("/")[:-1])
+    if dir_path:
+        mkdir_cmd = f"mkdir -p {escape_for_shell(dir_path)}"
+        ctx.client.sandboxes.exec_command(
+            sandbox_id=ctx.sandbox_id,
+            command=mkdir_cmd,
+            timeout=timeout,
+        )
+
+    # Write file using base64 encoding to safely transfer content
+    encoded_content = base64.b64encode(file_text.encode()).decode()
+    write_cmd = f"echo {escape_for_shell(encoded_content)} | base64 -d > {escape_for_shell(path)}"
+
+    result = ctx.client.sandboxes.exec_command(
+        sandbox_id=ctx.sandbox_id,
+        command=write_cmd,
+        timeout=timeout,
+    )
+
+    if result.exit_code != 0:
+        return {
+            "error": f"Failed to create file: {path}",
+            "stderr": result.stderr,
+        }
+
+    return {
+        "is_file_update": is_update,
+        "message": f"File {'updated' if is_update else 'created'}: {path}",
+    }
+
+
+def str_replace(
+    ctx: "SandboxContext", path: str, old_str: str, new_str: str, timeout: float | None
+) -> dict:
+    """Replace a string in a file.
+
+    Args:
+        ctx: The sandbox context.
+        path: The file path to modify.
+        old_str: The exact string to find and replace.
+        new_str: The string to replace old_str with.
+        timeout: Optional timeout for command execution.
+
+    Returns:
+        A dict with diff information or error details.
+    """
+    # First read the file content
+    read_cmd = f"cat {escape_for_shell(path)}"
+    result = ctx.client.sandboxes.exec_command(
+        sandbox_id=ctx.sandbox_id,
+        command=read_cmd,
+        timeout=timeout,
+    )
+
+    if result.exit_code != 0:
+        return {
+            "error": f"File not found: {path}",
+            "stderr": result.stderr,
+        }
+
+    original_content = result.stdout
+
+    # Check if old_str exists in the file
+    if old_str not in original_content:
+        return {
+            "error": f"String not found in file: {old_str[:50]}...",
+        }
+
+    # Count occurrences
+    occurrences = original_content.count(old_str)
+    if occurrences > 1:
+        return {
+            "error": f"Multiple occurrences ({occurrences}) of the string found. Please provide more context to make the match unique.",
+        }
+
+    # Perform the replacement
+    new_content = original_content.replace(old_str, new_str, 1)
+
+    # Find the line numbers affected
+    old_lines = original_content.split("\n")
+    new_lines = new_content.split("\n")
+
+    # Find where the change starts
+    old_start = 1
+    for i, (old_line, new_line) in enumerate(zip(old_lines, new_lines)):
+        if old_line != new_line:
+            old_start = i + 1
+            break
+
+    # Write the new content
+    encoded_content = base64.b64encode(new_content.encode()).decode()
+    write_cmd = f"echo {escape_for_shell(encoded_content)} | base64 -d > {escape_for_shell(path)}"
+
+    result = ctx.client.sandboxes.exec_command(
+        sandbox_id=ctx.sandbox_id,
+        command=write_cmd,
+        timeout=timeout,
+    )
+
+    if result.exit_code != 0:
+        return {
+            "error": f"Failed to write file: {path}",
+            "stderr": result.stderr,
+        }
+
+    # Calculate diff info
+    old_str_lines = old_str.count("\n") + 1
+    new_str_lines = new_str.count("\n") + 1
+
+    # Build diff lines
+    diff_lines = []
+    for line in old_str.split("\n"):
+        diff_lines.append(f"-{line}")
+    for line in new_str.split("\n"):
+        diff_lines.append(f"+{line}")
+
+    return {
+        "oldStart": old_start,
+        "oldLines": old_str_lines,
+        "newStart": old_start,
+        "newLines": new_str_lines,
+        "lines": diff_lines,
+    }
+
+
+# ============================================================================
+# Async Operations
+# ============================================================================
+
+
+async def async_view_file(
+    ctx: "AsyncSandboxContext", path: str, view_range: list | None, timeout: float | None
+) -> dict:
+    """View file content with line numbers (async).
+
+    Args:
+        ctx: The async sandbox context.
+        path: The file path to view.
+        view_range: Optional [start_line, end_line] to view specific lines.
+        timeout: Optional timeout for command execution.
+
+    Returns:
+        A dict with file content and metadata, or error information.
+    """
+    check_cmd = f"wc -l < {escape_for_shell(path)} 2>/dev/null || echo 'FILE_NOT_FOUND'"
+    result = await ctx.client.sandboxes.exec_command(
+        sandbox_id=ctx.sandbox_id,
+        command=check_cmd,
+        timeout=timeout,
+    )
+
+    if "FILE_NOT_FOUND" in result.stdout or result.exit_code != 0:
+        return {
+            "error": f"File not found: {path}",
+            "stderr": result.stderr,
+        }
+
+    total_lines = int(result.stdout.strip()) if result.stdout.strip().isdigit() else 0
+
+    if view_range and len(view_range) == 2:
+        start_line, end_line = view_range
+        cmd = f"sed -n '{start_line},{end_line}p' {escape_for_shell(path)} | nl -ba -v {start_line}"
+    else:
+        cmd = f"nl -ba {escape_for_shell(path)}"
+        start_line = 1
+
+    result = await ctx.client.sandboxes.exec_command(
+        sandbox_id=ctx.sandbox_id,
+        command=cmd,
+        timeout=timeout,
+    )
+
+    if result.exit_code != 0:
+        return {
+            "error": f"Failed to view file: {path}",
+            "stderr": result.stderr,
+        }
+
+    content_lines = (
+        result.stdout.rstrip("\n").split("\n") if result.stdout.strip() else []
+    )
+    num_lines = len(content_lines)
+
+    return {
+        "file_type": "text",
+        "content": result.stdout,
+        "numLines": num_lines,
+        "startLine": start_line if view_range else 1,
+        "totalLines": total_lines + 1,
+    }
+
+
+async def async_create_file(
+    ctx: "AsyncSandboxContext", path: str, file_text: str, timeout: float | None
+) -> dict:
+    """Create a new file with content (async).
+
+    Args:
+        ctx: The async sandbox context.
+        path: The file path to create.
+        file_text: The content to write to the file.
+        timeout: Optional timeout for command execution.
+
+    Returns:
+        A dict with creation status or error information.
+    """
+    check_cmd = f"test -f {escape_for_shell(path)} && echo 'EXISTS' || echo 'NEW'"
+    check_result = await ctx.client.sandboxes.exec_command(
+        sandbox_id=ctx.sandbox_id,
+        command=check_cmd,
+        timeout=timeout,
+    )
+    is_update = "EXISTS" in check_result.stdout
+
+    dir_path = "/".join(path.split("/")[:-1])
+    if dir_path:
+        mkdir_cmd = f"mkdir -p {escape_for_shell(dir_path)}"
+        await ctx.client.sandboxes.exec_command(
+            sandbox_id=ctx.sandbox_id,
+            command=mkdir_cmd,
+            timeout=timeout,
+        )
+
+    encoded_content = base64.b64encode(file_text.encode()).decode()
+    write_cmd = f"echo {escape_for_shell(encoded_content)} | base64 -d > {escape_for_shell(path)}"
+
+    result = await ctx.client.sandboxes.exec_command(
+        sandbox_id=ctx.sandbox_id,
+        command=write_cmd,
+        timeout=timeout,
+    )
+
+    if result.exit_code != 0:
+        return {
+            "error": f"Failed to create file: {path}",
+            "stderr": result.stderr,
+        }
+
+    return {
+        "is_file_update": is_update,
+        "message": f"File {'updated' if is_update else 'created'}: {path}",
+    }
+
+
+async def async_str_replace(
+    ctx: "AsyncSandboxContext", path: str, old_str: str, new_str: str, timeout: float | None
+) -> dict:
+    """Replace a string in a file (async).
+
+    Args:
+        ctx: The async sandbox context.
+        path: The file path to modify.
+        old_str: The exact string to find and replace.
+        new_str: The string to replace old_str with.
+        timeout: Optional timeout for command execution.
+
+    Returns:
+        A dict with diff information or error details.
+    """
+    read_cmd = f"cat {escape_for_shell(path)}"
+    result = await ctx.client.sandboxes.exec_command(
+        sandbox_id=ctx.sandbox_id,
+        command=read_cmd,
+        timeout=timeout,
+    )
+
+    if result.exit_code != 0:
+        return {
+            "error": f"File not found: {path}",
+            "stderr": result.stderr,
+        }
+
+    original_content = result.stdout
+
+    if old_str not in original_content:
+        return {
+            "error": f"String not found in file: {old_str[:50]}...",
+        }
+
+    occurrences = original_content.count(old_str)
+    if occurrences > 1:
+        return {
+            "error": f"Multiple occurrences ({occurrences}) of the string found. Please provide more context to make the match unique.",
+        }
+
+    new_content = original_content.replace(old_str, new_str, 1)
+
+    old_lines = original_content.split("\n")
+    new_lines = new_content.split("\n")
+
+    old_start = 1
+    for i, (old_line, new_line) in enumerate(zip(old_lines, new_lines)):
+        if old_line != new_line:
+            old_start = i + 1
+            break
+
+    encoded_content = base64.b64encode(new_content.encode()).decode()
+    write_cmd = f"echo {escape_for_shell(encoded_content)} | base64 -d > {escape_for_shell(path)}"
+
+    result = await ctx.client.sandboxes.exec_command(
+        sandbox_id=ctx.sandbox_id,
+        command=write_cmd,
+        timeout=timeout,
+    )
+
+    if result.exit_code != 0:
+        return {
+            "error": f"Failed to write file: {path}",
+            "stderr": result.stderr,
+        }
+
+    old_str_lines = old_str.count("\n") + 1
+    new_str_lines = new_str.count("\n") + 1
+
+    diff_lines = []
+    for line in old_str.split("\n"):
+        diff_lines.append(f"-{line}")
+    for line in new_str.split("\n"):
+        diff_lines.append(f"+{line}")
+
+    return {
+        "oldStart": old_start,
+        "oldLines": old_str_lines,
+        "newStart": old_start,
+        "newLines": new_str_lines,
+        "lines": diff_lines,
+    }
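The helpers in this new module are driven through a sandbox context. Below is a minimal usage sketch, not part of the diff, assuming `ctx` is a `SandboxContext` exposing the `client` and `sandbox_id` attributes these functions rely on (how such a context is obtained is not shown here):

```python
# Illustrative sketch only; paths and content are made up for the example.
from acontext.agent.text_editor import create_file, str_replace, view_file

def edit_in_sandbox(ctx) -> None:
    # Create a file, make a single unique replacement, then view it with line numbers.
    create_file(ctx, "/workspace/app.py", "print('hello')\n", timeout=30.0)
    str_replace(ctx, "/workspace/app.py", "hello", "hello, world", timeout=30.0)
    result = view_file(ctx, "/workspace/app.py", view_range=[1, 10], timeout=30.0)
    if "error" in result:
        raise RuntimeError(result["error"])
    print(result["content"])
```

Each helper shells out through `sandboxes.exec_command` and transfers file contents as base64, so arbitrary text survives the shell round trip.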
acontext/async_client.py
CHANGED
@@ -13,9 +13,8 @@ from .errors import APIError, TransportError
 from .messages import MessagePart as MessagePart
 from .uploads import FileUpload as FileUpload
 from .resources.async_disks import AsyncDisksAPI as AsyncDisksAPI
-from .resources.
+from .resources.async_sandboxes import AsyncSandboxesAPI as AsyncSandboxesAPI
 from .resources.async_sessions import AsyncSessionsAPI as AsyncSessionsAPI
-from .resources.async_spaces import AsyncSpacesAPI as AsyncSpacesAPI
 from .resources.async_tools import AsyncToolsAPI as AsyncToolsAPI
 from .resources.async_skills import AsyncSkillsAPI as AsyncSkillsAPI
 from .resources.async_users import AsyncUsersAPI as AsyncUsersAPI
@@ -105,14 +104,13 @@ class AcontextAsyncClient:

         self._timeout = actual_timeout

-        self.spaces = AsyncSpacesAPI(self)
         self.sessions = AsyncSessionsAPI(self)
         self.disks = AsyncDisksAPI(self)
         self.artifacts = self.disks.artifacts
-        self.blocks = AsyncBlocksAPI(self)
         self.tools = AsyncToolsAPI(self)
         self.skills = AsyncSkillsAPI(self)
         self.users = AsyncUsersAPI(self)
+        self.sandboxes = AsyncSandboxesAPI(self)

     @property
     def base_url(self) -> str:
@@ -158,7 +156,10 @@
         data: Mapping[str, Any] | None = None,
         files: Mapping[str, tuple[str, BinaryIO, str | None]] | None = None,
         unwrap: bool = True,
+        timeout: float | None = None,
     ) -> Any:
+        # Use per-request timeout if provided, otherwise use client default
+        effective_timeout = timeout if timeout is not None else self._timeout
         try:
             response = await self._client.request(
                 method=method,
@@ -167,7 +168,7 @@
                 json=json_data,
                 data=data,
                 files=files,
-                timeout=
+                timeout=effective_timeout,
             )
         except httpx.HTTPError as exc:  # pragma: no cover - passthrough to caller
             raise TransportError(str(exc)) from exc
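Both clients now take an optional per-request timeout that falls back to the client-wide default (`self._timeout`). A hedged sketch of the effect, assuming an already-constructed `AcontextAsyncClient`, an existing sandbox, and that `exec_command` forwards its `timeout` argument to this new per-request parameter:

```python
# Sketch only: override the client-wide timeout for a single slow request.
async def run_slow_command(client, sandbox_id: str):
    # The 60-second timeout applies to this call only; other requests on the
    # same client keep using the default configured at construction time.
    return await client.sandboxes.exec_command(
        sandbox_id=sandbox_id,
        command="sleep 30 && echo done",
        timeout=60.0,
    )
```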
acontext/client.py
CHANGED
@@ -13,9 +13,8 @@ from .errors import APIError, TransportError
 from .messages import MessagePart as MessagePart
 from .uploads import FileUpload as FileUpload
 from .resources.disks import DisksAPI as DisksAPI
-from .resources.
+from .resources.sandboxes import SandboxesAPI as SandboxesAPI
 from .resources.sessions import SessionsAPI as SessionsAPI
-from .resources.spaces import SpacesAPI as SpacesAPI
 from .resources.tools import ToolsAPI as ToolsAPI
 from .resources.skills import SkillsAPI as SkillsAPI
 from .resources.users import UsersAPI as UsersAPI
@@ -105,14 +104,13 @@ class AcontextClient:

         self._timeout = actual_timeout

-        self.spaces = SpacesAPI(self)
         self.sessions = SessionsAPI(self)
         self.disks = DisksAPI(self)
         self.artifacts = self.disks.artifacts
-        self.blocks = BlocksAPI(self)
         self.tools = ToolsAPI(self)
         self.skills = SkillsAPI(self)
         self.users = UsersAPI(self)
+        self.sandboxes = SandboxesAPI(self)

     @property
     def base_url(self) -> str:
@@ -157,7 +155,10 @@ class AcontextClient:
         data: Mapping[str, Any] | None = None,
         files: Mapping[str, tuple[str, BinaryIO, str | None]] | None = None,
         unwrap: bool = True,
+        timeout: float | None = None,
     ) -> Any:
+        # Use per-request timeout if provided, otherwise use client default
+        effective_timeout = timeout if timeout is not None else self._timeout
         try:
             response = self._client.request(
                 method=method,
@@ -166,7 +167,7 @@
                 json=json_data,
                 data=data,
                 files=files,
-                timeout=
+                timeout=effective_timeout,
            )
         except httpx.HTTPError as exc:  # pragma: no cover - passthrough to caller
             raise TransportError(str(exc)) from exc
acontext/client_types.py
CHANGED
@@ -17,6 +17,7 @@ class RequesterProtocol(Protocol):
         data: Mapping[str, Any] | None = None,
         files: Mapping[str, tuple[str, BinaryIO, str | None]] | None = None,
         unwrap: bool = True,
+        timeout: float | None = None,
     ) -> Any:
         ...

@@ -32,5 +33,6 @@ class AsyncRequesterProtocol(Protocol):
         data: Mapping[str, Any] | None = None,
         files: Mapping[str, tuple[str, BinaryIO, str | None]] | None = None,
         unwrap: bool = True,
+        timeout: float | None = None,
     ) -> Awaitable[Any]:
         ...
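Any object implementing these protocols now has to accept the extra `timeout` keyword. A minimal illustrative stub is sketched below; the leading parameters are reconstructed from calls visible elsewhere in this diff and may not match the real protocol signature exactly:

```python
# Hypothetical test double; not part of the package.
from typing import Any, BinaryIO, Mapping

class RecordingRequester:
    """Stand-in requester that records the timeout each call used."""

    def request(
        self,
        method: str,
        path: str,
        *,
        params: Mapping[str, Any] | None = None,
        json_data: Mapping[str, Any] | None = None,
        data: Mapping[str, Any] | None = None,
        files: Mapping[str, tuple[str, BinaryIO, str | None]] | None = None,
        unwrap: bool = True,
        timeout: float | None = None,
    ) -> Any:
        # Echo back what would have been sent so a test can assert on it.
        return {"method": method, "path": path, "timeout": timeout}
```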
acontext/resources/__init__.py
CHANGED
@@ -1,16 +1,14 @@
 """Resource-specific API helpers for the Acontext client."""

-from .async_blocks import AsyncBlocksAPI
 from .async_disks import AsyncDisksAPI, AsyncDiskArtifactsAPI
+from .async_sandboxes import AsyncSandboxesAPI
 from .async_sessions import AsyncSessionsAPI
-from .async_spaces import AsyncSpacesAPI
 from .async_tools import AsyncToolsAPI
 from .async_skills import AsyncSkillsAPI
 from .async_users import AsyncUsersAPI
-from .blocks import BlocksAPI
 from .disks import DisksAPI, DiskArtifactsAPI
+from .sandboxes import SandboxesAPI
 from .sessions import SessionsAPI
-from .spaces import SpacesAPI
 from .tools import ToolsAPI
 from .skills import SkillsAPI
 from .users import UsersAPI
@@ -18,17 +16,15 @@ from .users import UsersAPI
 __all__ = [
     "DisksAPI",
     "DiskArtifactsAPI",
-    "
+    "SandboxesAPI",
     "SessionsAPI",
-    "SpacesAPI",
     "ToolsAPI",
     "SkillsAPI",
     "UsersAPI",
     "AsyncDisksAPI",
     "AsyncDiskArtifactsAPI",
-    "
+    "AsyncSandboxesAPI",
     "AsyncSessionsAPI",
-    "AsyncSpacesAPI",
     "AsyncToolsAPI",
     "AsyncSkillsAPI",
     "AsyncUsersAPI",
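For downstream code, the practical effect of this re-export change is which names can still be imported from `acontext.resources`; the names below are taken from the updated `__all__` (removed imports shown commented out):

```python
# New in 0.1.4: the sandbox APIs are re-exported from acontext.resources.
from acontext.resources import AsyncSandboxesAPI, SandboxesAPI

# Removed in 0.1.4: the former spaces/blocks helpers are gone, so these now fail.
# from acontext.resources import SpacesAPI, BlocksAPI
```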
acontext/resources/async_disks.py
CHANGED

@@ -287,3 +287,95 @@ class AsyncDiskArtifactsAPI:
             "GET", f"/disk/{disk_id}/artifact/glob", params=params
         )
         return [Artifact.model_validate(item) for item in data]
+
+    async def download_to_sandbox(
+        self,
+        disk_id: str,
+        *,
+        file_path: str,
+        filename: str,
+        sandbox_id: str,
+        sandbox_path: str,
+    ) -> bool:
+        """Download an artifact from disk storage to a sandbox environment.
+
+        Args:
+            disk_id: The UUID of the disk containing the artifact.
+            file_path: Directory path of the artifact (not including filename).
+            filename: The filename of the artifact.
+            sandbox_id: The UUID of the target sandbox.
+            sandbox_path: Destination directory in the sandbox.
+
+        Returns:
+            True if the download was successful.
+
+        Example:
+            ```python
+            success = await client.disks.artifacts.download_to_sandbox(
+                disk_id="disk-uuid",
+                file_path="/documents/",
+                filename="report.pdf",
+                sandbox_id="sandbox-uuid",
+                sandbox_path="/home/user/"
+            )
+            print(f"Success: {success}")
+            ```
+        """
+        payload = {
+            "file_path": file_path,
+            "filename": filename,
+            "sandbox_id": sandbox_id,
+            "sandbox_path": sandbox_path,
+        }
+        data = await self._requester.request(
+            "POST",
+            f"/disk/{disk_id}/artifact/download_to_sandbox",
+            json_data=payload,
+        )
+        return bool(data.get("success", False))
+
+    async def upload_from_sandbox(
+        self,
+        disk_id: str,
+        *,
+        sandbox_id: str,
+        sandbox_path: str,
+        sandbox_filename: str,
+        file_path: str,
+    ) -> Artifact:
+        """Upload a file from a sandbox environment to disk storage as an artifact.
+
+        Args:
+            disk_id: The UUID of the target disk.
+            sandbox_id: The UUID of the source sandbox.
+            sandbox_path: Source directory in the sandbox (not including filename).
+            sandbox_filename: Filename in the sandbox.
+            file_path: Destination directory path on the disk.
+
+        Returns:
+            Artifact containing the created artifact information.
+
+        Example:
+            ```python
+            artifact = await client.disks.artifacts.upload_from_sandbox(
+                disk_id="disk-uuid",
+                sandbox_id="sandbox-uuid",
+                sandbox_path="/home/user/",
+                sandbox_filename="output.txt",
+                file_path="/results/"
+            )
+            print(f"Created: {artifact.path}{artifact.filename}")
+            ```
+        """
+        payload = {
+            "sandbox_id": sandbox_id,
+            "sandbox_path": sandbox_path,
+            "sandbox_filename": sandbox_filename,
+            "file_path": file_path,
+        }
+        data = await self._requester.request(
+            "POST",
+            f"/disk/{disk_id}/artifact/upload_from_sandbox",
+            json_data=payload,
+        )
+        return Artifact.model_validate(data)