morphcloud 0.1.10__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- morphcloud/__init__.py +7 -0
- morphcloud/_asyncify.py +412 -0
- morphcloud/_bash_interpreter.py +226 -0
- morphcloud/_llm.py +431 -0
- morphcloud/_oci.py +605 -0
- morphcloud/_scramble.py +377 -0
- morphcloud/_ssh.py +181 -0
- morphcloud/_utils.py +46 -0
- morphcloud/api.py +529 -0
- morphcloud/cli.py +467 -0
- morphcloud-0.1.10.dist-info/LICENSE +21 -0
- morphcloud-0.1.10.dist-info/METADATA +81 -0
- morphcloud-0.1.10.dist-info/RECORD +15 -0
- morphcloud-0.1.10.dist-info/WHEEL +4 -0
- morphcloud-0.1.10.dist-info/entry_points.txt +3 -0
morphcloud/__init__.py
ADDED
morphcloud/_asyncify.py
ADDED
|
@@ -0,0 +1,412 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Asyncify - A Python module for transparent synchronous to asynchronous code conversion.
|
|
3
|
+
|
|
4
|
+
This module provides utilities to convert synchronous code to asynchronous code through
|
|
5
|
+
either AST rewriting or thread-based execution. It includes decorators and registration
|
|
6
|
+
functions to handle both function and class conversions.
|
|
7
|
+
|
|
8
|
+
Main components:
|
|
9
|
+
- register_async_equivalent: Maps sync functions to their async equivalents
|
|
10
|
+
- asyncify_transparent: Marks functions for AST-based rewriting
|
|
11
|
+
- asyncify: Main decorator for converting sync to async code
|
|
12
|
+
"""
|
|
13
|
+
|
|
14
|
+
# Standard library imports
|
|
15
|
+
import ast
|
|
16
|
+
import asyncio
|
|
17
|
+
import functools
|
|
18
|
+
import glob
|
|
19
|
+
import inspect
|
|
20
|
+
import json
|
|
21
|
+
import os
|
|
22
|
+
import shutil
|
|
23
|
+
import sqlite3
|
|
24
|
+
import subprocess
|
|
25
|
+
import textwrap
|
|
26
|
+
import time
|
|
27
|
+
import urllib.request
|
|
28
|
+
from types import FunctionType
|
|
29
|
+
from typing import Any, Callable, Dict, Optional
|
|
30
|
+
|
|
31
|
+
# Third-party imports
|
|
32
|
+
import requests
|
|
33
|
+
|
|
34
|
+
###############################################################################
|
|
35
|
+
# GLOBAL SYNC-TO-ASYNC REGISTRY
|
|
36
|
+
###############################################################################
|
|
37
|
+
|
|
38
|
+
# Global registry mapping synchronous functions to their async equivalents,
# keyed by the sync function's fully qualified "module.qualname".
SYNC_TO_ASYNC_MAP: Dict[str, Callable[..., Any]] = {}


def register_async_equivalent(sync_obj: Callable, async_obj: Callable):
    """
    Record *async_obj* as the asynchronous counterpart of *sync_obj*.

    The mapping is keyed by the sync callable's fully qualified name so the
    AST rewriter can look it up when it encounters a call site.

    Args:
        sync_obj: The synchronous function to register
        async_obj: The corresponding asynchronous function
    """
    registry_key = f"{sync_obj.__module__}.{sync_obj.__qualname__}"
    SYNC_TO_ASYNC_MAP[registry_key] = async_obj
|
|
54
|
+
|
|
55
|
+
|
|
56
|
+
###############################################################################
|
|
57
|
+
# ASYNCIFY_TRANSPARENT DECORATOR
|
|
58
|
+
###############################################################################
|
|
59
|
+
|
|
60
|
+
|
|
61
|
+
def asyncify_transparent(func: Callable):
    """
    Flag *func* as eligible for AST-based rewriting.

    No coroutine is produced here; the marker attribute is inspected later,
    when @asyncify is applied, to decide whether an AST rewrite should be
    attempted instead of the thread-based fallback.

    Args:
        func: The function to mark as transparent

    Returns:
        The original function with _asyncify_transparent attribute set
    """
    func._asyncify_transparent = True
    return func
|
|
76
|
+
|
|
77
|
+
|
|
78
|
+
###############################################################################
|
|
79
|
+
# AST REWRITING LOGIC
|
|
80
|
+
###############################################################################
|
|
81
|
+
|
|
82
|
+
# Global storage for async functions created during rewriting; the AST
# transformer injects references to these names into rewritten code.
__ASYNCIFY_GLOBAL_MAP__ = {}


def store_async_callable(async_func: Callable) -> str:
    """Stash *async_func* under a unique global name and return that name.

    The name is derived from the object's id, so it is stable for the
    lifetime of the callable and unique among live objects.
    """
    slot = f"__asyncify_func_{id(async_func)}"
    __ASYNCIFY_GLOBAL_MAP__[slot] = async_func
    return slot
|
|
91
|
+
|
|
92
|
+
|
|
93
|
+
def resolve_qualified_name(node: ast.expr, globals_dict: dict) -> Optional[str]:
    """
    Try to turn the callee of a call expression into a fully qualified name.

    Walks an Attribute/Name chain (e.g. ``time.sleep``) into a dotted string
    and checks it against SYNC_TO_ASYNC_MAP; a bare name is additionally
    resolved through *globals_dict* to its module-qualified key.

    Args:
        node: The AST node to analyze
        globals_dict: Global namespace dictionary

    Returns:
        Optional[str]: The fully qualified name if found, None otherwise
    """
    # Collect attribute segments innermost-last, then reverse.
    pieces = []
    cursor = node
    while isinstance(cursor, ast.Attribute):
        pieces.append(cursor.attr)
        cursor = cursor.value
    if not isinstance(cursor, ast.Name):
        # Callee is something we can't name statically (e.g. a call result).
        return None
    pieces.append(cursor.id)
    pieces.reverse()

    dotted = ".".join(pieces)

    # Direct hit: the dotted form itself is registered.
    if dotted in SYNC_TO_ASYNC_MAP:
        return dotted

    # A bare name may refer to a global whose module-qualified key is known.
    if len(pieces) == 1:
        candidate = globals_dict.get(pieces[0])
        if callable(candidate):
            mod = getattr(candidate, "__module__", None)
            qn = getattr(candidate, "__qualname__", None)
            if mod and qn:
                full_key = f"{mod}.{qn}"
                if full_key in SYNC_TO_ASYNC_MAP:
                    return full_key
    return None
|
|
134
|
+
|
|
135
|
+
|
|
136
|
+
class AsyncifyTransformer(ast.NodeTransformer):
    """AST transformer that converts sync function calls to async equivalents.

    Calls whose callee resolves to a key in *sync_to_async_map* are replaced
    with ``await <stored_async_func>(...)``; any function definition whose
    own body gained an await is converted to ``async def``.
    """

    def __init__(self, sync_to_async_map: Dict[str, Callable], globals_dict: dict):
        self.sync_to_async_map = sync_to_async_map
        self.globals_dict = globals_dict
        # True once at least one call anywhere in the tree has been rewritten.
        self.found_async_calls = False
        super().__init__()

    def visit_FunctionDef(self, node: ast.FunctionDef):
        """Transform a function definition to async if its body gained awaits.

        The flag is saved and restored around the recursive visit so that a
        rewrite inside one function no longer leaks into sibling function
        definitions (previously any later function in the same tree would be
        made async even if it contained no convertible calls).
        """
        outer_flag = self.found_async_calls
        self.found_async_calls = False
        self.generic_visit(node)
        converted_here = self.found_async_calls
        # Preserve the tree-wide "anything converted" signal for callers.
        self.found_async_calls = outer_flag or converted_here
        if converted_here:
            return ast.AsyncFunctionDef(
                name=node.name,
                args=node.args,
                body=node.body,
                decorator_list=node.decorator_list,
                returns=node.returns,
                type_comment=node.type_comment,
            )
        return node

    def visit_Call(self, node: ast.Call):
        """Replace a registered sync call with ``await async_equivalent(...)``."""
        self.generic_visit(node)
        full_name = resolve_qualified_name(node.func, self.globals_dict)
        if full_name and full_name in self.sync_to_async_map:
            async_func = self.sync_to_async_map[full_name]
            # Publish the replacement under a unique global name and await it
            # in place of the original call, preserving args and keywords.
            new_name = store_async_callable(async_func)
            new_func = ast.Name(id=new_name, ctx=ast.Load())
            new_call = ast.Await(
                value=ast.Call(func=new_func, args=node.args, keywords=node.keywords)
            )
            self.found_async_calls = True
            return new_call
        return node
|
|
173
|
+
|
|
174
|
+
|
|
175
|
+
def try_ast_rewrite(
    func: Callable, sync_to_async_map: Dict[str, Callable]
) -> Optional[Callable]:
    """
    Attempt to rewrite a function's AST to convert sync calls to async.

    Args:
        func: The function to rewrite
        sync_to_async_map: Map of sync functions to their async equivalents

    Returns:
        Optional[Callable]: The rewritten async function if successful, None otherwise
    """
    # Source may be unavailable (C extensions, builtins, REPL-defined funcs).
    try:
        source = inspect.getsource(func)
    except (OSError, TypeError):
        return None

    # Dedent so a method or nested function parses as a top-level def.
    source = textwrap.dedent(source)
    tree = ast.parse(source)

    transformer = AsyncifyTransformer(sync_to_async_map, func.__globals__)
    new_tree = transformer.visit(tree)
    ast.fix_missing_locations(new_tree)

    # Nothing was converted: tell the caller to use the thread fallback.
    if not transformer.found_async_calls:
        return None

    # Re-execute the rewritten definition in a copy of the function's globals,
    # augmented with the async replacements the transformer stored.
    # NOTE(review): the rewritten source still carries the original decorator
    # list, so decorators are re-applied during exec — confirm this is
    # intended (an @asyncify decorator here would recurse).
    code = compile(new_tree, filename="<asyncify>", mode="exec")
    new_globals = func.__globals__.copy()
    new_globals.update(__ASYNCIFY_GLOBAL_MAP__)
    exec(code, new_globals)

    # Only accept the result if exec actually produced a coroutine function
    # under the original name.
    new_func = new_globals.get(func.__name__, None)
    if new_func and asyncio.iscoroutinefunction(new_func):
        new_func.__name__ = func.__name__
        new_func.__doc__ = func.__doc__
        return new_func
    return None
|
|
214
|
+
|
|
215
|
+
|
|
216
|
+
###############################################################################
|
|
217
|
+
# ASYNCIFY DECORATOR
|
|
218
|
+
###############################################################################
|
|
219
|
+
|
|
220
|
+
|
|
221
|
+
def asyncify(obj: Any):
    """
    Main decorator for converting sync code to async.

    When applied to a function:
    - Creates async_{fn_name} globally
    - If transparent, attempts AST rewriting
    - Falls back to to_thread if needed

    When applied to a class:
    - Creates new class Async{ClassName}
    - Converts methods to async_{method}
    - Uses AST rewriting or to_thread as appropriate

    Args:
        obj: Function or class to convert

    Returns:
        The original object (function) or new async class

    Raises:
        TypeError: If applied to anything other than a function or class
    """
    # Dispatch with guard clauses instead of an if/elif/else chain.
    if inspect.isclass(obj):
        return _asyncify_class(obj)
    if isinstance(obj, FunctionType):
        return _asyncify_function(obj)
    raise TypeError("asyncify can only be applied to functions or classes")
|
|
250
|
+
|
|
251
|
+
|
|
252
|
+
def _asyncify_function(func: Callable):
|
|
253
|
+
"""Helper function to asyncify a single function."""
|
|
254
|
+
async_func = None
|
|
255
|
+
if getattr(func, "_asyncify_transparent", False):
|
|
256
|
+
# Try AST rewriting for transparent functions
|
|
257
|
+
async_func = try_ast_rewrite(func, SYNC_TO_ASYNC_MAP)
|
|
258
|
+
if async_func is not None:
|
|
259
|
+
# Register successful rewrite
|
|
260
|
+
register_async_equivalent(func, async_func)
|
|
261
|
+
else:
|
|
262
|
+
# Fallback to to_thread
|
|
263
|
+
@functools.wraps(func)
|
|
264
|
+
async def async_func(*args, **kwargs):
|
|
265
|
+
return await asyncio.to_thread(func, *args, **kwargs)
|
|
266
|
+
|
|
267
|
+
else:
|
|
268
|
+
# Not transparent, always use to_thread
|
|
269
|
+
@functools.wraps(func)
|
|
270
|
+
async def async_func(*args, **kwargs):
|
|
271
|
+
return await asyncio.to_thread(func, *args, **kwargs)
|
|
272
|
+
|
|
273
|
+
# Register in globals
|
|
274
|
+
async_name = f"async_{func.__name__}"
|
|
275
|
+
globals()[async_name] = async_func
|
|
276
|
+
return func
|
|
277
|
+
|
|
278
|
+
|
|
279
|
+
def _asyncify_class(cls: type):
|
|
280
|
+
"""Helper function to asyncify a class."""
|
|
281
|
+
new_attrs = {}
|
|
282
|
+
for name, value in cls.__dict__.items():
|
|
283
|
+
if callable(value) and not (name.startswith("__") and name.endswith("__")):
|
|
284
|
+
if getattr(value, "_asyncify_transparent", False):
|
|
285
|
+
# Try rewriting transparent methods
|
|
286
|
+
maybe_async = try_ast_rewrite(value, SYNC_TO_ASYNC_MAP)
|
|
287
|
+
if maybe_async is not None:
|
|
288
|
+
async_method = maybe_async
|
|
289
|
+
else:
|
|
290
|
+
# Fallback to thread
|
|
291
|
+
@functools.wraps(value)
|
|
292
|
+
async def async_method(*args, __value=value, **kwargs):
|
|
293
|
+
return await asyncio.to_thread(__value, *args, **kwargs)
|
|
294
|
+
|
|
295
|
+
else:
|
|
296
|
+
# Not transparent, use thread
|
|
297
|
+
@functools.wraps(value)
|
|
298
|
+
async def async_method(*args, __value=value, **kwargs):
|
|
299
|
+
return await asyncio.to_thread(__value, *args, **kwargs)
|
|
300
|
+
|
|
301
|
+
async_name = f"async_{name}"
|
|
302
|
+
new_attrs[async_name] = async_method
|
|
303
|
+
else:
|
|
304
|
+
new_attrs[name] = value
|
|
305
|
+
|
|
306
|
+
new_cls_name = f"Async{cls.__name__}"
|
|
307
|
+
new_cls = type(new_cls_name, cls.__bases__, new_attrs)
|
|
308
|
+
|
|
309
|
+
# Register in globals
|
|
310
|
+
globals()[new_cls_name] = new_cls
|
|
311
|
+
return new_cls
|
|
312
|
+
|
|
313
|
+
|
|
314
|
+
###############################################################################
|
|
315
|
+
# ASYNC EQUIVALENTS REGISTRATION
|
|
316
|
+
###############################################################################
|
|
317
|
+
|
|
318
|
+
|
|
319
|
+
# Standard library async equivalents
async def async_requests_get(url, **kwargs):
    """Async equivalent for requests.get using aiohttp.

    NOTE(review): returns the decoded response body (str), whereas
    requests.get returns a Response object — rewritten callers must only
    consume the body text. Confirm this asymmetry is intended.
    """
    import aiohttp

    # Session and response are both closed by the context managers.
    async with aiohttp.ClientSession() as session:
        async with session.get(url, **kwargs) as response:
            return await response.text()
|
|
327
|
+
|
|
328
|
+
|
|
329
|
+
async def async_open(*args, **kwargs):
    """Async equivalent for open using aiofiles.

    NOTE(review): the result of aiofiles.open() is returned without being
    awaited or entered, so callers receive aiofiles' helper object rather
    than an opened file — confirm call sites use it with ``async with``.
    """
    import aiofiles

    return aiofiles.open(*args, **kwargs)
|
|
334
|
+
|
|
335
|
+
|
|
336
|
+
async def async_subprocess_run(*cmd, **kwargs):
    """Async equivalent for subprocess.run.

    Accepts either subprocess.run-style usage, where the whole argv is one
    list/tuple argument (the shape produced when the AST rewriter replaces
    ``subprocess.run([...])``), or the argv parts as separate positional
    arguments.

    Returns:
        Tuple of (stdout_bytes, stderr_bytes, returncode).
    """
    # subprocess.run takes the argv as a single sequence, but
    # create_subprocess_exec wants the parts unpacked — support both shapes.
    # (Previously a rewritten subprocess.run([...]) call crashed here.)
    if len(cmd) == 1 and isinstance(cmd[0], (list, tuple)):
        cmd = tuple(cmd[0])
    proc = await asyncio.create_subprocess_exec(
        *cmd, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE, **kwargs
    )
    stdout, stderr = await proc.communicate()
    return stdout, stderr, proc.returncode
|
|
343
|
+
|
|
344
|
+
|
|
345
|
+
async def async_urlopen(url, **kwargs):
    """Async equivalent for urllib.request.urlopen using aiohttp.

    NOTE(review): returns the raw response body (bytes) rather than a
    file-like response object — confirm rewritten callers only read the body.
    """
    import aiohttp

    # Session and response are both closed by the context managers.
    async with aiohttp.ClientSession() as session:
        async with session.get(url, **kwargs) as response:
            return await response.read()
|
|
352
|
+
|
|
353
|
+
|
|
354
|
+
async def async_sqlite_connect(database, **kwargs):
    """Async equivalent for sqlite3.connect using aiosqlite.

    Awaits the aiosqlite connection and returns it; the caller is
    responsible for closing the connection.
    """
    import aiosqlite

    return await aiosqlite.connect(database, **kwargs)
|
|
359
|
+
|
|
360
|
+
|
|
361
|
+
async def async_glob(pattern):
    """Async equivalent for glob.glob: offloads the scan to a worker thread."""
    matches = await asyncio.to_thread(glob.glob, pattern)
    return matches
|
|
364
|
+
|
|
365
|
+
|
|
366
|
+
async def async_listdir(path="."):
    """Async equivalent for os.listdir: delegates the scan to a worker thread."""
    entries = await asyncio.to_thread(os.listdir, path)
    return entries
|
|
369
|
+
|
|
370
|
+
|
|
371
|
+
async def async_copyfile(src, dst, *, chunk_size=65536):
    """Async equivalent for shutil.copyfile using aiofiles.

    Copies *src* to *dst* in chunk_size-byte chunks without blocking the
    event loop. Always returns None.
    NOTE(review): shutil.copyfile returns *dst* — confirm no rewritten
    caller relies on the return value.
    """
    import aiofiles

    # Stream in fixed-size chunks so large files never sit fully in memory.
    async with aiofiles.open(src, "rb") as fsrc, aiofiles.open(dst, "wb") as fdst:
        while True:
            chunk = await fsrc.read(chunk_size)
            if not chunk:
                break
            await fdst.write(chunk)
    return None
|
|
382
|
+
|
|
383
|
+
|
|
384
|
+
async def async_json_load(file):
    """Async equivalent for json.load using aiofiles.

    NOTE(review): *file* is opened here, so it must be a path, not the open
    file object json.load expects — confirm rewritten call sites pass paths.
    """
    import aiofiles

    async with aiofiles.open(file, "r") as f:
        content = await f.read()
        # Parse after reading the whole file; parsing itself is synchronous.
        return json.loads(content)


async def async_json_dump(obj, file):
    """Async equivalent for json.dump using aiofiles.

    NOTE(review): *file* is opened here, so it must be a path, not an open
    file object; json.dump keyword options are not supported.
    """
    import aiofiles

    async with aiofiles.open(file, "w") as f:
        await f.write(json.dumps(obj))
|
|
399
|
+
|
|
400
|
+
|
|
401
|
+
# Register all async equivalents so the AST rewriter can swap these
# well-known blocking calls for their awaitable counterparts.
register_async_equivalent(time.sleep, asyncio.sleep)
register_async_equivalent(requests.get, async_requests_get)
register_async_equivalent(open, async_open)  # builtins.open
register_async_equivalent(subprocess.run, async_subprocess_run)
register_async_equivalent(urllib.request.urlopen, async_urlopen)
register_async_equivalent(sqlite3.connect, async_sqlite_connect)
register_async_equivalent(glob.glob, async_glob)
register_async_equivalent(os.listdir, async_listdir)
register_async_equivalent(shutil.copyfile, async_copyfile)
register_async_equivalent(json.load, async_json_load)
register_async_equivalent(json.dump, async_json_dump)
|
|
@@ -0,0 +1,226 @@
|
|
|
1
|
+
import re
|
|
2
|
+
import time
|
|
3
|
+
|
|
4
|
+
from enum import Enum
|
|
5
|
+
from dataclasses import dataclass
|
|
6
|
+
from typing import Optional, Tuple, Dict
|
|
7
|
+
|
|
8
|
+
import paramiko
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
class OSCType(Enum):
    """OSC 133 Semantic Prompt Sequence Types"""

    PROMPT_START = "P"  # Start of a prompt (carries a k=i or k=s parameter)
    COMMAND_START = "C"  # Command execution begins
    COMMAND_DONE = "D"  # Command finished; carries the exit code
    PROMPT_CONT = "A"  # Prompt continuation (carries a cl parameter)
    BLOCK_END = "B"  # End of an output block


class PromptKind(Enum):
    """OSC 133 Prompt Types (k parameter)"""

    INITIAL = "i"  # First prompt of the session
    SECONDARY = "s"  # Secondary prompt (like PS2)


class ContinuationKind(Enum):
    """OSC 133 Continuation Types (cl parameter)"""

    MESSAGE = "m"  # Message continuation
    PARTIAL = "p"  # Partial line
    COMPLETE = "c"  # Complete line
|
|
34
|
+
|
|
35
|
+
|
|
36
|
+
@dataclass
class OSCParams:
    """Structured representation of OSC 133 parameters"""

    type: OSCType
    prompt_kind: Optional[PromptKind] = None
    continuation: Optional[ContinuationKind] = None
    exit_code: Optional[int] = None
    aid: Optional[int] = None  # Activity ID
    # Raw key=value parameters as parsed from the sequence. The previous
    # annotation (Dict[str, str] = None) lied about the default; it is
    # Optional and remains None unless from_sequence() populated it.
    raw_params: Optional[Dict[str, str]] = None

    @classmethod
    def from_sequence(cls, sequence: str) -> "OSCParams":
        """Parse an OSC 133 payload (the text after "133;") into params.

        Raises:
            ValueError: if the sequence type letter is unknown.
        """
        # str.split always yields at least one element, so no emptiness
        # check is needed (the old `if not parts` guard was dead code).
        parts = sequence.split(";")

        # Block-end sequences carry no parameters; short-circuit them.
        type_str = parts[0]
        if type_str == "B":
            return cls(type=OSCType.BLOCK_END, raw_params={})

        try:
            osc_type = OSCType(type_str)
        except ValueError:
            raise ValueError(f"Unknown sequence type: {type_str}")

        raw_params = {}
        exit_code = None

        # A "D" (command done) sequence carries the exit code as the first
        # positional parameter, e.g. "D;0;aid=12".
        if osc_type == OSCType.COMMAND_DONE and len(parts) > 1:
            try:
                exit_code = int(parts[1])
                raw_params["exit_code"] = parts[1]
            except ValueError:
                pass

        # Remaining parameters are key=value pairs.
        for part in parts[1:]:
            if "=" in part:
                key, value = part.split("=", 1)
                raw_params[key] = value

        return cls(
            type=osc_type,
            prompt_kind=PromptKind(raw_params["k"]) if "k" in raw_params else None,
            continuation=ContinuationKind(raw_params["cl"])
            if "cl" in raw_params
            else None,
            # Positional exit code wins; fall back to an explicit
            # "exit_code=N" key=value parameter if one was present.
            exit_code=exit_code
            if exit_code is not None
            else (int(raw_params["exit_code"]) if "exit_code" in raw_params else None),
            aid=int(raw_params["aid"]) if "aid" in raw_params else None,
            raw_params=raw_params,
        )
|
|
94
|
+
|
|
95
|
+
|
|
96
|
+
@dataclass
class CommandResult:
    """Structured result of one command executed over the semantic shell."""

    prompt: str  # The shell prompt (e.g., "user@host:~$")
    command: str  # The command that was executed
    output: str  # The command's output (stdout/stderr)
    exit_code: int  # Command exit code
    osc_params: Dict[str, OSCParams]  # Structured OSC parameters, keyed "seq_N"
|
|
103
|
+
|
|
104
|
+
|
|
105
|
+
class SemanticShellClient:
    """Interactive SSH shell client driven by OSC 133 semantic prompt
    sequences (ESC ] 133 ; ... BEL) that delimit prompts, commands, and
    command output.

    Assumes the remote shell emits OSC 133 escapes including an ``aid=``
    parameter on the D (done) and A (continuation) sequences — TODO confirm
    against the target shell configuration.
    """

    def __init__(
        self,
        hostname: str,
        username: str,
        port: int = 2222,
    ):
        # NOTE(review): AutoAddPolicy silently trusts unknown host keys —
        # acceptable for dev machines, not for untrusted networks.
        self.client = paramiko.SSHClient()
        self.client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
        self.client.connect(
            hostname=hostname,
            username=username,
            port=port,
        )
        self.channel = self.client.invoke_shell()
        # Accumulates raw channel output until a completion sequence is seen.
        self.buffer = ""
        # Wait for initial prompt
        self._read_until_prompt()

    def _parse_osc_sequences(self, text: str) -> Dict[str, OSCParams]:
        """Parse all OSC sequences in the text.

        Returns a dict keyed "seq_0", "seq_1", ... in order of appearance;
        sequences that fail to parse are skipped.
        """
        params = {}
        sequences = re.finditer(r"\x1b]133;([^\x07]*)\x07", text)
        for i, seq in enumerate(sequences):
            try:
                params[f"seq_{i}"] = OSCParams.from_sequence(seq.group(1))
            except ValueError as e:
                # Uncomment for debugging
                # print(f"Error parsing sequence: {e}")
                continue
        return params

    def _split_repl_parts(
        self, text: str
    ) -> Tuple[str, str, str, Dict[str, OSCParams]]:
        """
        Split the shell output into prompt, command output, and OSC parameters.
        Returns (prompt, command_output, output, osc_params)
        """
        # Parse OSC sequences first
        osc_params = self._parse_osc_sequences(text)

        # Clean the text of OSC sequences. 133 is the semantic-prompt set;
        # 122 is presumably a terminal-specific extension — TODO confirm.
        clean_text = re.sub(r"\x1b]133;[^\x07]*\x07", "", text)
        clean_text = re.sub(r"\x1b]122;[^\x07]*\x07", "", clean_text)

        # Split into lines
        lines = clean_text.split("\r\n")

        # Extract parts
        prompt = lines[-1] if lines else ""  # Last line is the new prompt
        command_output = (
            lines[0] if len(lines) > 0 else ""
        )  # First line contains command echo
        output = (
            "\n".join(lines[1:-1]) if len(lines) > 2 else ""
        )  # Middle lines are command output

        # Clean up any remaining control characters (e.g. SGR color escapes)
        prompt = re.sub(r"\x1b[^m]*m", "", prompt).strip()
        command_output = re.sub(r"\x1b[^m]*m", "", command_output).strip()
        output = re.sub(r"\x1b[^m]*m", "", output).strip()

        return prompt, command_output, output, osc_params

    def _read_until_prompt(self, timeout: float = 30) -> Tuple[str, int]:
        """
        Read the shell output until we see the OSC 133 command completion sequence.
        Returns the output and the exit code.

        Raises:
            TimeoutError: if no completion sequence followed by a prompt
                indicator arrives within *timeout* seconds.
        """
        start_time = time.time()
        output = []
        exit_code = None

        while time.time() - start_time < timeout:
            if self.channel.recv_ready():
                # NOTE(review): a fixed 4096-byte read can split a multi-byte
                # UTF-8 character or an escape sequence across chunks —
                # confirm acceptable for the expected workloads.
                chunk = self.channel.recv(4096).decode("utf-8")
                self.buffer += chunk
                output.append(chunk)

                # Look for the command completion sequence
                completion_match = re.search(
                    r"\x1b]133;D;(\d+);aid=\d+\x07", self.buffer
                )
                if completion_match:
                    exit_code = int(completion_match.group(1))

                    # Look for the next prompt indicator
                    if re.search(r"\x1b]133;A;cl=m;aid=\d+\x07", self.buffer):
                        complete_output = "".join(output)
                        self.buffer = ""
                        return complete_output, exit_code

            # Poll at 10 Hz to avoid busy-waiting on the channel.
            time.sleep(0.1)
        raise TimeoutError("Timed out waiting for command completion")

    def execute_command(self, command: str, timeout: float = 30) -> CommandResult:
        """
        Execute a command and wait for its completion.
        Returns a CommandResult with structured output and OSC parameters.
        """
        # Send the command
        self.channel.send(command + "\n")

        # Wait for completion and return output
        raw_output, exit_code = self._read_until_prompt(timeout)
        prompt, command_output, output, osc_params = self._split_repl_parts(raw_output)

        return CommandResult(
            prompt=prompt,
            command=command_output
            or command,  # Use echo'd command or original if not found
            output=output,
            # NOTE(review): a missing/None exit code is reported as 0
            # (success) — confirm this default is intended.
            exit_code=exit_code or 0,
            osc_params=osc_params,
        )

    def close(self):
        """Close the SSH connection."""
        self.channel.close()
        self.client.close()
|
|
226
|
+
|