isolate 0.22.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- isolate/__init__.py +3 -0
- isolate/_isolate_version.py +34 -0
- isolate/_version.py +6 -0
- isolate/backends/__init__.py +2 -0
- isolate/backends/_base.py +132 -0
- isolate/backends/common.py +259 -0
- isolate/backends/conda.py +215 -0
- isolate/backends/container.py +64 -0
- isolate/backends/local.py +46 -0
- isolate/backends/pyenv.py +143 -0
- isolate/backends/remote.py +141 -0
- isolate/backends/settings.py +121 -0
- isolate/backends/virtualenv.py +204 -0
- isolate/common/__init__.py +0 -0
- isolate/common/timestamp.py +15 -0
- isolate/connections/__init__.py +21 -0
- isolate/connections/_local/__init__.py +2 -0
- isolate/connections/_local/_base.py +190 -0
- isolate/connections/_local/agent_startup.py +53 -0
- isolate/connections/common.py +121 -0
- isolate/connections/grpc/__init__.py +1 -0
- isolate/connections/grpc/_base.py +175 -0
- isolate/connections/grpc/agent.py +284 -0
- isolate/connections/grpc/configuration.py +23 -0
- isolate/connections/grpc/definitions/__init__.py +11 -0
- isolate/connections/grpc/definitions/agent.proto +18 -0
- isolate/connections/grpc/definitions/agent_pb2.py +29 -0
- isolate/connections/grpc/definitions/agent_pb2.pyi +44 -0
- isolate/connections/grpc/definitions/agent_pb2_grpc.py +68 -0
- isolate/connections/grpc/definitions/common.proto +49 -0
- isolate/connections/grpc/definitions/common_pb2.py +35 -0
- isolate/connections/grpc/definitions/common_pb2.pyi +152 -0
- isolate/connections/grpc/definitions/common_pb2_grpc.py +4 -0
- isolate/connections/grpc/interface.py +71 -0
- isolate/connections/ipc/__init__.py +5 -0
- isolate/connections/ipc/_base.py +225 -0
- isolate/connections/ipc/agent.py +205 -0
- isolate/logger.py +53 -0
- isolate/logs.py +76 -0
- isolate/py.typed +0 -0
- isolate/registry.py +53 -0
- isolate/server/__init__.py +1 -0
- isolate/server/definitions/__init__.py +13 -0
- isolate/server/definitions/server.proto +80 -0
- isolate/server/definitions/server_pb2.py +56 -0
- isolate/server/definitions/server_pb2.pyi +241 -0
- isolate/server/definitions/server_pb2_grpc.py +205 -0
- isolate/server/health/__init__.py +11 -0
- isolate/server/health/health.proto +23 -0
- isolate/server/health/health_pb2.py +32 -0
- isolate/server/health/health_pb2.pyi +66 -0
- isolate/server/health/health_pb2_grpc.py +99 -0
- isolate/server/health_server.py +40 -0
- isolate/server/interface.py +27 -0
- isolate/server/server.py +735 -0
- isolate-0.22.0.dist-info/METADATA +88 -0
- isolate-0.22.0.dist-info/RECORD +61 -0
- isolate-0.22.0.dist-info/WHEEL +5 -0
- isolate-0.22.0.dist-info/entry_points.txt +7 -0
- isolate-0.22.0.dist-info/licenses/LICENSE +201 -0
- isolate-0.22.0.dist-info/top_level.txt +1 -0
|
@@ -0,0 +1,175 @@
|
|
|
1
|
+
import os
|
|
2
|
+
import socket
|
|
3
|
+
import subprocess
|
|
4
|
+
from contextlib import contextmanager
|
|
5
|
+
from dataclasses import dataclass
|
|
6
|
+
from pathlib import Path
|
|
7
|
+
from typing import Any, ContextManager, Iterator, List, Tuple, Union, cast
|
|
8
|
+
|
|
9
|
+
import grpc
|
|
10
|
+
|
|
11
|
+
from isolate.backends import (
|
|
12
|
+
BasicCallable,
|
|
13
|
+
CallResultType,
|
|
14
|
+
EnvironmentConnection,
|
|
15
|
+
)
|
|
16
|
+
from isolate.connections._local import PythonExecutionBase, agent_startup
|
|
17
|
+
from isolate.connections.common import serialize_object
|
|
18
|
+
from isolate.connections.grpc import agent, definitions
|
|
19
|
+
from isolate.connections.grpc.configuration import get_default_options
|
|
20
|
+
from isolate.connections.grpc.interface import from_grpc
|
|
21
|
+
from isolate.logs import LogLevel, LogSource
|
|
22
|
+
|
|
23
|
+
|
|
24
|
+
class AgentError(Exception):
    """An internal problem caused by (most probably) the agent."""


# Grace period (seconds) given to the agent process to exit after a
# terminate() before it gets force-killed; overridable through the
# ISOLATE_SHUTDOWN_GRACE_PERIOD environment variable.
PROCESS_SHUTDOWN_TIMEOUT_SECONDS = float(
    os.getenv("ISOLATE_SHUTDOWN_GRACE_PERIOD", "60")
)
|
|
31
|
+
|
|
32
|
+
|
|
33
|
+
@dataclass
class GRPCExecutionBase(EnvironmentConnection):
    """A customizable gRPC-based execution backend."""

    def start_agent(self) -> ContextManager[Tuple[str, grpc.ChannelCredentials]]:
        """Starts the gRPC agent and returns the address it is listening on and
        the required credentials to connect to it."""
        raise NotImplementedError

    def abort_agent(self) -> None:
        # Subclasses must stop whatever start_agent() spawned.
        raise NotImplementedError

    @contextmanager
    def _establish_bridge(
        self,
        *,
        max_wait_timeout: float = 20.0,
    ) -> Iterator[definitions.AgentStub]:
        """Start the agent, open a secure channel to it, wait at most
        `max_wait_timeout` seconds for the channel to become ready, and
        yield an AgentStub bound to that channel.

        Raises AgentError if the agent does not become reachable in time.
        """
        with self.start_agent() as (address, credentials):
            with grpc.secure_channel(
                address,
                credentials,
                options=get_default_options(),
            ) as channel:
                channel_status = grpc.channel_ready_future(channel)
                try:
                    channel_status.result(timeout=max_wait_timeout)
                except grpc.FutureTimeoutError:
                    raise AgentError(
                        "Couldn't connect to the gRPC server in the agent "
                        f"(listening at {address}) in time."
                    )
                stub = definitions.AgentStub(channel)
                # Keep a reference to the channel on the stub so the channel
                # stays alive for as long as the stub is used.
                stub._channel = channel  # type: ignore
                yield stub

    def run(
        self,
        executable: BasicCallable,
        *args: Any,
        **kwargs: Any,
    ) -> CallResultType:  # type: ignore[type-var]
        """Serialize `executable`, ship it to the agent over gRPC, stream
        the agent's logs to our log handler, and return the deserialized
        result of the execution.

        Raises AgentError when the agent never produces a result.
        """
        # Implementation details
        # ======================
        #
        # RPC Flow:
        # ---------
        # 1. [controller]: Spawn the agent.
        # 2. [agent]: Start listening at the given address.
        # 3. [controller]: Await *at most* max_wait_timeout seconds for the agent to
        #                  be available if it doesn't do it until then,
        #                  raise an AgentError.
        # 4. [controller]: If the server is available, then establish the bridge and
        #                  pass the 'function' as the input.
        # 5. [agent]: Receive the function, deserialize it, start the execution.
        # 6. [controller]: Watch agent for logs (stdout/stderr), and as soon as they
        #                  appear call the log handler.
        # 7. [agent]: Once the execution of the function is finished, send the
        #             result using the same serialization method.
        # 8. [controller]: Receive the result back and return it.

        method = self.environment.settings.serialization_method
        function = definitions.SerializedObject(
            method=method,
            definition=serialize_object(method, executable),
            was_it_raised=False,
            stringized_traceback=None,
        )
        function_call = definitions.FunctionCall(
            function=function,
        )

        with self._establish_bridge() as bridge:
            for partial_result in bridge.Run(function_call):
                # Forward every log the agent streamed with this partial result.
                for raw_log in partial_result.logs:
                    log = from_grpc(raw_log)
                    self.log(log.message, level=log.level, source=log.source)

                if partial_result.is_complete:
                    if not partial_result.result:
                        raise AgentError(
                            "The agent didn't return a result, but it should have."
                        )

                    return cast(CallResultType, from_grpc(partial_result.result))

        raise AgentError(
            "No result object was received from the agent "
            "(it never set is_complete to True)."
        )
|
|
123
|
+
|
|
124
|
+
|
|
125
|
+
class LocalPythonGRPC(PythonExecutionBase[str], GRPCExecutionBase):
    """Runs the gRPC agent as a local Python subprocess."""

    # The currently running agent subprocess, if any.
    _process: Union[None, subprocess.Popen] = None

    @contextmanager
    def start_agent(self) -> Iterator[Tuple[str, grpc.ChannelCredentials]]:
        """Spawn the agent process on a free local port and yield the
        address it listens on along with local channel credentials.
        The agent is always shut down when the context exits."""

        def find_free_port() -> Tuple[str, int]:
            """Find a free port in the system."""
            # NOTE(review): this is inherently racy — the port may be taken
            # between closing the probe socket and the agent binding to it.
            # It is the conventional best-effort approach.
            with socket.socket() as _temp_socket:
                _temp_socket.bind(("", 0))
                return _temp_socket.getsockname()

        host, port = find_free_port()
        address = f"{host}:{port}"
        self._process = None
        try:
            with self.start_process(address) as process:
                self._process = process
                yield address, grpc.local_channel_credentials()
        finally:
            self.abort_agent()

    def abort_agent(self) -> None:
        """Stop the agent process: terminate() and wait up to
        PROCESS_SHUTDOWN_TIMEOUT_SECONDS for a graceful exit, then
        kill() as a last resort. Safe to call more than once."""
        # Clear the reference first so repeated calls (e.g. start_agent's
        # `finally` plus an external caller) don't act on the same process.
        process, self._process = self._process, None
        if process is None:
            return
        try:
            print("Terminating the agent process...")
            process.terminate()
            process.wait(timeout=PROCESS_SHUTDOWN_TIMEOUT_SECONDS)
            print("Agent process shutdown gracefully")
        except Exception as exc:
            print(f"Failed to shutdown the agent process gracefully: {exc}")
            process.kill()
            # Reap the killed process so it doesn't linger as a zombie.
            process.wait()

    def get_python_cmd(
        self,
        executable: Path,
        connection: str,
        log_fd: int,
    ) -> List[Union[str, Path]]:
        """Build the command line that launches the agent under the given
        Python interpreter, passing the bridge address and the log fd."""
        return [
            executable,
            agent_startup.__file__,
            agent.__file__,
            connection,
            "--log-fd",
            str(log_fd),
        ]

    def handle_agent_log(
        self, line: str, *, level: LogLevel, source: LogSource
    ) -> None:
        # Forward agent output straight to the environment's log handler.
        self.log(line, level=level, source=source)
|
|
@@ -0,0 +1,284 @@
|
|
|
1
|
+
# agent-requires: isolate[server]
|
|
2
|
+
"""
|
|
3
|
+
This file contains the implementation of the gRPC agent. The agent is a
|
|
4
|
+
separate process that is responsible for running the user code in a
|
|
5
|
+
sandboxed environment.
|
|
6
|
+
|
|
7
|
+
This file is referenced by the latest version of the `isolate` package
|
|
8
|
+
but then runs it in the context of the frozen agent built environment.
|
|
9
|
+
"""
|
|
10
|
+
|
|
11
|
+
from __future__ import annotations
|
|
12
|
+
|
|
13
|
+
import asyncio
|
|
14
|
+
import os
|
|
15
|
+
import signal
|
|
16
|
+
import sys
|
|
17
|
+
import traceback
|
|
18
|
+
from argparse import ArgumentParser
|
|
19
|
+
from concurrent import futures
|
|
20
|
+
from dataclasses import dataclass
|
|
21
|
+
from typing import (
|
|
22
|
+
Any,
|
|
23
|
+
AsyncIterator,
|
|
24
|
+
Iterable,
|
|
25
|
+
)
|
|
26
|
+
|
|
27
|
+
from grpc import StatusCode, aio, local_server_credentials
|
|
28
|
+
|
|
29
|
+
from isolate.connections.grpc.definitions import PartialRunResult
|
|
30
|
+
|
|
31
|
+
# The agent may run inside a frozen environment where the `isolate`
# package itself is not importable; fall back to a placeholder version.
try:
    from isolate import __version__ as agent_version
except ImportError:
    agent_version = "UNKNOWN"
|
|
35
|
+
|
|
36
|
+
from isolate.backends.common import sha256_digest_of
|
|
37
|
+
from isolate.connections.common import SerializationError, serialize_object
|
|
38
|
+
from isolate.connections.grpc import definitions
|
|
39
|
+
from isolate.connections.grpc.configuration import get_default_options
|
|
40
|
+
from isolate.connections.grpc.interface import from_grpc
|
|
41
|
+
|
|
42
|
+
|
|
43
|
+
@dataclass
class AbortException(Exception):
    """Internal control-flow exception: stop the current RPC and report
    `message` back to the client via `abort_with_msg`."""

    message: str
|
|
46
|
+
|
|
47
|
+
|
|
48
|
+
class AgentServicer(definitions.AgentServicer):
    """gRPC servicer that deserializes incoming functions, executes them,
    and streams the (serialized) results back to the controller."""

    def __init__(self, log_fd: int | None = None):
        """Initialize the servicer.

        Args:
            log_fd: Optional file descriptor to write agent logs to;
                when None, logs go to stdout.
        """
        super().__init__()

        # Cache of setup-function results keyed by a digest of the
        # serialized definition, so an identical setup runs only once.
        self._run_cache: dict[str, Any] = {}
        self._log = sys.stdout if log_fd is None else os.fdopen(log_fd, "w")
        # Single worker: user functions run one at a time, off the event loop.
        self._thread_pool = futures.ThreadPoolExecutor(max_workers=1)

        def handle_termination(*args):
            # Convert SIGINT into SIGTERM so shutdown follows one path.
            self.log("Termination signal received, shutting down...")
            signal.raise_signal(signal.SIGTERM)

        signal.signal(signal.SIGINT, handle_termination)

    async def Run(
        self,
        request: definitions.FunctionCall,
        context: aio.ServicerContext,
    ) -> AsyncIterator[PartialRunResult]:
        """Execute the requested function (optionally preceded by a cached
        setup function whose result is passed as the first argument) and
        stream the serialized result back."""
        self.log(f"A connection has been established: {context.peer()}!")
        server_version = os.getenv("ISOLATE_SERVER_VERSION") or "unknown"
        self.log(f"Isolate info: server {server_version}, agent {agent_version}")

        extra_args = []
        if request.HasField("setup_func"):
            cache_key = sha256_digest_of(
                request.setup_func.definition,
                request.setup_func.method,
            )
            if cache_key not in self._run_cache:
                try:
                    (
                        result,
                        was_it_raised,
                        stringized_tb,
                    ) = await self.execute_function(
                        request.setup_func,
                        "setup",
                    )

                    if was_it_raised:
                        self.log(
                            "The setup function has thrown an error. Aborting the run."
                        )
                        # Ship the raised setup error to the client before aborting.
                        yield self.send_object(
                            request.setup_func.method,
                            result,
                            was_it_raised,
                            stringized_tb,
                        )
                        raise AbortException("The setup function has thrown an error.")
                except AbortException as exc:
                    self.abort_with_msg(context, exc.message)
                    return
                else:
                    assert not was_it_raised
                    self._run_cache[cache_key] = result

            extra_args.append(self._run_cache[cache_key])

        try:
            result, was_it_raised, stringized_tb = await self.execute_function(
                request.function,
                "function",
                extra_args=extra_args,
            )
            yield self.send_object(
                request.function.method,
                result,
                was_it_raised,
                stringized_tb,
            )
        except AbortException as exc:
            self.abort_with_msg(context, exc.message)
            return

    async def execute_function(
        self,
        function: definitions.SerializedObject,
        function_kind: str,
        *,
        extra_args: Iterable[Any] = (),
    ) -> tuple[Any, bool, str | None]:
        """Deserialize and run `function`, returning a tuple of
        (result_or_exception, was_it_raised, stringized_traceback).

        Raises AbortException when the payload is not a callable.
        """
        if function.was_it_raised:
            raise AbortException(
                f"The {function_kind} function must be callable, "
                "not a raised exception."
            )

        try:
            # TODO: technically any sort of exception could be raised here, since
            # depickling is basically involves code execution from the *user*.
            function = from_grpc(function)
        except SerializationError as exc:
            str_tb = traceback.format_exc()
            self.log(str_tb)
            self.log(f"The {function_kind} function could not be deserialized.")
            return exc, True, str_tb

        if not callable(function):
            raise AbortException(
                f"The {function_kind} function must be callable, "
                f"not {type(function).__name__}."
            )

        self.log(f"Starting the execution of the {function_kind} function.")

        was_it_raised = False
        stringized_tb = None
        try:
            # Newer fal SDK will mark async entrypoints with `_run_on_main_thread` so
            # we execute on the main loop and can await the coroutine they return.
            # Older fal SDK still call `asyncio.run(...)`.
            # To avoid error "asyncio.run() cannot be called from a running event loop"
            # and be backward compatible,
            # we offload those unflagged functions to a thread pool.

            if getattr(function, "_run_on_main_thread", False):
                result = function(*extra_args)
            else:
                result = self._thread_pool.submit(function, *extra_args).result()

            if asyncio.iscoroutine(result):
                result = await result

        except BaseException as exc:
            result = exc
            was_it_raised = True
            # Trim our own frames from the traceback so the user only
            # sees their code.
            num_frames = len(traceback.extract_stack()[:-5])
            stringized_tb = "".join(traceback.format_exc(limit=-num_frames))

        if not was_it_raised:
            self.log(f"Completed the execution of the {function_kind} function.")
        else:
            self.log(
                f"Completed the execution of the {function_kind} function"
                f" with an error: {result}\nTraceback:\n{stringized_tb}"
            )
        return result, was_it_raised, stringized_tb

    def send_object(
        self,
        serialization_method: str,
        result: object,
        was_it_raised: bool,
        stringized_tb: str | None,
    ) -> definitions.PartialRunResult:
        """Serialize `result` with the requested method and wrap it in a
        completed PartialRunResult.

        Raises AbortException when serialization fails.
        """
        try:
            definition = serialize_object(serialization_method, result)
        except SerializationError:
            if stringized_tb:
                print(stringized_tb, file=sys.stderr)
            self.log(traceback.format_exc())
            raise AbortException(
                "Error while serializing the execution result "
                f"(object of type {type(result)})."
            )
        except BaseException:
            self.log(traceback.format_exc())
            raise AbortException(
                "An unexpected error occurred while serializing the result."
            )

        self.log("Sending the result.")
        serialized_obj = definitions.SerializedObject(
            method=serialization_method,
            definition=definition,
            was_it_raised=was_it_raised,
            stringized_traceback=stringized_tb,
        )
        return definitions.PartialRunResult(
            result=serialized_obj,
            is_complete=True,
            logs=[],
        )

    def log(self, message: str) -> None:
        # Flush immediately so the controller sees logs as they happen.
        self._log.write(message + "\n")
        self._log.flush()

    def abort_with_msg(
        self,
        context: aio.ServicerContext,
        message: str,
        *,
        code: StatusCode = StatusCode.INVALID_ARGUMENT,
    ) -> None:
        """Set the RPC status code and detail message on the context."""
        context.set_code(code)
        context.set_details(message)
        return None
|
|
238
|
+
|
|
239
|
+
|
|
240
|
+
def create_server(address: str) -> aio.Server:
    """Build a temporary asyncio gRPC server bound to the given address.

    The asyncio flavor lets requests execute on the main thread so that
    signals can be intercepted there. ``maximum_concurrent_rpcs`` is 2
    rather than 1 to work around a grpcio oddity where subsequent
    requests fail with a "concurrent rpc limit exceeded" error, even
    though in practice only a single request runs at a time.
    """
    new_server = aio.server(
        maximum_concurrent_rpcs=2,
        options=get_default_options(),
    )

    # Local server credentials guarantee that only processes on this
    # machine can establish a connection.
    new_server.add_secure_port(address, local_server_credentials())
    return new_server
|
|
257
|
+
|
|
258
|
+
|
|
259
|
+
async def run_agent(address: str, log_fd: int | None = None) -> int:
    """Serve the agent at `address` until the server terminates.

    Args:
        address: Host/port string the server should listen on.
        log_fd: Optional file descriptor the servicer writes logs to.

    Returns:
        0 once the server has shut down.
    """
    agent_server = create_server(address)

    # register_agent merely wires the servicer's handlers into the
    # server; it has no global side effects.
    definitions.register_agent(AgentServicer(log_fd=log_fd), agent_server)

    await agent_server.start()
    await agent_server.wait_for_termination()
    return 0
|
|
272
|
+
|
|
273
|
+
|
|
274
|
+
async def main() -> int:
    """CLI entry point: parse the listen address and the optional log
    file descriptor, then run the agent until it terminates."""
    arg_parser = ArgumentParser()
    arg_parser.add_argument("address", type=str)
    arg_parser.add_argument("--log-fd", type=int)
    parsed = arg_parser.parse_args()

    return await run_agent(parsed.address, log_fd=parsed.log_fd)


if __name__ == "__main__":
    asyncio.run(main())
|
|
@@ -0,0 +1,23 @@
|
|
|
1
|
+
import ast
|
|
2
|
+
import os
|
|
3
|
+
|
|
4
|
+
# Environment variables with this prefix are translated into gRPC options.
_GRPC_OPTION_PREFIX = "ISOLATE_GRPC_CALL_"


def get_default_options():
    """Return the default list of GRPC call options (both for
    server and client) which are set via environment variables.

    Each environment variable starting with `ISOLATE_GRPC_CALL_`
    will be converted to a GRPC option. The name of the option
    will be the name of the environment variable, with the
    `ISOLATE_GRPC_CALL_` prefix removed and converted to lowercase.

    Values are parsed as Python literals (so "1024" becomes the int
    1024); values that are not valid literals — e.g. a bare, unquoted
    string such as "round_robin" — are passed through unchanged as
    strings instead of raising.
    """

    options = []
    for raw_key, raw_value in os.environ.items():
        if not raw_key.startswith(_GRPC_OPTION_PREFIX):
            continue
        field = raw_key[len(_GRPC_OPTION_PREFIX) :].lower()
        try:
            value = ast.literal_eval(raw_value)
        except (ValueError, SyntaxError):
            # ast.literal_eval raises for anything that is not a valid
            # Python literal; treat such values as plain strings.
            value = raw_value
        options.append((f"grpc.{field}", value))
    return options
|
|
@@ -0,0 +1,11 @@
|
|
|
1
|
+
from google.protobuf.message import Message # noqa: F401
|
|
2
|
+
|
|
3
|
+
from isolate.connections.grpc.definitions.agent_pb2 import * # noqa: F403
|
|
4
|
+
from isolate.connections.grpc.definitions.agent_pb2_grpc import ( # noqa: F401
|
|
5
|
+
AgentServicer,
|
|
6
|
+
AgentStub,
|
|
7
|
+
)
|
|
8
|
+
from isolate.connections.grpc.definitions.agent_pb2_grpc import ( # noqa: F401
|
|
9
|
+
add_AgentServicer_to_server as register_agent,
|
|
10
|
+
)
|
|
11
|
+
from isolate.connections.grpc.definitions.common_pb2 import * # noqa: F403
|
|
@@ -0,0 +1,18 @@
|
|
|
1
|
+
syntax = "proto3";

import "common.proto";

// The Agent service executes a single serialized function inside the
// isolated environment and streams partial results (logs and, finally,
// the serialized return value) back to the controller.
service Agent {
    // Start running the given function, and stream results back.
    rpc Run (FunctionCall) returns (stream PartialRunResult) {}
}

message FunctionCall {
    // The function to execute and return the results to.
    SerializedObject function = 1;
    // Optionally the setup function which will be passed
    // as the first argument to the given function. This
    // has to be an idempotent step since the result for
    // this executable will be cached.
    optional SerializedObject setup_func = 2;
}
|
|
@@ -0,0 +1,29 @@
|
|
|
1
|
+
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: agent.proto
# Protobuf Python Version: 4.25.1
# NOTE: regenerate via protoc from agent.proto instead of editing by hand.
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import symbol_database as _symbol_database
from google.protobuf.internal import builder as _builder
# @@protoc_insertion_point(imports)

_sym_db = _symbol_database.Default()


from isolate.connections.grpc.definitions import common_pb2 as common__pb2


DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x0b\x61gent.proto\x1a\x0c\x63ommon.proto\"n\n\x0c\x46unctionCall\x12#\n\x08\x66unction\x18\x01 \x01(\x0b\x32\x11.SerializedObject\x12*\n\nsetup_func\x18\x02 \x01(\x0b\x32\x11.SerializedObjectH\x00\x88\x01\x01\x42\r\n\x0b_setup_func24\n\x05\x41gent\x12+\n\x03Run\x12\r.FunctionCall\x1a\x11.PartialRunResult\"\x00\x30\x01\x62\x06proto3')

_globals = globals()
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'agent_pb2', _globals)
if _descriptor._USE_C_DESCRIPTORS == False:
  DESCRIPTOR._options = None
  _globals['_FUNCTIONCALL']._serialized_start=29
  _globals['_FUNCTIONCALL']._serialized_end=139
  _globals['_AGENT']._serialized_start=141
  _globals['_AGENT']._serialized_end=193
# @@protoc_insertion_point(module_scope)
|
|
@@ -0,0 +1,44 @@
|
|
|
1
|
+
"""
|
|
2
|
+
@generated by mypy-protobuf. Do not edit manually!
|
|
3
|
+
isort:skip_file
|
|
4
|
+
"""
|
|
5
|
+
import builtins
|
|
6
|
+
from isolate.connections.grpc.definitions import common_pb2
|
|
7
|
+
import google.protobuf.descriptor
|
|
8
|
+
import google.protobuf.message
|
|
9
|
+
import sys
|
|
10
|
+
|
|
11
|
+
if sys.version_info >= (3, 8):
|
|
12
|
+
import typing as typing_extensions
|
|
13
|
+
else:
|
|
14
|
+
import typing_extensions
|
|
15
|
+
|
|
16
|
+
DESCRIPTOR: google.protobuf.descriptor.FileDescriptor
|
|
17
|
+
|
|
18
|
+
@typing_extensions.final
|
|
19
|
+
class FunctionCall(google.protobuf.message.Message):
|
|
20
|
+
DESCRIPTOR: google.protobuf.descriptor.Descriptor
|
|
21
|
+
|
|
22
|
+
FUNCTION_FIELD_NUMBER: builtins.int
|
|
23
|
+
SETUP_FUNC_FIELD_NUMBER: builtins.int
|
|
24
|
+
@property
|
|
25
|
+
def function(self) -> common_pb2.SerializedObject:
|
|
26
|
+
"""The function to execute and return the results to."""
|
|
27
|
+
@property
|
|
28
|
+
def setup_func(self) -> common_pb2.SerializedObject:
|
|
29
|
+
"""Optionally the setup function which will be passed
|
|
30
|
+
as the first argument to the given function. This
|
|
31
|
+
has to be an idempotent step since the result for
|
|
32
|
+
this executable will be cached.
|
|
33
|
+
"""
|
|
34
|
+
def __init__(
|
|
35
|
+
self,
|
|
36
|
+
*,
|
|
37
|
+
function: common_pb2.SerializedObject | None = ...,
|
|
38
|
+
setup_func: common_pb2.SerializedObject | None = ...,
|
|
39
|
+
) -> None: ...
|
|
40
|
+
def HasField(self, field_name: typing_extensions.Literal["_setup_func", b"_setup_func", "function", b"function", "setup_func", b"setup_func"]) -> builtins.bool: ...
|
|
41
|
+
def ClearField(self, field_name: typing_extensions.Literal["_setup_func", b"_setup_func", "function", b"function", "setup_func", b"setup_func"]) -> None: ...
|
|
42
|
+
def WhichOneof(self, oneof_group: typing_extensions.Literal["_setup_func", b"_setup_func"]) -> typing_extensions.Literal["setup_func"] | None: ...
|
|
43
|
+
|
|
44
|
+
global___FunctionCall = FunctionCall
|
|
@@ -0,0 +1,68 @@
|
|
|
1
|
+
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
# NOTE: regenerate via grpcio-tools from agent.proto instead of editing by hand.
"""Client and server classes corresponding to protobuf-defined services."""
import grpc

from isolate.connections.grpc.definitions import agent_pb2 as agent__pb2
from isolate.connections.grpc.definitions import common_pb2 as common__pb2


class AgentStub(object):
    """Missing associated documentation comment in .proto file."""

    def __init__(self, channel):
        """Constructor.

        Args:
            channel: A grpc.Channel.
        """
        self.Run = channel.unary_stream(
                '/Agent/Run',
                request_serializer=agent__pb2.FunctionCall.SerializeToString,
                response_deserializer=common__pb2.PartialRunResult.FromString,
                )


class AgentServicer(object):
    """Missing associated documentation comment in .proto file."""

    def Run(self, request, context):
        """Start running the given function, and stream results back.
        """
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')


def add_AgentServicer_to_server(servicer, server):
    rpc_method_handlers = {
            'Run': grpc.unary_stream_rpc_method_handler(
                    servicer.Run,
                    request_deserializer=agent__pb2.FunctionCall.FromString,
                    response_serializer=common__pb2.PartialRunResult.SerializeToString,
            ),
    }
    generic_handler = grpc.method_handlers_generic_handler(
            'Agent', rpc_method_handlers)
    server.add_generic_rpc_handlers((generic_handler,))


# This class is part of an EXPERIMENTAL API.
class Agent(object):
    """Missing associated documentation comment in .proto file."""

    @staticmethod
    def Run(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_stream(request, target, '/Agent/Run',
            agent__pb2.FunctionCall.SerializeToString,
            common__pb2.PartialRunResult.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
|