isolate 0.22.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- isolate/__init__.py +3 -0
- isolate/_isolate_version.py +34 -0
- isolate/_version.py +6 -0
- isolate/backends/__init__.py +2 -0
- isolate/backends/_base.py +132 -0
- isolate/backends/common.py +259 -0
- isolate/backends/conda.py +215 -0
- isolate/backends/container.py +64 -0
- isolate/backends/local.py +46 -0
- isolate/backends/pyenv.py +143 -0
- isolate/backends/remote.py +141 -0
- isolate/backends/settings.py +121 -0
- isolate/backends/virtualenv.py +204 -0
- isolate/common/__init__.py +0 -0
- isolate/common/timestamp.py +15 -0
- isolate/connections/__init__.py +21 -0
- isolate/connections/_local/__init__.py +2 -0
- isolate/connections/_local/_base.py +190 -0
- isolate/connections/_local/agent_startup.py +53 -0
- isolate/connections/common.py +121 -0
- isolate/connections/grpc/__init__.py +1 -0
- isolate/connections/grpc/_base.py +175 -0
- isolate/connections/grpc/agent.py +284 -0
- isolate/connections/grpc/configuration.py +23 -0
- isolate/connections/grpc/definitions/__init__.py +11 -0
- isolate/connections/grpc/definitions/agent.proto +18 -0
- isolate/connections/grpc/definitions/agent_pb2.py +29 -0
- isolate/connections/grpc/definitions/agent_pb2.pyi +44 -0
- isolate/connections/grpc/definitions/agent_pb2_grpc.py +68 -0
- isolate/connections/grpc/definitions/common.proto +49 -0
- isolate/connections/grpc/definitions/common_pb2.py +35 -0
- isolate/connections/grpc/definitions/common_pb2.pyi +152 -0
- isolate/connections/grpc/definitions/common_pb2_grpc.py +4 -0
- isolate/connections/grpc/interface.py +71 -0
- isolate/connections/ipc/__init__.py +5 -0
- isolate/connections/ipc/_base.py +225 -0
- isolate/connections/ipc/agent.py +205 -0
- isolate/logger.py +53 -0
- isolate/logs.py +76 -0
- isolate/py.typed +0 -0
- isolate/registry.py +53 -0
- isolate/server/__init__.py +1 -0
- isolate/server/definitions/__init__.py +13 -0
- isolate/server/definitions/server.proto +80 -0
- isolate/server/definitions/server_pb2.py +56 -0
- isolate/server/definitions/server_pb2.pyi +241 -0
- isolate/server/definitions/server_pb2_grpc.py +205 -0
- isolate/server/health/__init__.py +11 -0
- isolate/server/health/health.proto +23 -0
- isolate/server/health/health_pb2.py +32 -0
- isolate/server/health/health_pb2.pyi +66 -0
- isolate/server/health/health_pb2_grpc.py +99 -0
- isolate/server/health_server.py +40 -0
- isolate/server/interface.py +27 -0
- isolate/server/server.py +735 -0
- isolate-0.22.0.dist-info/METADATA +88 -0
- isolate-0.22.0.dist-info/RECORD +61 -0
- isolate-0.22.0.dist-info/WHEEL +5 -0
- isolate-0.22.0.dist-info/entry_points.txt +7 -0
- isolate-0.22.0.dist-info/licenses/LICENSE +201 -0
- isolate-0.22.0.dist-info/top_level.txt +1 -0
|
@@ -0,0 +1,205 @@
|
|
|
1
|
+
# This file defines an "isolate" agent for inter-process communication over
|
|
2
|
+
# sockets. It is spawned by the controller process with a single argument (a
|
|
3
|
+
# base64 encoded server address) and expected to go through the following procedures:
|
|
4
|
+
# 1. Decode the given address
|
|
5
|
+
# 2. Create a connection to the transmission bridge using the address
|
|
6
|
+
# 3. Receive a callable object from the bridge
|
|
7
|
+
# 4. Execute the callable object
|
|
8
|
+
# 5. Send the result back to the bridge
|
|
9
|
+
# 6. Exit
|
|
10
|
+
#
|
|
11
|
+
# Up until to point 4, the agent process has no way of transmitting information
|
|
12
|
+
# to the controller so it should use the stderr/stdout channels appropriately. After
|
|
13
|
+
# the executable is received, the controller process will switch to the listening mode
|
|
14
|
+
# and wait for agent to return something. The expected object is a tuple of two objects
|
|
15
|
+
# one being the actual result of the given callable, and the other one is a boolean flag
|
|
16
|
+
# indicating whether the callable has raised an exception or not.
|
|
17
|
+
|
|
18
|
+
from __future__ import annotations
|
|
19
|
+
|
|
20
|
+
import base64
|
|
21
|
+
import importlib
|
|
22
|
+
import os
|
|
23
|
+
import sys
|
|
24
|
+
import time
|
|
25
|
+
import traceback
|
|
26
|
+
from argparse import ArgumentParser
|
|
27
|
+
from contextlib import closing
|
|
28
|
+
from multiprocessing.connection import Client
|
|
29
|
+
from typing import TYPE_CHECKING, Any, Callable, ContextManager
|
|
30
|
+
|
|
31
|
+
if TYPE_CHECKING:
    # Somehow mypy can't figure out that `ConnectionWrapper`
    # really exists (it is undocumented and missing from typeshed),
    # so provide a minimal interface-only stub for type checkers.
    class ConnectionWrapper:
        def __init__(
            self,
            connection: Any,
            loads: Callable[[bytes], Any],
            dumps: Callable[[Any], bytes],
        ) -> None: ...

        def recv(self) -> Any: ...

        def send(self, value: Any) -> None: ...

        def close(self) -> None: ...

else:
    from multiprocessing.connection import ConnectionWrapper
|
|
50
|
+
|
|
51
|
+
|
|
52
|
+
def decode_service_address(address: str) -> tuple[str, int]:
    """Decode a base64-encoded ``host:port`` string into ``(host, port)``."""
    decoded = base64.b64decode(address).decode("utf-8")
    # Split from the right so hosts that themselves contain ":" stay intact.
    host, port_text = decoded.rsplit(":", 1)
    return host, int(port_text)
|
|
55
|
+
|
|
56
|
+
|
|
57
|
+
def child_connection(
    serialization_method: str, address: tuple[str, int]
) -> ContextManager[ConnectionWrapper]:
    """Connect to the bridge at *address* as a client.

    The returned context manager yields a ``ConnectionWrapper`` whose
    (de)serialization is delegated to the ``loads``/``dumps`` pair of the
    module named by *serialization_method*; closing is handled on exit.
    """
    backend = importlib.import_module(serialization_method)
    raw_connection = Client(address)
    wrapped = ConnectionWrapper(
        raw_connection,
        loads=backend.loads,
        dumps=backend.dumps,
    )
    return closing(wrapped)
|
|
68
|
+
|
|
69
|
+
|
|
70
|
+
# Set ISOLATE_ENABLE_DEBUGGING=1 in the controller process to make the agent
# pause at startup and print instructions for attaching a debugger (see main()).
IS_DEBUG_MODE = os.getenv("ISOLATE_ENABLE_DEBUGGING") == "1"
# How long (in seconds) the agent sleeps while waiting for a manual debug
# session to be attached: 15 minutes.
DEBUG_TIMEOUT = 60 * 15
|
|
72
|
+
|
|
73
|
+
|
|
74
|
+
def run_client(
    serialization_method: str,
    address: tuple[str, int],
    *,
    with_pdb: bool = False,
    log_fd: int | None = None,
) -> None:
    """Connect to the controller at *address*, receive a single callable,
    execute it, and send back ``(result, did_it_raise, stringized_tb)``.

    Args:
        serialization_method: Importable module exposing ``loads``/``dumps``.
        address: ``(host, port)`` of the controller's listener.
        with_pdb: Drop into PDB before connecting (for manual debug sessions).
        log_fd: Optional file descriptor to write progress logs to; when
            omitted, logs go to stdout.
    """
    # Debug Mode
    # ==========
    #
    # Isolated processes are really tricky to debug properly
    # so we want to have a smooth way into the process and see
    # what is really going on in the case of errors.
    #
    # For using the debug mode, you first need to set ISOLATE_ENABLE_DEBUGGING
    # environment variable to "1" from your controller process. This will
    # make the isolated process hang at the initialization, and make it print
    # the instructions to connect to the controller process.
    #
    # On a separate shell (while letting the controller process hang), you can
    # execute the given command to drop into the PDB (Python Debugger). With that
    # you can observe each step of the connection and run process.

    if with_pdb:
        # This condition will only be activated if we want to
        # debug the isolated process by passing the --with-pdb
        # flag when executing the binary.
        import pdb

        pdb.set_trace()

    if log_fd is None:
        _log = sys.stdout
    else:
        # NOTE(review): the fdopen'd stream is never explicitly closed; it
        # lives for the lifetime of this (short-lived) agent process.
        _log = os.fdopen(log_fd, "w")

    def log(_msg):
        # Flush immediately so the controller sees progress in real time.
        _log.write(_msg)
        _log.flush()

    log(f"Trying to create a connection to {address}")
    # TODO(feat): this should probably run in a loop instead of
    # receiving a single function and then exiting immediately.
    with child_connection(serialization_method, address) as connection:
        log(f"Created child connection to {address}")
        # NOTE(review): `callable` shadows the builtin of the same name;
        # kept as-is to avoid touching delicate code below.
        callable = connection.recv()
        log(f"Received the callable at {address}")

        result = None
        did_it_raise = False
        stringized_tb = None
        try:
            result = callable()
        except BaseException as exc:
            result = exc
            did_it_raise = True
            # Trim the agent's own frames (this function and its callers)
            # so the controller only sees the user function's traceback.
            # The -4 depends on the exact call depth at this point.
            num_frames = len(traceback.extract_stack()[:-4])
            stringized_tb = "".join(traceback.format_exc(limit=-num_frames))
        finally:
            try:
                connection.send((result, did_it_raise, stringized_tb))
            except BaseException:
                if did_it_raise:
                    # If we can't even send it through the connection
                    # still try to dump it to the stderr as the last
                    # resort.
                    assert isinstance(result, BaseException)
                    traceback.print_exception(
                        type(result),
                        result,
                        result.__traceback__,
                    )
                raise
|
|
147
|
+
|
|
148
|
+
|
|
149
|
+
def _get_shell_bootstrap() -> str:
|
|
150
|
+
# Return a string that contains environment variables that
|
|
151
|
+
# might be used during isolated hook's execution.
|
|
152
|
+
return " ".join(
|
|
153
|
+
f"{session_variable}={os.getenv(session_variable)}"
|
|
154
|
+
for session_variable in [
|
|
155
|
+
# PYTHONPATH is customized by the Extended Environment IPC
|
|
156
|
+
# system to make sure that the isolated process can
|
|
157
|
+
# import stuff from the primary environment. Without this
|
|
158
|
+
# the isolated process will not be able to run properly
|
|
159
|
+
# on the newly created debug session.
|
|
160
|
+
"PYTHONPATH",
|
|
161
|
+
]
|
|
162
|
+
if session_variable in os.environ
|
|
163
|
+
)
|
|
164
|
+
|
|
165
|
+
|
|
166
|
+
def main() -> int:
    """Agent entry point: parse CLI options, optionally announce the debug
    session instructions, then serve a single callable over IPC.

    Returns 0 on success; exceptions raised by the received callable are
    transmitted back to the controller rather than raised here.
    """
    parser = ArgumentParser()
    # Positional: the base64-encoded "host:port" of the controller's bridge.
    parser.add_argument("listen_at")
    parser.add_argument("--with-pdb", action="store_true", default=False)
    parser.add_argument("--serialization-backend", default="pickle")
    parser.add_argument("--log-fd", type=int)

    options = parser.parse_args()
    if IS_DEBUG_MODE:
        assert not options.with_pdb, "--with-pdb can't be used in the debug mode"
        # Print a banner with a copy-pasteable command that re-runs this
        # agent under PDB against the same bridge address, then hang so
        # the operator has time to attach.
        message = "=" * 60
        message += "\n" * 3
        message += (
            "Debug mode successfully activated. "
            "You can start your debugging session with the following command:\n"
        )
        message += (
            f"  $ {_get_shell_bootstrap()}\\\n  "
            f"{sys.executable} {os.path.abspath(__file__)} "
            f"--serialization-backend {options.serialization_backend} "
            f"--with-pdb {options.listen_at}"
        )
        message += "\n" * 3
        message += "=" * 60
        print(message)
        time.sleep(DEBUG_TIMEOUT)

    serialization_method = options.serialization_backend
    address = decode_service_address(options.listen_at)
    run_client(
        serialization_method,
        address,
        with_pdb=options.with_pdb,
        log_fd=options.log_fd,
    )
    return 0


if __name__ == "__main__":
    sys.exit(main())
|
isolate/logger.py
ADDED
|
@@ -0,0 +1,53 @@
|
|
|
1
|
+
import json
|
|
2
|
+
import os
|
|
3
|
+
from datetime import datetime, timezone
|
|
4
|
+
from typing import Dict
|
|
5
|
+
|
|
6
|
+
from isolate.logs import LogLevel, LogSource
|
|
7
|
+
|
|
8
|
+
|
|
9
|
+
# NOTE: we probably should've created a proper `logging.getLogger` here,
|
|
10
|
+
# but having it handle `source` would not be trivial, so we are better off
|
|
11
|
+
# just keeping it simple for now.
|
|
12
|
+
class IsolateLogger:
    """Minimal structured (JSON-lines) logger.

    Each record printed to stdout carries a UTC timestamp, the log source
    and level names, the message, and any configured labels.
    """

    # Class-level labels merged into every record of every instance; may be
    # patched at runtime to tag all subsequent records.
    extra_labels: Dict[str, str] = {}

    def __init__(self, log_labels: Dict[str, str]):
        # Per-instance labels merged into every record.
        self.log_labels = log_labels

    def log(self, level: LogLevel, message: str, source: LogSource) -> None:
        """Emit one JSON record (a single line) to stdout."""
        record = {
            # Set the timestamp from source so we can be sure no buffering or
            # latency is affecting the timestamp.
            "logged_at": datetime.now(tz=timezone.utc).isoformat(),
            "isolate_source": source.name,
            "level": level.name,
            "message": message,
            **self.log_labels,
            **self.extra_labels,
        }
        print(json.dumps(record))

    @classmethod
    def with_env_expanded(cls, labels: Dict[str, str]) -> "IsolateLogger":
        """Build a logger from *labels*, expanding ``$VAR`` values from the
        process environment.

        A ``$VAR`` value whose variable is unset keeps its literal value.
        The caller's dict is left untouched (previously it was mutated
        in place).
        """
        expanded_labels: Dict[str, str] = {}
        for key, value in labels.items():
            if value.startswith("$"):
                expanded = os.getenv(value[1:])
            else:
                expanded = value
            # Fall back to the raw value when the referenced env var is unset.
            expanded_labels[key] = value if expanded is None else expanded

        return cls(expanded_labels)

    @classmethod
    def from_env(cls) -> "IsolateLogger":
        """Build a logger from the ISOLATE_LOG_LABELS env var (a JSON object);
        malformed JSON is reported and treated as no labels."""
        _labels: Dict[str, str] = {}
        raw = os.getenv("ISOLATE_LOG_LABELS")
        if raw:
            try:
                _labels = json.loads(raw)
            except json.JSONDecodeError:
                print("Failed to parse ISOLATE_LOG_LABELS")

        return cls.with_env_expanded(labels=_labels)
|
isolate/logs.py
ADDED
|
@@ -0,0 +1,76 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import tempfile
|
|
4
|
+
from dataclasses import dataclass, field
|
|
5
|
+
from datetime import datetime, timezone
|
|
6
|
+
from enum import Enum
|
|
7
|
+
from functools import total_ordering
|
|
8
|
+
from pathlib import Path
|
|
9
|
+
from typing import TYPE_CHECKING
|
|
10
|
+
|
|
11
|
+
if TYPE_CHECKING:
|
|
12
|
+
from isolate.backends import BaseEnvironment
|
|
13
|
+
|
|
14
|
+
_SYSTEM_TEMP_DIR = Path(tempfile.gettempdir())
|
|
15
|
+
|
|
16
|
+
|
|
17
|
+
class LogSource(str, Enum):
    """Represents where the log originates from."""

    # During the environment creation process (e.g. if the environment
    # is already created/cached, then no logs from this source will be
    # emitted).
    BUILDER = "builder"

    # During the environment execution process (from the server<->agent
    # communication, mostly for debugging purposes).
    BRIDGE = "bridge"

    # From the user script itself (e.g. a print() call in the given
    # function). The stream will be attached as level (stdout or stderr)
    USER = "user"
|
|
32
|
+
|
|
33
|
+
|
|
34
|
+
@total_ordering
class LogLevel(Enum):
    """Represents the log level.

    Members are orderable (via ``@total_ordering`` plus ``__lt__``) by
    their numeric severity values.
    """

    TRACE = 0
    DEBUG = 10
    INFO = 20
    WARNING = 30
    ERROR = 40

    # For user scripts
    STDOUT = 100
    STDERR = 110

    def __lt__(self, other: LogLevel) -> bool:
        # Only compare against other LogLevel members; defer to the other
        # operand for anything else.
        if self.__class__ is not other.__class__:
            return NotImplemented
        return self.value < other.value

    def __str__(self) -> str:
        return self.name.lower()
|
|
55
|
+
|
|
56
|
+
|
|
57
|
+
@dataclass
class Log:
    """A structured log message with an optional source and level."""

    # The log text itself.
    message: str
    # Which part of the system produced the message (builder/bridge/user).
    source: LogSource
    level: LogLevel = LogLevel.INFO
    # The environment this log is bound to; None for global logs.
    bound_env: BaseEnvironment | None = field(default=None, repr=False)
    # Captured at construction time, in UTC.
    timestamp: datetime = field(default_factory=lambda: datetime.now(timezone.utc))

    def __str__(self) -> str:
        parts = [self.timestamp.strftime("%m/%d/%Y %H:%M:%S")]
        if self.bound_env:
            # Only show a short prefix of the environment key.
            parts.append(f"[{self.bound_env.key[:6]}]")
        else:
            parts.append("[global]")

        parts.append(f"[{self.source}]".ljust(10))
        parts.append(f"[{self.level}]".ljust(10))
        # The ljust padding on the last part provides the separation
        # before the message text.
        return " ".join(parts) + self.message
|
isolate/py.typed
ADDED
|
File without changes
|
isolate/registry.py
ADDED
|
@@ -0,0 +1,53 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import sys
|
|
4
|
+
from typing import TYPE_CHECKING, Any
|
|
5
|
+
|
|
6
|
+
if sys.version_info >= (3, 10):
|
|
7
|
+
import importlib.metadata as importlib_metadata
|
|
8
|
+
else:
|
|
9
|
+
import importlib_metadata
|
|
10
|
+
|
|
11
|
+
if TYPE_CHECKING:
|
|
12
|
+
from isolate.backends import BaseEnvironment
|
|
13
|
+
|
|
14
|
+
# Any new environments can register themselves during package installation
# time by simply adding an entry point to the `isolate.environment` group.
_ENTRY_POINT = "isolate.backends"

# Discovered-but-not-yet-imported entry points, keyed by environment kind.
_ENTRY_POINTS: dict[str, importlib_metadata.EntryPoint] = {}
# Lazily loaded environment classes, keyed by environment kind.
_ENVIRONMENTS: dict[str, type[BaseEnvironment]] = {}
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
def _reload_registry() -> None:
    """Scan installed distributions and record isolate backend entry points
    into the module-level ``_ENTRY_POINTS`` mapping."""
    all_entry_points = importlib_metadata.entry_points()
    # We are not immediately loading the backend class here
    # since it might cause importing modules that we won't be
    # using at all.
    discovered = {}
    for entry_point in all_entry_points.select(group=_ENTRY_POINT):
        discovered[entry_point.name] = entry_point
    _ENTRY_POINTS.update(discovered)


_reload_registry()
|
|
36
|
+
|
|
37
|
+
|
|
38
|
+
def prepare_environment(
    kind: str,
    **kwargs: Any,
) -> BaseEnvironment:
    """Get the environment for the given `kind` with the given `config`."""
    from isolate.backends.settings import DEFAULT_SETTINGS

    # Backend classes are imported lazily, on first use of their kind.
    environment_cls = _ENVIRONMENTS.get(kind)
    if environment_cls is None:
        entry_point = _ENTRY_POINTS.get(kind)
        if entry_point is None:
            raise ValueError(f"Unknown environment: '{kind}'")

        environment_cls = entry_point.load()
        _ENVIRONMENTS[kind] = environment_cls

    # The optional "context" kwarg carries the settings; everything else is
    # forwarded as the environment's configuration.
    settings = kwargs.pop("context", DEFAULT_SETTINGS)
    return environment_cls.from_config(config=kwargs, settings=settings)
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
from isolate.server.server import BridgeManager, IsolateServicer # noqa: F401
|
|
@@ -0,0 +1,13 @@
|
|
|
1
|
+
from google.protobuf.json_format import MessageToDict as struct_to_dict # noqa: F401
|
|
2
|
+
from google.protobuf.struct_pb2 import Struct # noqa: F401
|
|
3
|
+
|
|
4
|
+
# Inherit everything from the gRPC connection handler.
|
|
5
|
+
from isolate.connections.grpc.definitions import * # noqa: F403
|
|
6
|
+
from isolate.server.definitions.server_pb2 import * # noqa: F403
|
|
7
|
+
from isolate.server.definitions.server_pb2_grpc import ( # noqa: F401
|
|
8
|
+
IsolateServicer,
|
|
9
|
+
IsolateStub,
|
|
10
|
+
)
|
|
11
|
+
from isolate.server.definitions.server_pb2_grpc import ( # noqa: F401
|
|
12
|
+
add_IsolateServicer_to_server as register_isolate,
|
|
13
|
+
)
|
|
@@ -0,0 +1,80 @@
|
|
|
1
|
+
syntax = "proto3";

import "common.proto";
import "google/protobuf/struct.proto";

service Isolate {
  // Run the given function on the specified environment. Streams logs
  // and the result originating from that function.
  rpc Run (BoundFunction) returns (stream PartialRunResult) {}

  // Submit a function to be run without waiting for results.
  rpc Submit (SubmitRequest) returns (SubmitResponse) {}

  // Set the metadata for a task.
  rpc SetMetadata (SetMetadataRequest) returns (SetMetadataResponse) {}

  // List running tasks
  rpc List (ListRequest) returns (ListResponse) {}

  // Cancel a running task
  rpc Cancel (CancelRequest) returns (CancelResponse) {}
}

// A serialized function bundled with the environment(s) it should run in.
message BoundFunction {
  // Environments to build/activate before running the function.
  repeated EnvironmentDefinition environments = 1;
  // The serialized callable to execute.
  SerializedObject function = 2;
  // Optional serialized setup callable, run before `function`.
  optional SerializedObject setup_func = 3;
  // Whether to stream logs back to the caller.
  bool stream_logs = 4;
}

message EnvironmentDefinition {
  // Kind of the isolate environment.
  string kind = 1;
  // A free-form definition of environment properties.
  google.protobuf.Struct configuration = 2;
  // Whether to force-create this environment or not.
  bool force = 3;
}

message SubmitRequest {
  // The function to run.
  BoundFunction function = 1;
  // Task metadata.
  TaskMetadata metadata = 2;
}

message TaskMetadata {
  // Labels to attach to the logs.
  map<string, string> logger_labels = 1;
}

message SubmitResponse {
  // Identifier of the newly submitted task.
  string task_id = 1;
}

message SetMetadataRequest{
  // Identifier of the task to update.
  string task_id = 1;
  // Replacement metadata for the task.
  TaskMetadata metadata = 2;
}

// Empty acknowledgement for SetMetadata.
message SetMetadataResponse {
}

// Empty request; List takes no filters.
message ListRequest {
}

// Summary information about one running task.
message TaskInfo {
  string task_id = 1;
}

message ListResponse {
  // All currently running tasks.
  repeated TaskInfo tasks = 1;
}

message CancelRequest {
  // Identifier of the task to cancel.
  string task_id = 1;
}

// Empty acknowledgement for Cancel.
message CancelResponse {
}
|
|
@@ -0,0 +1,56 @@
|
|
|
1
|
+
# -*- coding: utf-8 -*-
|
|
2
|
+
# Generated by the protocol buffer compiler. DO NOT EDIT!
|
|
3
|
+
# source: server.proto
|
|
4
|
+
# Protobuf Python Version: 4.25.1
|
|
5
|
+
"""Generated protocol buffer code."""
|
|
6
|
+
from google.protobuf import descriptor as _descriptor
|
|
7
|
+
from google.protobuf import descriptor_pool as _descriptor_pool
|
|
8
|
+
from google.protobuf import symbol_database as _symbol_database
|
|
9
|
+
from google.protobuf.internal import builder as _builder
|
|
10
|
+
# @@protoc_insertion_point(imports)
|
|
11
|
+
|
|
12
|
+
_sym_db = _symbol_database.Default()
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
from isolate.connections.grpc.definitions import common_pb2 as common__pb2
|
|
16
|
+
from google.protobuf import struct_pb2 as google_dot_protobuf_dot_struct__pb2
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x0cserver.proto\x1a\x0c\x63ommon.proto\x1a\x1cgoogle/protobuf/struct.proto\"\xb2\x01\n\rBoundFunction\x12,\n\x0c\x65nvironments\x18\x01 \x03(\x0b\x32\x16.EnvironmentDefinition\x12#\n\x08\x66unction\x18\x02 \x01(\x0b\x32\x11.SerializedObject\x12*\n\nsetup_func\x18\x03 \x01(\x0b\x32\x11.SerializedObjectH\x00\x88\x01\x01\x12\x13\n\x0bstream_logs\x18\x04 \x01(\x08\x42\r\n\x0b_setup_func\"d\n\x15\x45nvironmentDefinition\x12\x0c\n\x04kind\x18\x01 \x01(\t\x12.\n\rconfiguration\x18\x02 \x01(\x0b\x32\x17.google.protobuf.Struct\x12\r\n\x05\x66orce\x18\x03 \x01(\x08\"R\n\rSubmitRequest\x12 \n\x08\x66unction\x18\x01 \x01(\x0b\x32\x0e.BoundFunction\x12\x1f\n\x08metadata\x18\x02 \x01(\x0b\x32\r.TaskMetadata\"{\n\x0cTaskMetadata\x12\x36\n\rlogger_labels\x18\x01 \x03(\x0b\x32\x1f.TaskMetadata.LoggerLabelsEntry\x1a\x33\n\x11LoggerLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"!\n\x0eSubmitResponse\x12\x0f\n\x07task_id\x18\x01 \x01(\t\"F\n\x12SetMetadataRequest\x12\x0f\n\x07task_id\x18\x01 \x01(\t\x12\x1f\n\x08metadata\x18\x02 \x01(\x0b\x32\r.TaskMetadata\"\x15\n\x13SetMetadataResponse\"\r\n\x0bListRequest\"\x1b\n\x08TaskInfo\x12\x0f\n\x07task_id\x18\x01 \x01(\t\"(\n\x0cListResponse\x12\x18\n\x05tasks\x18\x01 \x03(\x0b\x32\t.TaskInfo\" \n\rCancelRequest\x12\x0f\n\x07task_id\x18\x01 \x01(\t\"\x10\n\x0e\x43\x61ncelResponse2\xf4\x01\n\x07Isolate\x12,\n\x03Run\x12\x0e.BoundFunction\x1a\x11.PartialRunResult\"\x00\x30\x01\x12+\n\x06Submit\x12\x0e.SubmitRequest\x1a\x0f.SubmitResponse\"\x00\x12:\n\x0bSetMetadata\x12\x13.SetMetadataRequest\x1a\x14.SetMetadataResponse\"\x00\x12%\n\x04List\x12\x0c.ListRequest\x1a\r.ListResponse\"\x00\x12+\n\x06\x43\x61ncel\x12\x0e.CancelRequest\x1a\x0f.CancelResponse\"\x00\x62\x06proto3')
|
|
20
|
+
|
|
21
|
+
_globals = globals()
|
|
22
|
+
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
|
|
23
|
+
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'server_pb2', _globals)
|
|
24
|
+
if _descriptor._USE_C_DESCRIPTORS == False:
|
|
25
|
+
DESCRIPTOR._options = None
|
|
26
|
+
_globals['_TASKMETADATA_LOGGERLABELSENTRY']._options = None
|
|
27
|
+
_globals['_TASKMETADATA_LOGGERLABELSENTRY']._serialized_options = b'8\001'
|
|
28
|
+
_globals['_BOUNDFUNCTION']._serialized_start=61
|
|
29
|
+
_globals['_BOUNDFUNCTION']._serialized_end=239
|
|
30
|
+
_globals['_ENVIRONMENTDEFINITION']._serialized_start=241
|
|
31
|
+
_globals['_ENVIRONMENTDEFINITION']._serialized_end=341
|
|
32
|
+
_globals['_SUBMITREQUEST']._serialized_start=343
|
|
33
|
+
_globals['_SUBMITREQUEST']._serialized_end=425
|
|
34
|
+
_globals['_TASKMETADATA']._serialized_start=427
|
|
35
|
+
_globals['_TASKMETADATA']._serialized_end=550
|
|
36
|
+
_globals['_TASKMETADATA_LOGGERLABELSENTRY']._serialized_start=499
|
|
37
|
+
_globals['_TASKMETADATA_LOGGERLABELSENTRY']._serialized_end=550
|
|
38
|
+
_globals['_SUBMITRESPONSE']._serialized_start=552
|
|
39
|
+
_globals['_SUBMITRESPONSE']._serialized_end=585
|
|
40
|
+
_globals['_SETMETADATAREQUEST']._serialized_start=587
|
|
41
|
+
_globals['_SETMETADATAREQUEST']._serialized_end=657
|
|
42
|
+
_globals['_SETMETADATARESPONSE']._serialized_start=659
|
|
43
|
+
_globals['_SETMETADATARESPONSE']._serialized_end=680
|
|
44
|
+
_globals['_LISTREQUEST']._serialized_start=682
|
|
45
|
+
_globals['_LISTREQUEST']._serialized_end=695
|
|
46
|
+
_globals['_TASKINFO']._serialized_start=697
|
|
47
|
+
_globals['_TASKINFO']._serialized_end=724
|
|
48
|
+
_globals['_LISTRESPONSE']._serialized_start=726
|
|
49
|
+
_globals['_LISTRESPONSE']._serialized_end=766
|
|
50
|
+
_globals['_CANCELREQUEST']._serialized_start=768
|
|
51
|
+
_globals['_CANCELREQUEST']._serialized_end=800
|
|
52
|
+
_globals['_CANCELRESPONSE']._serialized_start=802
|
|
53
|
+
_globals['_CANCELRESPONSE']._serialized_end=818
|
|
54
|
+
_globals['_ISOLATE']._serialized_start=821
|
|
55
|
+
_globals['_ISOLATE']._serialized_end=1065
|
|
56
|
+
# @@protoc_insertion_point(module_scope)
|