isolate 0.12.14-py3-none-any.whl → 0.12.16-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

This release has been flagged as potentially problematic.

isolate/_isolate_version.py CHANGED
@@ -12,5 +12,5 @@ __version__: str
 __version_tuple__: VERSION_TUPLE
 version_tuple: VERSION_TUPLE
 
-__version__ = version = '0.12.14'
-__version_tuple__ = version_tuple = (0, 12, 14)
+__version__ = version = '0.12.16'
+__version_tuple__ = version_tuple = (0, 12, 16)
isolate/backends/_base.py CHANGED
@@ -12,7 +12,6 @@ from typing import (
 )
 
 from isolate.backends.settings import DEFAULT_SETTINGS, IsolateSettings
-from isolate.exceptions import IsolateException
 from isolate.logs import Log, LogLevel, LogSource
 
 __all__ = [
@@ -28,7 +27,7 @@ CallResultType = TypeVar("CallResultType")
 BasicCallable = Callable[[], CallResultType]
 
 
-class EnvironmentCreationError(IsolateException):
+class EnvironmentCreationError(Exception):
     """Raised when the environment cannot be created."""
 
 
isolate/backends/common.py CHANGED
@@ -171,23 +171,26 @@ def _unblocked_pipe() -> tuple[int, int]:
 def logged_io(
     stdout_hook: HookT,
     stderr_hook: HookT | None = None,
-) -> Iterator[tuple[int, int]]:
+    log_hook: HookT | None = None,
+) -> Iterator[tuple[int, int, int]]:
     """Open two new streams (for stdout and stderr, respectively) and start relaying all
     the output from them to the given hooks."""
 
     stdout_reader_fd, stdout_writer_fd = _unblocked_pipe()
     stderr_reader_fd, stderr_writer_fd = _unblocked_pipe()
+    log_reader_fd, log_writer_fd = _unblocked_pipe()
 
     termination_event = threading.Event()
     io_observer = _io_observer(
         hooks={
             stdout_reader_fd: stdout_hook,
             stderr_reader_fd: stderr_hook or stdout_hook,
+            log_reader_fd: log_hook or stdout_hook,
         },
         termination_event=termination_event,
     )
     try:
-        yield stdout_writer_fd, stderr_writer_fd
+        yield stdout_writer_fd, stderr_writer_fd, log_writer_fd
     finally:
         termination_event.set()
         try:
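The change above means every caller of logged_io() now unpacks three descriptors instead of two, with the third one reserved for agent/bridge logs. A minimal sketch of the new contract; the one-argument line hook is an assumption based on how the hooks are built with partial(self.log, ...) elsewhere in this diff:

import subprocess

from isolate.backends.common import logged_io


def echo_hook(line: str) -> None:
    # Hypothetical hook: just echo whatever the child process writes.
    print("captured:", line, end="")


with logged_io(echo_hook) as (stdout_fd, stderr_fd, log_fd):
    # stdout/stderr of the child are relayed through the hook; the third
    # descriptor is the new dedicated channel for agent logs.
    subprocess.check_call(["echo", "hello"], stdout=stdout_fd, stderr=stderr_fd)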
isolate/backends/conda.py CHANGED
@@ -171,7 +171,7 @@ class CondaEnvironment(BaseEnvironment[Path]):
 
     def _run_conda(self, *args: Any) -> None:
         conda_executable = get_executable(self._exec_command, self._exec_home)
-        with logged_io(partial(self.log, level=LogLevel.INFO)) as (stdout, stderr):
+        with logged_io(partial(self.log, level=LogLevel.INFO)) as (stdout, stderr, _):
             subprocess.check_call(
                 [conda_executable, *args],
                 stdout=stdout,
isolate/backends/container.py ADDED
@@ -0,0 +1,49 @@
+from __future__ import annotations
+
+import sys
+from dataclasses import dataclass, field
+from pathlib import Path
+from typing import Any, ClassVar
+
+from isolate.backends import BaseEnvironment
+from isolate.backends.common import sha256_digest_of
+from isolate.backends.settings import DEFAULT_SETTINGS, IsolateSettings
+from isolate.connections import PythonIPC
+
+
+@dataclass
+class ContainerizedPythonEnvironment(BaseEnvironment[Path]):
+    BACKEND_NAME: ClassVar[str] = "container"
+
+    image: dict[str, Any] = field(default_factory=dict)
+    python_version: str | None = None
+    tags: list[str] = field(default_factory=list)
+
+    @classmethod
+    def from_config(
+        cls,
+        config: dict[str, Any],
+        settings: IsolateSettings = DEFAULT_SETTINGS,
+    ) -> BaseEnvironment:
+        environment = cls(**config)
+        environment.apply_settings(settings)
+        return environment
+
+    @property
+    def key(self) -> str:
+        # dockerfile_str is always there, but the validation is handled by the
+        # controller.
+        dockerfile_str = self.image.get("dockerfile_str", "")
+        return sha256_digest_of(dockerfile_str, *sorted(self.tags))
+
+    def create(self, *, force: bool = False) -> Path:
+        return Path(sys.exec_prefix)
+
+    def destroy(self, connection_key: Path) -> None:
+        raise NotImplementedError("ContainerizedPythonEnvironment cannot be destroyed")
+
+    def exists(self) -> bool:
+        return True
+
+    def open_connection(self, connection_key: Path) -> PythonIPC:
+        return PythonIPC(self, connection_key)
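The new backend is a thin pass-through: create() returns the running interpreter's prefix and the actual container build is delegated to an external controller. A small sketch of constructing it from a config mapping, the same way from_config would be invoked via the registry; the Dockerfile content and tag are made up for illustration:

from isolate.backends.container import ContainerizedPythonEnvironment

env = ContainerizedPythonEnvironment.from_config(
    config={
        "image": {"dockerfile_str": "FROM python:3.11-slim"},
        "tags": ["example-tag"],
    }
)
print(env.key)       # sha256 digest over the Dockerfile text and sorted tags
print(env.exists())  # always True; nothing is materialized locally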
isolate/backends/pyenv.py CHANGED
@@ -80,7 +80,7 @@ class PyenvEnvironment(BaseEnvironment[Path]):
         return Path(prefix.strip())
 
     def _install_python(self, pyenv: Path, root_path: Path) -> None:
-        with logged_io(partial(self.log, level=LogLevel.INFO)) as (stdout, stderr):
+        with logged_io(partial(self.log, level=LogLevel.INFO)) as (stdout, stderr, _):
             try:
                 subprocess.check_call(
                     [pyenv, "install", "--skip-existing", self.python_version],
@@ -102,7 +102,7 @@ class PyenvEnvironment(BaseEnvironment[Path]):
             return None
 
         pyenv_root = connection_key.parent.parent
-        with logged_io(self.log) as (stdout, stderr):
+        with logged_io(self.log) as (stdout, stderr, _):
             subprocess.check_call(
                 [pyenv, "uninstall", "-f", connection_key.name],
                 env={**os.environ, "PYENV_ROOT": str(pyenv_root)},
isolate/backends/virtualenv.py CHANGED
@@ -110,7 +110,7 @@ class VirtualPythonEnvironment(BaseEnvironment[Path]):
         for extra_index_url in self.extra_index_urls:
             pip_cmd.extend(["--extra-index-url", extra_index_url])
 
-        with logged_io(partial(self.log, level=LogLevel.INFO)) as (stdout, stderr):
+        with logged_io(partial(self.log, level=LogLevel.INFO)) as (stdout, stderr, _):
             try:
                 subprocess.check_call(
                     pip_cmd,
isolate/connections/_local/_base.py CHANGED
@@ -17,7 +17,7 @@ from typing import (
 
 from isolate.backends.common import get_executable_path, logged_io
 from isolate.connections.common import AGENT_SIGNATURE
-from isolate.logs import LogLevel
+from isolate.logs import LogLevel, LogSource
 
 if TYPE_CHECKING:
     from isolate.backends import BaseEnvironment
@@ -113,14 +113,22 @@ class PythonExecutionBase(Generic[ConnectionType]):
 
         python_executable = get_executable_path(self.environment_path, "python")
         with logged_io(
-            partial(self.handle_agent_log, level=LogLevel.STDOUT),
-            partial(self.handle_agent_log, level=LogLevel.STDERR),
-        ) as (stdout, stderr):
+            partial(
+                self.handle_agent_log, source=LogSource.USER, level=LogLevel.STDOUT
+            ),
+            partial(
+                self.handle_agent_log, source=LogSource.USER, level=LogLevel.STDERR
+            ),
+            partial(
+                self.handle_agent_log, source=LogSource.BRIDGE, level=LogLevel.TRACE
+            ),
+        ) as (stdout, stderr, log_fd):
            yield subprocess.Popen(
-                self.get_python_cmd(python_executable, connection),
+                self.get_python_cmd(python_executable, connection, log_fd),
                env=self.get_env_vars(),
                stdout=stdout,
                stderr=stderr,
+                pass_fds=(log_fd,),
                text=True,
            )
 
@@ -158,11 +166,12 @@ class PythonExecutionBase(Generic[ConnectionType]):
         self,
         executable: Path,
         connection: ConnectionType,
+        log_fd: int,
     ) -> list[str | Path]:
         """Return the command to run the agent process with."""
         raise NotImplementedError
 
-    def handle_agent_log(self, line: str, level: LogLevel) -> None:
+    def handle_agent_log(self, line: str, level: LogLevel, source: LogSource) -> None:
         """Handle a log line emitted by the agent process. The level will be either
         STDOUT or STDERR."""
         raise NotImplementedError
isolate/connections/common.py CHANGED
@@ -2,12 +2,12 @@ from __future__ import annotations
 
 import importlib
 import os
-from typing import TYPE_CHECKING, Any, cast
+from contextlib import contextmanager
+from dataclasses import dataclass
+from typing import TYPE_CHECKING, Any, Iterator, cast
 
 from tblib import Traceback, TracebackParseError
 
-from isolate.exceptions import IsolateException
-
 if TYPE_CHECKING:
     from typing import Protocol
 
@@ -20,18 +20,24 @@ if TYPE_CHECKING:
 AGENT_SIGNATURE = "IS_ISOLATE_AGENT"
 
 
-class BaseSerializationError(IsolateException):
+@dataclass
+class SerializationError(Exception):
     """An error that happened during the serialization process."""
 
-    pass
-
+    message: str
 
-class SerializationError(BaseSerializationError):
-    pass
 
+@contextmanager
+def _step(message: str) -> Iterator[None]:
+    """A context manager to capture every expression
+    underneath it and if any of them fails for any reason
+    then it will raise a SerializationError with the
+    given message."""
 
-class DeserializationError(BaseSerializationError):
-    pass
+    try:
+        yield
+    except BaseException as exception:
+        raise SerializationError("Error while " + message) from exception
 
 
 def as_serialization_method(backend: Any) -> SerializationBackend:
@@ -60,22 +66,13 @@ def load_serialized_object(
     flag is set to true, then the given object will be raised as an exception (instead
     of being returned)."""
 
-    try:
+    with _step(f"preparing the serialization backend ({serialization_method})"):
         serialization_backend = as_serialization_method(
             importlib.import_module(serialization_method)
         )
-    except BaseException as exc:
-        raise DeserializationError(
-            "Error while preparing the serialization backend "
-            f"({serialization_method})"
-        ) from exc
 
-    try:
+    with _step("deserializing the given object"):
         result = serialization_backend.loads(raw_object)
-    except BaseException as exc:
-        raise DeserializationError(
-            "Error while deserializing the given object"
-        ) from exc
 
     if was_it_raised:
         raise prepare_exc(result, stringized_traceback=stringized_traceback)
@@ -87,19 +84,13 @@ def serialize_object(serialization_method: str, object: Any) -> bytes:
     """Serialize the given object using the given serialization method. If
     anything fails, then a SerializationError will be raised."""
 
-    try:
+    with _step(f"preparing the serialization backend ({serialization_method})"):
         serialization_backend = as_serialization_method(
             importlib.import_module(serialization_method)
        )
-    except BaseException as exc:
-        raise SerializationError(
-            f"Error while preparing the serialization backend ({serialization_method})"
-        ) from exc
 
-    try:
+    with _step("serializing the given object"):
         return serialization_backend.dumps(object)
-    except BaseException as exc:
-        raise SerializationError("Error while serializing the given object") from exc
 
 
 def is_agent() -> bool:
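The _step() helper above replaces the per-call try/except blocks: any exception raised inside the block is re-raised as a SerializationError chained to the original cause. A minimal sketch of that behaviour; note that _step is a private helper, imported here only for illustration:

from isolate.connections.common import SerializationError, _step

try:
    with _step("deserializing the given object"):
        raise ValueError("corrupt payload")  # stand-in for a failing loads() call
except SerializationError as error:
    print(error.message)           # Error while deserializing the given object
    print(type(error.__cause__))   # <class 'ValueError'>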
isolate/connections/grpc/_base.py CHANGED
@@ -16,11 +16,10 @@ from isolate.connections.common import serialize_object
 from isolate.connections.grpc import agent, definitions
 from isolate.connections.grpc.configuration import get_default_options
 from isolate.connections.grpc.interface import from_grpc
-from isolate.exceptions import IsolateException
 from isolate.logs import LogLevel, LogSource
 
 
-class AgentError(IsolateException):
+class AgentError(Exception):
     """An internal problem caused by (most probably) the agent."""
 
 
@@ -137,13 +136,17 @@ class LocalPythonGRPC(PythonExecutionBase[str], GRPCExecutionBase):
         self,
         executable: Path,
         connection: str,
+        log_fd: int,
     ) -> List[Union[str, Path]]:
         return [
             executable,
             agent_startup.__file__,
             agent.__file__,
             connection,
+            "--log-fd",
+            str(log_fd),
         ]
 
-    def handle_agent_log(self, line: str, level: LogLevel) -> None:
-        self.log(line, level=level, source=LogSource.USER)
+    def handle_agent_log(self, line: str, level: LogLevel, source: LogSource) -> None:
+        print(f"[{source}] [{level}] {line}")
+        self.log(line, level=level, source=source)
isolate/connections/grpc/agent.py CHANGED
@@ -2,16 +2,16 @@
 
 from __future__ import annotations
 
+import os
+import sys
 import traceback
 from argparse import ArgumentParser
 from concurrent import futures
-from dataclasses import dataclass, field
+from dataclasses import dataclass
 from typing import (
     Any,
-    Generator,
     Iterable,
     Iterator,
-    cast,
 )
 
 import grpc
@@ -21,26 +21,27 @@ from isolate.backends.common import sha256_digest_of
 from isolate.connections.common import SerializationError, serialize_object
 from isolate.connections.grpc import definitions
 from isolate.connections.grpc.configuration import get_default_options
-from isolate.connections.grpc.interface import from_grpc, to_grpc
-from isolate.exceptions import IsolateException
-from isolate.logs import Log, LogLevel, LogSource
+from isolate.connections.grpc.interface import from_grpc
 
 
 @dataclass
-class AbortException(IsolateException):
+class AbortException(Exception):
     message: str
 
 
-@dataclass
 class AgentServicer(definitions.AgentServicer):
-    _run_cache: dict[str, Any] = field(default_factory=dict)
+    def __init__(self, log_fd: int | None = None):
+        super().__init__()
+
+        self._run_cache: dict[str, Any] = {}
+        self._log = sys.stdout if log_fd is None else os.fdopen(log_fd, "w")
 
     def Run(
         self,
         request: definitions.FunctionCall,
         context: ServicerContext,
     ) -> Iterator[definitions.PartialRunResult]:
-        yield from self.log(f"A connection has been established: {context.peer()}!")
+        self.log(f"A connection has been established: {context.peer()}!")
 
         extra_args = []
         if request.HasField("setup_func"):
@@ -54,16 +55,16 @@ class AgentServicer(definitions.AgentServicer):
                 result,
                 was_it_raised,
                 stringized_tb,
-            ) = yield from self.execute_function(
+            ) = self.execute_function(
                 request.setup_func,
                 "setup",
             )
 
             if was_it_raised:
-                yield from self.log(
+                self.log(
                     "The setup function has thrown an error. Aborting the run."
                 )
-                yield from self.send_object(
+                yield self.send_object(
                     request.setup_func.method,
                     result,
                     was_it_raised,
@@ -79,12 +80,12 @@ class AgentServicer(definitions.AgentServicer):
             extra_args.append(self._run_cache[cache_key])
 
         try:
-            result, was_it_raised, stringized_tb = yield from self.execute_function(
+            result, was_it_raised, stringized_tb = self.execute_function(
                 request.function,
                 "function",
                 extra_args=extra_args,
            )
-            yield from self.send_object(
+            yield self.send_object(
                 request.function.method,
                 result,
                 was_it_raised,
@@ -99,7 +100,7 @@ class AgentServicer(definitions.AgentServicer):
         function_kind: str,
         *,
         extra_args: Iterable[Any] = (),
-    ) -> Generator[definitions.PartialRunResult, None, Any]:
+    ) -> tuple[Any, bool, str | None]:
         if function.was_it_raised:
             raise AbortException(
                 f"The {function_kind} function must be callable, "
@@ -107,11 +108,14 @@ class AgentServicer(definitions.AgentServicer):
             )
 
         try:
-            # NOTE: technically any sort of exception could be raised here, since
+            # TODO: technically any sort of exception could be raised here, since
             # depickling is basically involves code execution from the *user*.
             function = from_grpc(function)
-        except BaseException as exc:
-            return exc, True, traceback.format_exc()
+        except SerializationError:
+            self.log(traceback.format_exc())
+            raise AbortException(
+                f"The {function_kind} function could not be deserialized."
+            )
 
         if not callable(function):
             raise AbortException(
@@ -119,7 +123,7 @@ class AgentServicer(definitions.AgentServicer):
                 f"not {type(function).__name__}."
             )
 
-        yield from self.log(f"Starting the execution of the {function_kind} function.")
+        self.log(f"Starting the execution of the {function_kind} function.")
 
         was_it_raised = False
         stringized_tb = None
@@ -131,7 +135,7 @@ class AgentServicer(definitions.AgentServicer):
             num_frames = len(traceback.extract_stack()[:-5])
             stringized_tb = "".join(traceback.format_exc(limit=-num_frames))
 
-        yield from self.log(f"Completed the execution of the {function_kind} function.")
+        self.log(f"Completed the execution of the {function_kind} function.")
         return result, was_it_raised, stringized_tb
 
     def send_object(
@@ -140,46 +144,38 @@ class AgentServicer(definitions.AgentServicer):
         result: object,
         was_it_raised: bool,
         stringized_tb: str | None,
-    ) -> Generator[definitions.PartialRunResult, None, Any]:
+    ) -> definitions.PartialRunResult:
         try:
             definition = serialize_object(serialization_method, result)
         except SerializationError:
             if stringized_tb:
-                yield from self.log(
-                    stringized_tb, source=LogSource.USER, level=LogLevel.STDERR
-                )
+                print(stringized_tb, file=sys.stderr)
             raise AbortException(
                 "Error while serializing the execution result "
                 f"(object of type {type(result)})."
             )
         except BaseException:
-            yield from self.log(traceback.format_exc(), level=LogLevel.ERROR)
+            self.log(traceback.format_exc())
            raise AbortException(
                 "An unexpected error occurred while serializing the result."
            )
 
-        yield from self.log("Sending the result.")
+        self.log("Sending the result.")
         serialized_obj = definitions.SerializedObject(
             method=serialization_method,
             definition=definition,
             was_it_raised=was_it_raised,
             stringized_traceback=stringized_tb,
         )
-        yield definitions.PartialRunResult(
+        return definitions.PartialRunResult(
             result=serialized_obj,
             is_complete=True,
             logs=[],
         )
 
-    def log(
-        self,
-        message: str,
-        level: LogLevel = LogLevel.TRACE,
-        source: LogSource = LogSource.BRIDGE,
-    ) -> Iterator[definitions.PartialRunResult]:
-        log = to_grpc(Log(message, level=level, source=source))
-        log = cast(definitions.Log, log)
-        yield definitions.PartialRunResult(result=None, is_complete=False, logs=[log])
+    def log(self, message: str) -> None:
+        self._log.write(message)
+        self._log.flush()
 
     def abort_with_msg(
         self,
@@ -209,10 +205,10 @@ def create_server(address: str) -> grpc.Server:
     return server
 
 
-def run_agent(address: str) -> int:
+def run_agent(address: str, log_fd: int | None = None) -> int:
     """Run the agent servicer on the given address."""
     server = create_server(address)
-    servicer = AgentServicer()
+    servicer = AgentServicer(log_fd=log_fd)
 
     # This function just calls some methods on the server
     # and register a generic handler for the bridge. It does
@@ -227,9 +223,10 @@ def run_agent(address: str) -> int:
 def main() -> int:
     parser = ArgumentParser()
     parser.add_argument("address", type=str)
+    parser.add_argument("--log-fd", type=int)
 
     options = parser.parse_args()
-    return run_agent(options.address)
+    return run_agent(options.address, log_fd=options.log_fd)
 
 
 if __name__ == "__main__":
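The common thread in the agent changes is that log text no longer travels back through the gRPC stream; it is written to a plain file descriptor that the parent passes to the child via --log-fd and pass_fds. A minimal, POSIX-only sketch of that descriptor handoff, independent of isolate itself; the child command and message are made up:

import os
import subprocess
import sys

read_fd, write_fd = os.pipe()

child = subprocess.Popen(
    [
        sys.executable,
        "-c",
        "import os, sys; os.write(int(sys.argv[1]), b'hello from the agent\\n')",
        str(write_fd),
    ],
    pass_fds=(write_fd,),  # keep the inherited descriptor open in the child
)
child.wait()
os.close(write_fd)  # close the parent's copy so the reader sees EOF

with os.fdopen(read_fd) as reader:
    print(reader.read(), end="")  # the line the child wrote on the log channel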
isolate/connections/ipc/_base.py CHANGED
@@ -202,6 +202,7 @@ class PythonIPC(PythonExecutionBase[AgentListener], IsolatedProcessConnection):
         self,
         executable: Path,
         connection: AgentListener,
+        log_fd: int,
     ) -> list[str | Path]:
         assert isinstance(connection.address, tuple)
         return [
@@ -214,21 +215,9 @@ class PythonIPC(PythonExecutionBase[AgentListener], IsolatedProcessConnection):
             # the connection with the bridge.
             "--serialization-backend",
             self.environment.settings.serialization_method,
+            "--log-fd",
+            str(log_fd),
         ]
 
-    def handle_agent_log(self, line: str, level: LogLevel) -> None:
-        # TODO: we probably should create a new fd and pass it as
-        # one of the the arguments to the child process. Then everything
-        # from that fd can be automatically logged as originating from the
-        # bridge.
-
-        # Agent can produce [trace] messages, so change the log
-        # level to it if this does not originate from the user.
-        if line.startswith("[trace]"):
-            line = line.replace("[trace]", "", 1)
-            level = LogLevel.TRACE
-            source = LogSource.BRIDGE
-        else:
-            source = LogSource.USER
-
+    def handle_agent_log(self, line: str, level: LogLevel, source: LogSource) -> None:
         self.log(line, level=level, source=source)
isolate/connections/ipc/agent.py CHANGED
@@ -15,6 +15,8 @@
 # one being the actual result of the given callable, and the other one is a boolean flag
 # indicating whether the callable has raised an exception or not.
 
+from __future__ import annotations
+
 import base64
 import importlib
 import os
@@ -24,7 +26,7 @@ import traceback
 from argparse import ArgumentParser
 from contextlib import closing
 from multiprocessing.connection import Client
-from typing import TYPE_CHECKING, Any, Callable, ContextManager, Tuple
+from typing import TYPE_CHECKING, Any, Callable, ContextManager
 
 if TYPE_CHECKING:
     # Somhow mypy can't figure out that `ConnectionWrapper`
@@ -47,13 +49,13 @@ else:
     from multiprocessing.connection import ConnectionWrapper
 
 
-def decode_service_address(address: str) -> Tuple[str, int]:
+def decode_service_address(address: str) -> tuple[str, int]:
     host, port = base64.b64decode(address).decode("utf-8").rsplit(":", 1)
     return host, int(port)
 
 
 def child_connection(
-    serialization_method: str, address: Tuple[str, int]
+    serialization_method: str, address: tuple[str, int]
 ) -> ContextManager[ConnectionWrapper]:
     serialization_backend = importlib.import_module(serialization_method)
     return closing(
@@ -70,7 +72,11 @@ DEBUG_TIMEOUT = 60 * 15
 
 
 def run_client(
-    serialization_method: str, address: Tuple[str, int], *, with_pdb: bool = False
+    serialization_method: str,
+    address: tuple[str, int],
+    *,
+    with_pdb: bool = False,
+    log_fd: int | None = None,
 ) -> None:
     # Debug Mode
     # ==========
@@ -96,13 +102,22 @@ def run_client(
 
         pdb.set_trace()
 
-    print(f"[trace] Trying to create a connection to {address}")
+    if log_fd is None:
+        _log = sys.stdout
+    else:
+        _log = os.fdopen(log_fd, "w")
+
+    def log(_msg):
+        _log.write(_msg)
+        _log.flush()
+
+    log(f"Trying to create a connection to {address}")
     # TODO(feat): this should probably run in a loop instead of
     # receiving a single function and then exitting immediately.
     with child_connection(serialization_method, address) as connection:
-        print(f"[trace] Created child connection to {address}")
+        log(f"Created child connection to {address}")
         callable = connection.recv()
-        print(f"[trace] Received the callable at {address}")
+        log(f"Received the callable at {address}")
 
         result = None
         did_it_raise = False
@@ -149,12 +164,11 @@ def _get_shell_bootstrap() -> str:
 
 
 def main() -> int:
-    print(f"[trace] Starting the isolated process at PID {os.getpid()}")
-
     parser = ArgumentParser()
     parser.add_argument("listen_at")
     parser.add_argument("--with-pdb", action="store_true", default=False)
     parser.add_argument("--serialization-backend", default="pickle")
+    parser.add_argument("--log-fd", type=int)
 
     options = parser.parse_args()
     if IS_DEBUG_MODE:
@@ -178,7 +192,12 @@ def main() -> int:
 
     serialization_method = options.serialization_backend
     address = decode_service_address(options.listen_at)
-    run_client(serialization_method, address, with_pdb=options.with_pdb)
+    run_client(
+        serialization_method,
+        address,
+        with_pdb=options.with_pdb,
+        log_fd=options.log_fd,
+    )
     return 0
 
 
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: isolate
-Version: 0.12.14
+Version: 0.12.16
 Summary: Managed isolated environments for Python
 Author-email: Features & Labels <hello@fal.ai>
 Project-URL: Issues, https://github.com/fal-ai/isolate/issues
@@ -1,29 +1,29 @@
 isolate/__init__.py,sha256=uXOKnONs7sXgARNgElwr4_A1sKoA6ACHVEvs3IDiX1M,127
-isolate/_isolate_version.py,sha256=oRsayPIGrFfFJRhHuJNbw-xM7LmQjNrAd4E8zKg1CVc,415
+isolate/_isolate_version.py,sha256=bxigpDKzhomSYVhs2qwGig9Gi0LMxVUVJgPbV18aLDk,415
 isolate/_version.py,sha256=05pXvy-yr5t3I1m9JMn42Ilzpg7fa8IB2J8a3G7t1cU,274
-isolate/exceptions.py,sha256=ki-f7khY-Yo1J8nPdy5IP-aZhQyLUP9pp3dE0moEVKw,44
 isolate/logs.py,sha256=R_AHUVYD18z_PhtK_mDWi9Gch79CxmwHY09hUDShtwg,2079
 isolate/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 isolate/registry.py,sha256=hpzv4HI7iihG5I7i5r8Pb257ibhEKY18xQcG-w1-BgI,1590
 isolate/backends/__init__.py,sha256=LLrSM7UnDFW8tIT5oYlE1wVJrxKcaj_v7cFwvTjQTlc,119
-isolate/backends/_base.py,sha256=senNj-k2Dp2DkyxmZy7WxzsexmPfU6_DBo7bbZDqTLQ,4167
-isolate/backends/common.py,sha256=ZiU0Vkz78qaPH_3ThV28OcHi8QgXie4aBrkPXCJuNBM,8321
-isolate/backends/conda.py,sha256=OIQrpt_VffW2PjPOIzp-JvonW4e7rDQ1ASHOEjyzD8E,7646
+isolate/backends/_base.py,sha256=Kt5pkhDzXZblq4vxaM3DQTo-Bj-7pIRZFlqJR7OfejY,4112
+isolate/backends/common.py,sha256=Zx0HXnBX_jlOLpFNJzY4ue8NcW-kktqo_WZOJmPSjvI,8481
+isolate/backends/conda.py,sha256=S5q5bdY787AMTck2iMGtwu-LHMH4a1qCIjNHDKTkqok,7649
+isolate/backends/container.py,sha256=Ek06tMwrCzEGFUlIQ8lIWURiwchTiuiS4HHDk-9bN9k,1567
 isolate/backends/local.py,sha256=woxe4dmXuEHxWKsGNndoRA1_sP6yG-dg6tlFZni0mZc,1360
-isolate/backends/pyenv.py,sha256=G-OIwESUOU5TqqumwsKVUhRiFQzxB1xrPn-bGm4LQoI,5428
+isolate/backends/pyenv.py,sha256=ZwTYoVPIWhS3Y4hN51x95aIOHi15GF7kEDdKTNhlMTE,5434
 isolate/backends/remote.py,sha256=qUm54mpqk0kaEfbPZl962Td3_P3qcpyVcfGdKfmkJHs,4234
 isolate/backends/settings.py,sha256=AiPYpzeon_AHS3ewSIKc0TMF4XrNdM32EFvgSTH2odE,3291
-isolate/backends/virtualenv.py,sha256=DiBAiybOLWMxJobIRaH4AgDn9agY5VrVPaSQObQa1Lo,6989
+isolate/backends/virtualenv.py,sha256=CdzDO4stxNCMbBsNnR1agwyu2FDBDx9UwJWI7bv010k,6992
 isolate/common/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 isolate/common/timestamp.py,sha256=seh7FrMRH4i1SCQavA8d-7z8qi0pP8lYYhd29gTPMwE,367
 isolate/connections/__init__.py,sha256=oa0PNo7ZQ0StPIDvKnJ02_CNVMyfOhxJ3M1C0VMvj9c,627
-isolate/connections/common.py,sha256=NwdUAV3jpzZwnzDFylPvdUtIwi6axx1G4HITJ11xMdg,3602
+isolate/connections/common.py,sha256=PAfBGKZNUdtFlZQlw3_nQaUCKQXTnEkxzNNRV_i4R2A,3498
 isolate/connections/_local/__init__.py,sha256=6FtCKRSFBvTvjm5LNlNA-mieKEq3J7DZZRPcXVedERo,146
-isolate/connections/_local/_base.py,sha256=qCx2M8kbxuPTruj9kH5z005LU2FaC0BkLxsgY-sXRlg,6214
+isolate/connections/_local/_base.py,sha256=nbeIH25wAZn1WDTGXv2HvAfFggzjJAF8GgGS4_iqNoY,6544
 isolate/connections/_local/agent_startup.py,sha256=swCs6Q0yVkDw7w-RftizHSMyJDM7DQwuP3TB0qI1ucg,1552
 isolate/connections/grpc/__init__.py,sha256=tcesLxlC36P6wSg2lBcO2egsJWMbSKwc8zFXhWac3YU,85
-isolate/connections/grpc/_base.py,sha256=hfGy_kFjifpK3zHD0_OTqpTF6CTOZH6abNnET4XLwE8,5613
-isolate/connections/grpc/agent.py,sha256=xRBfbmsWlOU4U6pBl0g_fhMdRZgVzXoKmi4gKTM3dYQ,7747
+isolate/connections/grpc/_base.py,sha256=ZxmAVOqy1N-k0xqw4xwmpLEXhmCoxecbJC4jXGPbGfc,5685
+isolate/connections/grpc/agent.py,sha256=LfmLs6B_lLftU-BQ_lm-p3RR-69Ls9hwLz0FpQDts3c,7451
 isolate/connections/grpc/configuration.py,sha256=50YvGGHA9uyKg74xU_gc73j7bsFk973uIpMhmw2HhxY,788
 isolate/connections/grpc/interface.py,sha256=yt63kytgXRXrTnjePGJVdXz4LJJVSSrNkJCF1yz6FIE,2270
 isolate/connections/grpc/definitions/__init__.py,sha256=Z0453Bbjoq-Oxm2Wfi9fae-BFf8YsZwmuh88strmvxo,459
@@ -36,8 +36,8 @@ isolate/connections/grpc/definitions/common_pb2.py,sha256=kU4hYQ04B2LNcjCjXb9m1u
 isolate/connections/grpc/definitions/common_pb2.pyi,sha256=J624Xc1Fp91ZFF8zdjJk1KCHNfHc2gRY8i3Aj1ofzKo,6887
 isolate/connections/grpc/definitions/common_pb2_grpc.py,sha256=xYOs94SXiNYAlFodACnsXW5QovLsHY5tCk3p76RH5Zc,158
 isolate/connections/ipc/__init__.py,sha256=j2Mbsph2mRhAWmkMyrtPOz0VG-e75h1OOZLwzs6pXUo,131
-isolate/connections/ipc/_base.py,sha256=iXCcgRU0c1Q0FDfm9nXVvjGwi-_6nCgkQZPATFaSu7Q,8494
-isolate/connections/ipc/agent.py,sha256=Wcoi5nA5RPPCll60sih9nTnFtM1JXM2QEUfSbjZUgjs,6828
+isolate/connections/ipc/_base.py,sha256=Jk715XK2ei3yBpFcwUnFZ0owQMMf5jekZFNh2WlKRT4,8009
+isolate/connections/ipc/agent.py,sha256=hGlL4x78FhRvMZ4DkVh3dk-EmWQqxHW4LIipgyOkw08,7069
 isolate/server/__init__.py,sha256=7R3GuWmxuqe0q28rVqETJN9OCrP_-Svjv9h0NR1GFL0,79
 isolate/server/health_server.py,sha256=yN7F1Q28DdX8-Zk3gef7XcQEE25XwlHwzV5GBM75aQM,1249
 isolate/server/interface.py,sha256=nGbjdxrN0p9m1LNdeds8NIoJOwPYW2NM6ktmbhfG4_s,687
@@ -52,9 +52,9 @@ isolate/server/health/health.proto,sha256=wE2_QD0OQAblKkEBG7sALLXEOj1mOLKG-FbC4t
 isolate/server/health/health_pb2.py,sha256=mCnDq0-frAddHopN_g_LueHddbW-sN5kOfntJDlAvUY,1783
 isolate/server/health/health_pb2.pyi,sha256=boMRHMlX770EuccQCFTeRgf_KA_VMgW7l9GZIwxvMok,2546
 isolate/server/health/health_pb2_grpc.py,sha256=JRluct2W4af83OYxwmcCn0vRc78zf04Num0vBApuPEo,4005
-isolate-0.12.14.dist-info/LICENSE,sha256=427vuyirL5scgBLqA9UWcdnxKrtSGc0u_JfUupk6lAA,11359
-isolate-0.12.14.dist-info/METADATA,sha256=ETQt_2bLdwTFn_TFF31pJs_dYbc4xM1YKWzCDHhNlGQ,3154
-isolate-0.12.14.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
-isolate-0.12.14.dist-info/entry_points.txt,sha256=XQ_nl-8MR94UnekxbBJRNGlY-lZ_Qh50N4mzwFDdwV8,290
-isolate-0.12.14.dist-info/top_level.txt,sha256=W9QJBHcq5WXRkbOXf25bvftzFsOZZN4n1DAatdroZrs,8
-isolate-0.12.14.dist-info/RECORD,,
+isolate-0.12.16.dist-info/LICENSE,sha256=427vuyirL5scgBLqA9UWcdnxKrtSGc0u_JfUupk6lAA,11359
+isolate-0.12.16.dist-info/METADATA,sha256=GYUwGs0Vlac8ds1WYrR52NRjidg9W_eEyKsGMDSAY7Q,3154
+isolate-0.12.16.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
+isolate-0.12.16.dist-info/entry_points.txt,sha256=s3prh2EERaVCbL8R45tfY5WFPZ1TsYOsz305YR7s-Pc,360
+isolate-0.12.16.dist-info/top_level.txt,sha256=W9QJBHcq5WXRkbOXf25bvftzFsOZZN4n1DAatdroZrs,8
+isolate-0.12.16.dist-info/RECORD,,
@@ -1,5 +1,6 @@
 [isolate.backends]
 conda = isolate.backends.conda:CondaEnvironment
+container = isolate.backends.container:ContainerizedPythonEnvironment
 isolate-server = isolate.backends.remote:IsolateServer
 local = isolate.backends.local:LocalPythonEnvironment
 pyenv = isolate.backends.pyenv:PyenvEnvironment
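Because the new backend is wired in through an entry point, it becomes discoverable with the standard metadata APIs once the wheel is installed. A small sketch of listing the registered backends, using the Python 3.10+ selection API of importlib.metadata:

from importlib.metadata import entry_points

for entry_point in entry_points(group="isolate.backends"):
    print(entry_point.name, "->", entry_point.value)
# The listing should now include:
# container -> isolate.backends.container:ContainerizedPythonEnvironment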
isolate/exceptions.py DELETED
@@ -1,2 +0,0 @@
-class IsolateException(Exception):
-    pass