isolate-0.22.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (61)
  1. isolate/__init__.py +3 -0
  2. isolate/_isolate_version.py +34 -0
  3. isolate/_version.py +6 -0
  4. isolate/backends/__init__.py +2 -0
  5. isolate/backends/_base.py +132 -0
  6. isolate/backends/common.py +259 -0
  7. isolate/backends/conda.py +215 -0
  8. isolate/backends/container.py +64 -0
  9. isolate/backends/local.py +46 -0
  10. isolate/backends/pyenv.py +143 -0
  11. isolate/backends/remote.py +141 -0
  12. isolate/backends/settings.py +121 -0
  13. isolate/backends/virtualenv.py +204 -0
  14. isolate/common/__init__.py +0 -0
  15. isolate/common/timestamp.py +15 -0
  16. isolate/connections/__init__.py +21 -0
  17. isolate/connections/_local/__init__.py +2 -0
  18. isolate/connections/_local/_base.py +190 -0
  19. isolate/connections/_local/agent_startup.py +53 -0
  20. isolate/connections/common.py +121 -0
  21. isolate/connections/grpc/__init__.py +1 -0
  22. isolate/connections/grpc/_base.py +175 -0
  23. isolate/connections/grpc/agent.py +284 -0
  24. isolate/connections/grpc/configuration.py +23 -0
  25. isolate/connections/grpc/definitions/__init__.py +11 -0
  26. isolate/connections/grpc/definitions/agent.proto +18 -0
  27. isolate/connections/grpc/definitions/agent_pb2.py +29 -0
  28. isolate/connections/grpc/definitions/agent_pb2.pyi +44 -0
  29. isolate/connections/grpc/definitions/agent_pb2_grpc.py +68 -0
  30. isolate/connections/grpc/definitions/common.proto +49 -0
  31. isolate/connections/grpc/definitions/common_pb2.py +35 -0
  32. isolate/connections/grpc/definitions/common_pb2.pyi +152 -0
  33. isolate/connections/grpc/definitions/common_pb2_grpc.py +4 -0
  34. isolate/connections/grpc/interface.py +71 -0
  35. isolate/connections/ipc/__init__.py +5 -0
  36. isolate/connections/ipc/_base.py +225 -0
  37. isolate/connections/ipc/agent.py +205 -0
  38. isolate/logger.py +53 -0
  39. isolate/logs.py +76 -0
  40. isolate/py.typed +0 -0
  41. isolate/registry.py +53 -0
  42. isolate/server/__init__.py +1 -0
  43. isolate/server/definitions/__init__.py +13 -0
  44. isolate/server/definitions/server.proto +80 -0
  45. isolate/server/definitions/server_pb2.py +56 -0
  46. isolate/server/definitions/server_pb2.pyi +241 -0
  47. isolate/server/definitions/server_pb2_grpc.py +205 -0
  48. isolate/server/health/__init__.py +11 -0
  49. isolate/server/health/health.proto +23 -0
  50. isolate/server/health/health_pb2.py +32 -0
  51. isolate/server/health/health_pb2.pyi +66 -0
  52. isolate/server/health/health_pb2_grpc.py +99 -0
  53. isolate/server/health_server.py +40 -0
  54. isolate/server/interface.py +27 -0
  55. isolate/server/server.py +735 -0
  56. isolate-0.22.0.dist-info/METADATA +88 -0
  57. isolate-0.22.0.dist-info/RECORD +61 -0
  58. isolate-0.22.0.dist-info/WHEEL +5 -0
  59. isolate-0.22.0.dist-info/entry_points.txt +7 -0
  60. isolate-0.22.0.dist-info/licenses/LICENSE +201 -0
  61. isolate-0.22.0.dist-info/top_level.txt +1 -0
isolate/backends/container.py
@@ -0,0 +1,64 @@
+from __future__ import annotations
+
+import sys
+from dataclasses import dataclass, field
+from pathlib import Path
+from typing import Any, ClassVar
+
+from isolate.backends import BaseEnvironment
+from isolate.backends.common import sha256_digest_of
+from isolate.backends.settings import DEFAULT_SETTINGS, IsolateSettings
+from isolate.connections import PythonIPC
+
+
+@dataclass
+class ContainerizedPythonEnvironment(BaseEnvironment[Path]):
+    BACKEND_NAME: ClassVar[str] = "container"
+
+    image: dict[str, Any] = field(default_factory=dict)
+    python_version: str | None = None
+    requirements: list[str] = field(default_factory=list)
+    tags: list[str] = field(default_factory=list)
+    resolver: str | None = None
+
+    @classmethod
+    def from_config(
+        cls,
+        config: dict[str, Any],
+        settings: IsolateSettings = DEFAULT_SETTINGS,
+    ) -> BaseEnvironment:
+        environment = cls(**config)
+        environment.apply_settings(settings)
+        if environment.resolver not in ("uv", None):
+            raise ValueError(
+                "Only 'uv' is supported as a resolver for container environments."
+            )
+        return environment
+
+    @property
+    def key(self) -> str:
+        extras = []
+        if self.resolver is not None:
+            extras.append(f"resolver={self.resolver}")
+
+        # dockerfile_str is always there, but the validation is handled by the
+        # controller.
+        dockerfile_str = self.image.get("dockerfile_str", "")
+        return sha256_digest_of(
+            dockerfile_str,
+            *self.requirements,
+            *sorted(self.tags),
+            *extras,
+        )
+
+    def create(self, *, force: bool = False) -> Path:
+        return Path(sys.exec_prefix)
+
+    def destroy(self, connection_key: Path) -> None:
+        raise NotImplementedError("ContainerizedPythonEnvironment cannot be destroyed")
+
+    def exists(self) -> bool:
+        return True
+
+    def open_connection(self, connection_key: Path) -> PythonIPC:
+        return PythonIPC(self, connection_key)
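
For context, a minimal usage sketch of the backend above (not part of the package). The Dockerfile text, requirement pins, and tags are illustrative; the point is that the cache key is a digest over every input that affects the image.

import contextlib

from isolate.backends.container import ContainerizedPythonEnvironment

base_config = {
    "image": {"dockerfile_str": "FROM python:3.11-slim\n"},
    "requirements": ["pyjokes==0.6.0"],
    "tags": ["team-a"],
    "resolver": "uv",
}
env_a = ContainerizedPythonEnvironment.from_config(base_config)

# Changing any key input (Dockerfile, requirements, tags, resolver) changes
# the sha256 digest, so the controller can cache built images per definition.
env_b = ContainerizedPythonEnvironment.from_config(
    {**base_config, "requirements": ["pyjokes==0.8.3"]}
)
assert env_a.key != env_b.key

# Resolvers other than "uv" are rejected at config time (omitting it is fine).
with contextlib.suppress(ValueError):
    ContainerizedPythonEnvironment.from_config({**base_config, "resolver": "pip"})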
isolate/backends/local.py
@@ -0,0 +1,46 @@
+from __future__ import annotations
+
+import sys
+from dataclasses import dataclass
+from pathlib import Path
+from typing import Any, ClassVar
+
+from isolate.backends import BaseEnvironment
+from isolate.backends.common import sha256_digest_of
+from isolate.backends.settings import DEFAULT_SETTINGS, IsolateSettings
+from isolate.connections import PythonIPC
+
+
+@dataclass
+class LocalPythonEnvironment(BaseEnvironment[Path]):
+    BACKEND_NAME: ClassVar[str] = "local"
+
+    @classmethod
+    def from_config(
+        cls,
+        config: dict[str, Any],
+        settings: IsolateSettings = DEFAULT_SETTINGS,
+    ) -> BaseEnvironment:
+        environment = cls(**config)
+        environment.apply_settings(settings)
+        return environment
+
+    @property
+    def key(self) -> str:
+        return sha256_digest_of(sys.exec_prefix)
+
+    def create(self, *, force: bool = False) -> Path:
+        if force is True:
+            raise NotImplementedError(
+                "LocalPythonEnvironment cannot be forcibly created"
+            )
+        return Path(sys.exec_prefix)
+
+    def destroy(self, connection_key: Path) -> None:
+        raise NotImplementedError("LocalPythonEnvironment cannot be destroyed")
+
+    def exists(self) -> bool:
+        return True
+
+    def open_connection(self, connection_key: Path) -> PythonIPC:
+        return PythonIPC(self, connection_key)
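
For context, an end-to-end sketch (not part of the package) of running a callable in the current interpreter through this backend. A functools.partial over a stdlib function is used so the default "pickle" serialization method can handle it; the arithmetic is illustrative.

import functools
import operator

from isolate.backends.local import LocalPythonEnvironment

environment = LocalPythonEnvironment.from_config({})
assert environment.exists()

connection_key = environment.create()  # Path(sys.exec_prefix) of this process
with environment.open_connection(connection_key) as connection:
    # The callable is serialized, executed by an agent process running on the
    # same interpreter, and the return value is shipped back over IPC.
    print(connection.run(functools.partial(operator.mul, 6, 7)))  # 42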
isolate/backends/pyenv.py
@@ -0,0 +1,143 @@
+from __future__ import annotations
+
+import functools
+import os
+import shutil
+import subprocess
+from dataclasses import dataclass
+from functools import partial
+from pathlib import Path
+from typing import Any, ClassVar
+
+from isolate.backends import BaseEnvironment, EnvironmentCreationError
+from isolate.backends.common import logged_io
+from isolate.backends.settings import DEFAULT_SETTINGS, IsolateSettings
+from isolate.connections import PythonIPC
+from isolate.logs import LogLevel
+
+_PYENV_EXECUTABLE_NAME = "pyenv"
+_PYENV_EXECUTABLE_PATH = os.environ.get("ISOLATE_PYENV_EXECUTABLE")
+
+
+@dataclass
+class PyenvEnvironment(BaseEnvironment[Path]):
+    BACKEND_NAME: ClassVar[str] = "pyenv"
+
+    python_version: str
+
+    @classmethod
+    def from_config(
+        cls,
+        config: dict[str, Any],
+        settings: IsolateSettings = DEFAULT_SETTINGS,
+    ) -> BaseEnvironment:
+        environment = cls(**config)
+        environment.apply_settings(settings)
+        return environment
+
+    @property
+    def key(self) -> str:
+        return os.path.join("versions", self.python_version)
+
+    def create(self, *, force: bool = False) -> Path:
+        pyenv = _get_pyenv_executable()
+        env_path = self.settings.cache_dir_for(self)
+        with self.settings.cache_lock_for(env_path):
+            # pyenv installs [0] the Python versions under $root/versions/$version, where
+            # we use versions/$version as the key and $root as the base directory
+            # (for pyenv).
+            #
+            # [0]: https://github.com/pyenv/pyenv#locating-pyenv-provided-python-installations
+            pyenv_root = env_path.parent.parent
+            prefix = self._try_get_prefix(pyenv, pyenv_root)
+            if prefix is None or force:
+                self._install_python(pyenv, pyenv_root)
+                prefix = self._try_get_prefix(pyenv, pyenv_root)
+                if not prefix:
+                    raise EnvironmentCreationError(
+                        f"Python {self.python_version} must have been installed by now."
+                    )
+
+            assert prefix is not None
+            return prefix
+
+    def _try_get_prefix(self, pyenv: Path, root_path: Path) -> Path | None:
+        try:
+            prefix = subprocess.check_output(
+                [pyenv, "prefix", self.python_version],
+                env={**os.environ, "PYENV_ROOT": str(root_path)},
+                text=True,
+                stderr=subprocess.PIPE,
+            )
+        except subprocess.CalledProcessError as exc:
+            if "not installed" in exc.stderr:
+                return None
+            raise EnvironmentCreationError(
+                f"Failed to get the prefix for Python {self.python_version}.\n"
+                f"{exc.stdout}\n{exc.stderr}"
+            )
+
+        return Path(prefix.strip())
+
+    def _install_python(self, pyenv: Path, root_path: Path) -> None:
+        with logged_io(partial(self.log, level=LogLevel.INFO)) as (stdout, stderr, _):
+            try:
+                subprocess.check_call(
+                    [pyenv, "install", "--skip-existing", self.python_version],
+                    env={**os.environ, "PYENV_ROOT": str(root_path)},
+                    stdout=stdout,
+                    stderr=stderr,
+                )
+            except subprocess.CalledProcessError:
+                raise EnvironmentCreationError(
+                    f"Failed to install Python {self.python_version} via pyenv.\n"
+                )
+
+    def destroy(self, connection_key: Path) -> None:
+        pyenv = _get_pyenv_executable()
+        with self.settings.cache_lock_for(connection_key):
+            # It might be destroyed already (while we are waiting
+            # for the lock to be released).
+            if not connection_key.exists():
+                return None
+
+            pyenv_root = connection_key.parent.parent
+            with logged_io(self.log) as (stdout, stderr, _):
+                subprocess.check_call(
+                    [pyenv, "uninstall", "-f", connection_key.name],
+                    env={**os.environ, "PYENV_ROOT": str(pyenv_root)},
+                    stdout=stdout,
+                    stderr=stderr,
+                )
+
+    def exists(self) -> bool:
+        pyenv = _get_pyenv_executable()
+        cache_dir = self.settings.cache_dir_for(self)
+        with self.settings.cache_lock_for(cache_dir):
+            pyenv_root = cache_dir.parent.parent
+            prefix = self._try_get_prefix(pyenv, pyenv_root)
+            return prefix is not None
+
+    def open_connection(self, connection_key: Path) -> PythonIPC:
+        return PythonIPC(self, connection_key)
+
+
+@functools.lru_cache(1)
+def _get_pyenv_executable() -> Path:
+    if _PYENV_EXECUTABLE_PATH:
+        if not os.path.exists(_PYENV_EXECUTABLE_PATH):
+            raise EnvironmentCreationError(
+                "Path to pyenv executable not found! ISOLATE_PYENV_EXECUTABLE "
+                f"variable: {_PYENV_EXECUTABLE_PATH!r}"
+            )
+        return Path(_PYENV_EXECUTABLE_PATH)
+
+    pyenv_path = shutil.which(_PYENV_EXECUTABLE_NAME)
+    if pyenv_path is None:
+        raise FileNotFoundError(
+            "Could not find the pyenv executable. If pyenv is not already installed "
+            "in your system, please install it first. If it is not in your PATH, "
+            "then point ISOLATE_PYENV_EXECUTABLE to the absolute path of the "
+            "pyenv executable."
+        )
+    return Path(pyenv_path)
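
For context, a sketch (not part of the package) of driving the backend above. It assumes pyenv is installed and discoverable, either on PATH or via ISOLATE_PYENV_EXECUTABLE; the version string is illustrative.

from isolate.backends.pyenv import PyenvEnvironment

environment = PyenvEnvironment.from_config({"python_version": "3.11.9"})

# create() takes a file lock on the cache entry, runs
# `pyenv install --skip-existing 3.11.9` with PYENV_ROOT pointed at isolate's
# cache directory, and returns the prefix reported by `pyenv prefix`
# (roughly <cache_dir>/pyenv/versions/3.11.9).
prefix = environment.create()
assert environment.exists()

# The resulting interpreter is then bridged like any other local environment.
connection = environment.open_connection(prefix)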
isolate/backends/remote.py
@@ -0,0 +1,141 @@
+from __future__ import annotations
+
+import copy
+import json
+from dataclasses import dataclass
+from typing import Any, ClassVar, List
+
+import grpc
+
+from isolate.backends import (
+    BaseEnvironment,
+    BasicCallable,
+    CallResultType,
+    EnvironmentConnection,
+)
+from isolate.backends.common import sha256_digest_of
+from isolate.backends.settings import DEFAULT_SETTINGS, IsolateSettings
+from isolate.server import interface
+from isolate.server.definitions import (
+    BoundFunction,
+    EnvironmentDefinition,
+    IsolateStub,
+)
+
+
+@dataclass
+class IsolateServer(BaseEnvironment[List[EnvironmentDefinition]]):
+    BACKEND_NAME: ClassVar[str] = "isolate-server"
+
+    host: str
+    target_environments: list[dict[str, Any]]
+
+    @classmethod
+    def from_config(
+        cls,
+        config: dict[str, Any],
+        settings: IsolateSettings = DEFAULT_SETTINGS,
+    ) -> BaseEnvironment:
+        environment = cls(**config)
+        environment.apply_settings(settings)
+
+        return environment
+
+    @property
+    def key(self) -> str:
+        return sha256_digest_of(
+            self.host,
+            json.dumps(self.target_environments),
+        )
+
+    def create(self, *, force: bool = False) -> list[EnvironmentDefinition]:
+        if force is True:
+            raise NotImplementedError(
+                "Only individual environments can be forcibly created, please set "
+                "them up manually by using the 'force_create' flag on the "
+                "environment definition."
+            )
+
+        envs = []
+        for env in self.target_environments:
+            if not env.get("kind") or not env.get("configuration"):
+                raise RuntimeError(f"`kind` or `configuration` key missing in: {env}")
+            configuration = copy.deepcopy(env["configuration"])
+            force_create = configuration.pop("force_create", False)
+            envs.append(
+                EnvironmentDefinition(
+                    kind=env["kind"],
+                    configuration=interface.to_struct(configuration),
+                    force=force_create,
+                )
+            )
+        return envs
+
+    def exists(self) -> bool:
+        return False
+
+    def open_connection(
+        self,
+        connection_key: list[EnvironmentDefinition],
+    ) -> IsolateServerConnection:
+        return IsolateServerConnection(self, self.host, connection_key)
+
+
+@dataclass
+class IsolateServerConnection(EnvironmentConnection):
+    host: str
+    definitions: list[EnvironmentDefinition]
+    _channel: grpc.Channel | None = None
+
+    def _acquire_channel(self) -> None:
+        self._channel = grpc.insecure_channel(self.host)
+
+    def _release_channel(self) -> None:
+        if self._channel:
+            self._channel.close()
+            self._channel = None
+
+    def __exit__(self, *args: Any) -> None:
+        self._release_channel()
+
+    def run(
+        self,
+        executable: BasicCallable,
+        *args: Any,
+        **kwargs: Any,
+    ) -> CallResultType:  # type: ignore[type-var]
+        if self._channel is None:
+            self._acquire_channel()
+
+        stub = IsolateStub(self._channel)
+        request = BoundFunction(
+            function=interface.to_serialized_object(
+                executable,
+                method=self.environment.settings.serialization_method,
+                was_it_raised=False,
+            ),
+            environments=self.definitions,
+            stream_logs=True,  # Default to streaming logs
+        )
+
+        return_value = []
+        for result in stub.Run(request):
+            for raw_log in result.logs:
+                log = interface.from_grpc(raw_log)
+                self.log(log.message, level=log.level, source=log.source)
+
+            if result.is_complete:
+                return_value.append(interface.from_grpc(result.result))
+
+        if len(return_value) == 0:
+            raise RuntimeError(
+                "No result object was received from the server"
+                " (it never set is_complete to True)."
+            )
+        elif len(return_value) > 1:
+            raise RuntimeError(
+                "Multiple result objects were received from the server"
+                " (it set is_complete to True multiple times)."
+            )
+        else:
+            return return_value[0]
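
For context, a sketch (not part of the package) of submitting a call through this backend. The host address, the "virtualenv" kind, and the requirement pin are illustrative, and it assumes an isolate gRPC server is reachable at that address.

import functools
import operator

from isolate.backends.remote import IsolateServer

environment = IsolateServer.from_config(
    {
        "host": "localhost:50001",
        "target_environments": [
            {
                "kind": "virtualenv",
                "configuration": {
                    "requirements": ["pyjokes==0.6.0"],
                    "force_create": True,  # stripped out and sent as `force`
                },
            }
        ],
    }
)

# create() only translates the definitions into protobuf messages; the actual
# environment builds happen on the server when the call is submitted.
definitions = environment.create()

with environment.open_connection(definitions) as connection:
    # Logs stream back through self.log() while the call runs; the single
    # response marked is_complete=True carries the return value.
    print(connection.run(functools.partial(operator.add, 20, 22)))  # 42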
isolate/backends/settings.py
@@ -0,0 +1,121 @@
+from __future__ import annotations
+
+import os
+import shutil
+import tempfile
+from contextlib import contextmanager
+from dataclasses import dataclass, replace
+from pathlib import Path
+from typing import TYPE_CHECKING, Callable, Iterator
+
+from platformdirs import user_cache_dir
+
+from isolate.backends.common import lock_build_path
+from isolate.logs import Log, LogLevel, LogSource
+
+if TYPE_CHECKING:
+    from isolate.backends import BaseEnvironment
+
+_SYSTEM_TEMP_DIR = Path(tempfile.gettempdir())
+_STRICT_CACHE = os.getenv("ISOLATE_STRICT_CACHE", "0") == "1"
+
+
+@dataclass(frozen=True)
+class IsolateSettings:
+    cache_dir: Path = Path(user_cache_dir("isolate", "isolate"))
+    serialization_method: str = "pickle"
+    log_hook: Callable[[Log], None] = print
+    strict_cache: bool = _STRICT_CACHE
+
+    def log(self, log: Log) -> None:
+        self.log_hook(self._infer_log_level(log))
+
+    def _infer_log_level(self, log: Log) -> Log:
+        """Infer the log level for raw stdout/stderr lines (logs with an
+        explicit level are returned unchanged)."""
+        if log.level not in (LogLevel.STDOUT, LogLevel.STDERR):
+            # We should only infer the log level for stdout/stderr logs.
+            return log
+
+        if log.source in (LogSource.BUILDER, LogSource.BRIDGE):
+            return replace(log, level=LogLevel.TRACE)
+
+        line = log.message.lower()
+
+        if line.startswith("error") or "[error]" in line:
+            return replace(log, level=LogLevel.ERROR)
+        if line.startswith("warning") or "[warning]" in line:
+            return replace(log, level=LogLevel.WARNING)
+        if line.startswith("warn") or "[warn]" in line:
+            return replace(log, level=LogLevel.WARNING)
+        if line.startswith("info") or "[info]" in line:
+            return replace(log, level=LogLevel.INFO)
+        if line.startswith("debug") or "[debug]" in line:
+            return replace(log, level=LogLevel.DEBUG)
+        if line.startswith("trace") or "[trace]" in line:
+            return replace(log, level=LogLevel.TRACE)
+
+        # Default all to INFO level, even STDERR
+        return replace(log, level=LogLevel.INFO)
+
+    def _get_temp_base(self) -> Path:
+        """Return the base path for creating temporary files/directories.
+
+        If the isolate cache directory is in a different device than the
+        system temp base (e.g. /tmp), then it will return a new directory
+        under the cache directory."""
+
+        cache_stat = self.cache_dir.stat()
+        system_stat = _SYSTEM_TEMP_DIR.stat()
+        if cache_stat.st_dev == system_stat.st_dev:
+            return _SYSTEM_TEMP_DIR
+
+        if _SYSTEM_TEMP_DIR.samefile(self.cache_dir):
+            path = _SYSTEM_TEMP_DIR / "isolate"
+        else:
+            # This is quite important since if we have a shared cache
+            # disk, then /tmp is going to be in a different disk than
+            # the cache directory, which would make it impossible to
+            # rename() atomically.
+            path = self.cache_dir / "tmp"
+
+        path.mkdir(exist_ok=True, parents=True)
+        return path
+
+    def _get_lock_dir(self) -> Path:
+        """Return a directory which can be used for storing file-based locks."""
+        lock_dir = self._get_temp_base() / "locks"
+        lock_dir.mkdir(exist_ok=True, parents=True)
+        return lock_dir
+
+    @contextmanager
+    def cache_lock_for(self, path: Path) -> Iterator[Path]:
+        """Create a lock for accessing (and operating on) the given path. This
+        means whenever the context manager is entered, the path can be freely
+        modified and accessed without any other process interfering."""
+
+        with lock_build_path(path, self._get_lock_dir()):
+            try:
+                yield path
+            except BaseException:
+                # If anything goes wrong, we have to clean up the
+                # directory (we can't leave it as a corrupted build).
+                shutil.rmtree(path, ignore_errors=True)
+                raise
+
+    def cache_dir_for(self, backend: BaseEnvironment) -> Path:
+        """Return a directory which can be used for caching the given
+        environment's artifacts."""
+        backend_name = backend.BACKEND_NAME
+        assert backend_name is not None
+
+        environment_base_path = self.cache_dir / backend_name
+        environment_base_path.mkdir(exist_ok=True, parents=True)
+        return environment_base_path / backend.key
+
+    def completion_marker_for(self, path: Path) -> Path:
+        return path / ".isolate.completed"
+
+    replace = replace
+
+
+DEFAULT_SETTINGS = IsolateSettings()
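
For context, a sketch (not part of the package) of customizing these settings and handing them to one of the backends in this diff. The cache location and log hook are illustrative.

from pathlib import Path

from isolate.backends.local import LocalPythonEnvironment
from isolate.backends.settings import DEFAULT_SETTINGS
from isolate.logs import Log


def log_hook(log: Log) -> None:
    # Receives each record after _infer_log_level() has upgraded raw
    # stdout/stderr lines (e.g. a "[warning] ..." line becomes WARNING).
    print(f"{log.level} | {log.source} | {log.message}")


# IsolateSettings is a frozen dataclass; replace() returns a modified copy.
settings = DEFAULT_SETTINGS.replace(
    cache_dir=Path.home() / ".cache" / "my-isolate",
    log_hook=log_hook,
)

environment = LocalPythonEnvironment.from_config({}, settings=settings)

# Build artifacts for a backend land under <cache_dir>/<BACKEND_NAME>/<key>.
print(settings.cache_dir_for(environment))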