isolate-0.22.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (61)
  1. isolate/__init__.py +3 -0
  2. isolate/_isolate_version.py +34 -0
  3. isolate/_version.py +6 -0
  4. isolate/backends/__init__.py +2 -0
  5. isolate/backends/_base.py +132 -0
  6. isolate/backends/common.py +259 -0
  7. isolate/backends/conda.py +215 -0
  8. isolate/backends/container.py +64 -0
  9. isolate/backends/local.py +46 -0
  10. isolate/backends/pyenv.py +143 -0
  11. isolate/backends/remote.py +141 -0
  12. isolate/backends/settings.py +121 -0
  13. isolate/backends/virtualenv.py +204 -0
  14. isolate/common/__init__.py +0 -0
  15. isolate/common/timestamp.py +15 -0
  16. isolate/connections/__init__.py +21 -0
  17. isolate/connections/_local/__init__.py +2 -0
  18. isolate/connections/_local/_base.py +190 -0
  19. isolate/connections/_local/agent_startup.py +53 -0
  20. isolate/connections/common.py +121 -0
  21. isolate/connections/grpc/__init__.py +1 -0
  22. isolate/connections/grpc/_base.py +175 -0
  23. isolate/connections/grpc/agent.py +284 -0
  24. isolate/connections/grpc/configuration.py +23 -0
  25. isolate/connections/grpc/definitions/__init__.py +11 -0
  26. isolate/connections/grpc/definitions/agent.proto +18 -0
  27. isolate/connections/grpc/definitions/agent_pb2.py +29 -0
  28. isolate/connections/grpc/definitions/agent_pb2.pyi +44 -0
  29. isolate/connections/grpc/definitions/agent_pb2_grpc.py +68 -0
  30. isolate/connections/grpc/definitions/common.proto +49 -0
  31. isolate/connections/grpc/definitions/common_pb2.py +35 -0
  32. isolate/connections/grpc/definitions/common_pb2.pyi +152 -0
  33. isolate/connections/grpc/definitions/common_pb2_grpc.py +4 -0
  34. isolate/connections/grpc/interface.py +71 -0
  35. isolate/connections/ipc/__init__.py +5 -0
  36. isolate/connections/ipc/_base.py +225 -0
  37. isolate/connections/ipc/agent.py +205 -0
  38. isolate/logger.py +53 -0
  39. isolate/logs.py +76 -0
  40. isolate/py.typed +0 -0
  41. isolate/registry.py +53 -0
  42. isolate/server/__init__.py +1 -0
  43. isolate/server/definitions/__init__.py +13 -0
  44. isolate/server/definitions/server.proto +80 -0
  45. isolate/server/definitions/server_pb2.py +56 -0
  46. isolate/server/definitions/server_pb2.pyi +241 -0
  47. isolate/server/definitions/server_pb2_grpc.py +205 -0
  48. isolate/server/health/__init__.py +11 -0
  49. isolate/server/health/health.proto +23 -0
  50. isolate/server/health/health_pb2.py +32 -0
  51. isolate/server/health/health_pb2.pyi +66 -0
  52. isolate/server/health/health_pb2_grpc.py +99 -0
  53. isolate/server/health_server.py +40 -0
  54. isolate/server/interface.py +27 -0
  55. isolate/server/server.py +735 -0
  56. isolate-0.22.0.dist-info/METADATA +88 -0
  57. isolate-0.22.0.dist-info/RECORD +61 -0
  58. isolate-0.22.0.dist-info/WHEEL +5 -0
  59. isolate-0.22.0.dist-info/entry_points.txt +7 -0
  60. isolate-0.22.0.dist-info/licenses/LICENSE +201 -0
  61. isolate-0.22.0.dist-info/top_level.txt +1 -0
isolate/__init__.py ADDED
@@ -0,0 +1,3 @@
+ from isolate.registry import prepare_environment  # noqa: F401
+
+ from ._version import __version__, version_tuple  # noqa: F401
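
The top-level package re-exports prepare_environment, the registry entry point that builds an environment definition from a backend name plus keyword options. A minimal usage sketch, assuming the "virtualenv" backend and its 'requirements' option (neither is shown in this excerpt of the diff):

from isolate import prepare_environment

def in_env() -> str:
    # Executed inside the isolated environment, where pyjokes is installed.
    import pyjokes
    return pyjokes.get_joke()

# Assumption: the "virtualenv" backend accepts a 'requirements' option.
env = prepare_environment("virtualenv", requirements=["pyjokes==0.6.0"])
with env.connect() as connection:
    print(connection.run(in_env))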
isolate/_isolate_version.py ADDED
@@ -0,0 +1,34 @@
+ # file generated by setuptools-scm
+ # don't change, don't track in version control
+
+ __all__ = [
+     "__version__",
+     "__version_tuple__",
+     "version",
+     "version_tuple",
+     "__commit_id__",
+     "commit_id",
+ ]
+
+ TYPE_CHECKING = False
+ if TYPE_CHECKING:
+     from typing import Tuple
+     from typing import Union
+
+     VERSION_TUPLE = Tuple[Union[int, str], ...]
+     COMMIT_ID = Union[str, None]
+ else:
+     VERSION_TUPLE = object
+     COMMIT_ID = object
+
+ version: str
+ __version__: str
+ __version_tuple__: VERSION_TUPLE
+ version_tuple: VERSION_TUPLE
+ commit_id: COMMIT_ID
+ __commit_id__: COMMIT_ID
+
+ __version__ = version = '0.22.0'
+ __version_tuple__ = version_tuple = (0, 22, 0)
+
+ __commit_id__ = commit_id = None
isolate/_version.py ADDED
@@ -0,0 +1,6 @@
+ try:
+     from ._isolate_version import version as __version__  # type: ignore[import]
+     from ._isolate_version import version_tuple  # type: ignore[import]
+ except ImportError:
+     __version__ = "UNKNOWN"
+     version_tuple = (0, 0, __version__)  # type: ignore[assignment]
isolate/backends/__init__.py ADDED
@@ -0,0 +1,2 @@
+ from isolate.backends._base import *  # noqa: F403
+ from isolate.backends.settings import IsolateSettings  # noqa: F401
isolate/backends/_base.py ADDED
@@ -0,0 +1,132 @@
+ from __future__ import annotations
+
+ from contextlib import contextmanager
+ from dataclasses import dataclass
+ from typing import (
+     Any,
+     Callable,
+     ClassVar,
+     Generic,
+     Iterator,
+     TypeVar,
+ )
+
+ from isolate.backends.settings import DEFAULT_SETTINGS, IsolateSettings
+ from isolate.logs import Log, LogLevel, LogSource
+
+ __all__ = [
+     "BasicCallable",
+     "CallResultType",
+     "EnvironmentConnection",
+     "BaseEnvironment",
+     "EnvironmentCreationError",
+ ]
+
+ ConnectionKeyType = TypeVar("ConnectionKeyType")
+ CallResultType = TypeVar("CallResultType")
+ BasicCallable = Callable[[], CallResultType]
+
+
+ class EnvironmentCreationError(Exception):
+     """Raised when the environment cannot be created."""
+
+
+ class BaseEnvironment(Generic[ConnectionKeyType]):
+     """Represents a managed environment definition for an isolation backend
+     that can be used to run Python code with a different set of dependencies."""
+
+     BACKEND_NAME: ClassVar[str | None] = None
+
+     settings: IsolateSettings = DEFAULT_SETTINGS
+
+     @classmethod
+     def from_config(
+         cls,
+         config: dict[str, Any],
+         settings: IsolateSettings = DEFAULT_SETTINGS,
+     ) -> BaseEnvironment:
+         """Create a new environment from the given configuration."""
+         raise NotImplementedError
+
+     @property
+     def key(self) -> str:
+         """A unique identifier for this environment (combination of requirements,
+         python version and other relevant information) that can be used for caching
+         and identification purposes."""
+         raise NotImplementedError
+
+     def create(self, *, force: bool = False) -> ConnectionKeyType:
+         """Set up the given environment, and return all the information needed
+         for establishing a connection to it. If the `force` flag is set, the
+         environment is rebuilt even if a cached copy exists."""
+         raise NotImplementedError
+
+     def destroy(self, connection_key: ConnectionKeyType) -> None:
+         """Dismantle this environment. Might raise an exception if the environment
+         does not exist."""
+         raise NotImplementedError
+
+     def exists(self) -> bool:
+         """Return True if the environment already exists."""
+         raise NotImplementedError
+
+     def open_connection(
+         self, connection_key: ConnectionKeyType
+     ) -> EnvironmentConnection:
+         """Return a new connection to the environment using the
+         `connection_key`."""
+         raise NotImplementedError
+
+     @contextmanager
+     def connect(self) -> Iterator[EnvironmentConnection]:
+         """Create the given environment (if it doesn't already exist) and establish a
+         connection to it."""
+         connection_key = self.create()
+         with self.open_connection(connection_key) as connection:
+             yield connection
+
+     def apply_settings(self, settings: IsolateSettings) -> None:
+         """Apply the new settings to this environment."""
+         self.settings = settings
+
+     def log(
+         self,
+         message: str,
+         *,
+         level: LogLevel = LogLevel.DEBUG,
+         source: LogSource = LogSource.BUILDER,
+     ) -> None:
+         """Log a message."""
+         log_msg = Log(message, level=level, source=source, bound_env=self)
+         self.settings.log(log_msg)
+
+
+ @dataclass
+ class EnvironmentConnection:
+     environment: BaseEnvironment
+
+     def __enter__(self) -> EnvironmentConnection:
+         return self
+
+     def __exit__(self, *exc_info):
+         return None
+
+     def run(
+         self,
+         executable: BasicCallable,
+         *args: Any,
+         **kwargs: Any,
+     ) -> CallResultType:  # type: ignore[type-var]
+         """Run the given executable inside the environment, and return the result.
+         If the executable raises an exception, then it will be raised directly."""
+         raise NotImplementedError
+
+     def log(
+         self,
+         message: str,
+         *,
+         level: LogLevel = LogLevel.TRACE,
+         source: LogSource = LogSource.BRIDGE,
+     ) -> None:
+         """Log a message through the bound environment."""
+         self.environment.log(message, level=level, source=source)
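
BaseEnvironment and EnvironmentConnection together define the contract every backend in this package implements: create() returns an opaque connection key, open_connection() wraps that key in a connection object, and connect() chains the two. A hypothetical in-process backend (not part of the package) sketches the minimal conforming shape:

from __future__ import annotations

from typing import Any

from isolate.backends import BaseEnvironment, BasicCallable, EnvironmentConnection
from isolate.backends.settings import DEFAULT_SETTINGS, IsolateSettings


class EchoEnvironment(BaseEnvironment[str]):
    # Hypothetical toy backend: "creates" nothing and runs callables in-process.
    BACKEND_NAME = "echo"

    @classmethod
    def from_config(
        cls,
        config: dict[str, Any],
        settings: IsolateSettings = DEFAULT_SETTINGS,
    ) -> EchoEnvironment:
        environment = cls()
        environment.apply_settings(settings)
        return environment

    @property
    def key(self) -> str:
        return "echo"

    def exists(self) -> bool:
        return True

    def create(self, *, force: bool = False) -> str:
        return "echo"  # the connection key can be any backend-specific value

    def open_connection(self, connection_key: str) -> EnvironmentConnection:
        return InProcessConnection(self)


class InProcessConnection(EnvironmentConnection):
    def run(self, executable: BasicCallable, *args: Any, **kwargs: Any) -> Any:
        return executable()  # a real backend would execute this in the target env


with EchoEnvironment.from_config({}).connect() as connection:
    assert connection.run(lambda: 40 + 2) == 42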
isolate/backends/common.py ADDED
@@ -0,0 +1,259 @@
+ from __future__ import annotations
+
+ import errno
+ import hashlib
+ import os
+ import select
+ import shutil
+ import sysconfig
+ import threading
+ import time
+ from contextlib import contextmanager, suppress
+ from functools import lru_cache
+ from pathlib import Path
+ from types import ModuleType
+ from typing import Callable, Iterator
+
+ # To ensure that a lock is never left behind (e.g. the process
+ # which acquired it crashed, so it was never released), we check
+ # the lock file's mtime on each acquisition attempt. If the mtime
+ # is older than _REVOKE_LOCK_DELAY seconds, we assume the lock is
+ # stale and revoke it.
+ _REVOKE_LOCK_DELAY = 30
+
+
+ @contextmanager
+ def lock_build_path(path: Path, lock_dir: Path) -> Iterator[None]:
+     """Try to acquire a lock for all operations on the given 'path'. This guarantees
+     that the path will not be modified by any other process while the lock is held."""
+     lock_file = (lock_dir / path.name).with_suffix(".lock")
+     while not _try_acquire(lock_file):
+         time.sleep(0.05)
+         continue
+
+     with _keep_lock_alive(lock_file):
+         yield
+
+
+ @contextmanager
+ def _keep_lock_alive(lock_file: Path) -> Iterator[None]:
+     """Keep the lock file alive by updating its mtime as long
+     as we are doing something in the cache."""
+     event = threading.Event()
+
+     def _keep_alive(per_beat_delay: float = 1) -> None:
+         while not event.wait(per_beat_delay):
+             lock_file.touch()
+         lock_file.unlink()
+
+     thread = threading.Thread(target=_keep_alive)
+     try:
+         thread.start()
+         yield
+     finally:
+         event.set()
+         thread.join()
+
+
+ def _try_acquire(lock_file: Path) -> bool:
+     with suppress(FileNotFoundError):
+         mtime = lock_file.stat().st_mtime
+         if time.time() - mtime > _REVOKE_LOCK_DELAY:
+             # The lock file exists, but it may be stale. Check the
+             # mtime and if it is too old, revoke it.
+             lock_file.unlink()
+
+     try:
+         lock_file.touch(exist_ok=False)
+     except FileExistsError:
+         return False
+     else:
+         return True
+
+
+ def get_executable_path(search_path: Path, executable_name: str) -> Path:
+     """Return the path for the executable named 'executable_name' under
+     the '/bin' directory of 'search_path'."""
+
+     bin_dir = (search_path / "bin").as_posix()
+     executable_path = shutil.which(executable_name, path=bin_dir)
+     if executable_path is None:
+         raise FileNotFoundError(
+             f"Could not find '{executable_name}' in '{search_path}'. "
+             f"Is the virtual environment corrupted?"
+         )
+
+     return Path(executable_path)
+
+
+ _CHECK_FOR_TERMINATION_DELAY = 0.05
+ HookT = Callable[[str], None]
+
+
+ def _io_observer(
+     hooks: dict[int, HookT],
+     termination_event: threading.Event,
+ ) -> threading.Thread:
+     """Start a new thread that reads from the specified file descriptors
+     and calls the bound hook function for each line until the EOF is reached
+     or the termination event is set.
+
+     The caller is responsible for joining the thread.
+     """
+
+     followed_fds = list(hooks.keys())
+     for fd in followed_fds:
+         if os.get_blocking(fd):
+             raise NotImplementedError(
+                 "All the hooked file descriptors must be non-blocking."
+             )
+
+     def forward_lines(fd: int) -> None:
+         hook = hooks[fd]
+         with open(fd, closefd=False, errors="backslashreplace") as stream:
+             # TODO: we probably should pass the real line endings
+             raw_data = stream.read()
+             if not raw_data:
+                 return  # Nothing to read
+
+             for line in raw_data.splitlines():
+                 # TODO: parse the lines to include `extra={...}` added by the logger?
+                 hook(line)
+
+     def _reader():
+         while not termination_event.is_set():
+             # The observed file descriptors may be closed by the
+             # underlying process at any given time. So before we
+             # make a select call, we need to check if the file
+             # descriptors are still valid and remove the ones
+             # that are not.
+             for fd in followed_fds.copy():
+                 try:
+                     os.fstat(fd)
+                 except OSError as exc:
+                     if exc.errno == errno.EBADF:
+                         followed_fds.remove(fd)
+
+             if not followed_fds:
+                 # All the file descriptors are closed, so we can
+                 # stop the thread.
+                 return
+
+             ready, _, _ = select.select(
+                 # rlist=
+                 followed_fds,
+                 # wlist=
+                 [],
+                 # xlist=
+                 [],
+                 # timeout=
+                 _CHECK_FOR_TERMINATION_DELAY,
+             )
+             for fd in ready:
+                 forward_lines(fd)
+
+     observer_thread = threading.Thread(target=_reader)
+     observer_thread.start()
+     return observer_thread
+
+
+ def _unblocked_pipe() -> tuple[int, int]:
+     """Create a pipe whose both ends are non-blocking. This is effectively
+     the same as os.pipe2(os.O_NONBLOCK), but that is not
+     available on macOS so we have to do it manually."""
+
+     read_fd, write_fd = os.pipe()
+     os.set_blocking(read_fd, False)
+     os.set_blocking(write_fd, False)
+     return read_fd, write_fd
+
+
+ @contextmanager
+ def logged_io(
+     stdout_hook: HookT,
+     stderr_hook: HookT | None = None,
+     log_hook: HookT | None = None,
+ ) -> Iterator[tuple[int, int, int]]:
+     """Open three new streams (for stdout, stderr and logs, respectively) and start
+     relaying all the output from them to the given hooks."""
+
+     stdout_reader_fd, stdout_writer_fd = _unblocked_pipe()
+     stderr_reader_fd, stderr_writer_fd = _unblocked_pipe()
+     log_reader_fd, log_writer_fd = _unblocked_pipe()
+
+     termination_event = threading.Event()
+     io_observer = _io_observer(
+         hooks={
+             stdout_reader_fd: stdout_hook,
+             stderr_reader_fd: stderr_hook or stdout_hook,
+             log_reader_fd: log_hook or stdout_hook,
+         },
+         termination_event=termination_event,
+     )
+     try:
+         yield stdout_writer_fd, stderr_writer_fd, log_writer_fd
+     finally:
+         termination_event.set()
+         try:
+             # The observer thread checks the termination event every
+             # _CHECK_FOR_TERMINATION_DELAY seconds. We need to wait at least
+             # a bit longer than that to make sure it has a chance to terminate
+             # properly.
+             io_observer.join(timeout=_CHECK_FOR_TERMINATION_DELAY * 3)
+         except TimeoutError:
+             raise RuntimeError("Log observers did not terminate in time.")
+
+
+ @lru_cache(maxsize=None)
+ def sha256_digest_of(*unique_fields: str | bytes) -> str:
+     """Return the SHA256 digest that corresponds to the combined version
+     of 'unique_fields'. The order is preserved."""
+
+     def _normalize(text: str | bytes) -> bytes:
+         if isinstance(text, str):
+             return text.encode()
+         else:
+             return text
+
+     join_char = b"\n"
+     inner_text = join_char.join(map(_normalize, unique_fields))
+     return hashlib.sha256(inner_text).hexdigest()
+
+
+ def active_python() -> str:
+     """Return the active Python version that can be used for caching
+     and re-creating this environment. Currently only covers major and
+     minor versions (like 3.9); patch versions are ignored (like 3.9.4)."""
+     return sysconfig.get_python_version()
+
+
+ def optional_import(module_name: str) -> ModuleType:
+     """Try to import the given module, and fail if it is not available
+     with an informative error message that includes the installation
+     instructions."""
+
+     import importlib
+
+     try:
+         return importlib.import_module(module_name)
+     except ImportError as exc:
+         raise ImportError(
+             "isolate must be installed with the 'build' extras for "
+             f"accessing {module_name!r} import functionality. Please try: "
+             f"'$ pip install \"isolate[build]\"' to install it."
+         ) from exc
+
+
+ @lru_cache(4)
+ def get_executable(command: str, home: str | None = None) -> Path:
+     for path in [home, None]:
+         binary_path = shutil.which(command, path=path)
+         if binary_path is not None:
+             return Path(binary_path)
+     # TODO: we should probably show some instructions on how you
+     # can install conda here.
+     raise FileNotFoundError(
+         f"Could not find the {command} executable. "
+         f"If the {command} executable is not available by default, please point "
+         f"isolate to the path where the {command} binary is available '{home}'."
+     )
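
The locking scheme above is advisory and self-healing: _try_acquire treats a lock file whose mtime is older than _REVOKE_LOCK_DELAY seconds as abandoned and revokes it, while _keep_lock_alive touches the file every second so a live holder is never mistaken for a crashed one. A sketch of how a backend might serialize concurrent builds of one cache entry with these helpers (the paths are hypothetical):

from pathlib import Path

from isolate.backends.common import lock_build_path, sha256_digest_of

cache_root = Path("/tmp/isolate-cache")  # hypothetical cache location
lock_dir = cache_root / "locks"
lock_dir.mkdir(parents=True, exist_ok=True)

env_path = cache_root / sha256_digest_of("pyjokes==0.6.0", "3.11")
with lock_build_path(env_path, lock_dir):
    # Only one process at a time gets here for this env_path; if a previous
    # holder crashed, its lock is revoked after _REVOKE_LOCK_DELAY seconds.
    if not env_path.exists():
        env_path.mkdir(parents=True)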
isolate/backends/conda.py ADDED
@@ -0,0 +1,215 @@
+ from __future__ import annotations
+
+ import copy
+ import os
+ import subprocess
+ import tempfile
+ from dataclasses import dataclass, field
+ from functools import partial
+ from pathlib import Path
+ from typing import Any, ClassVar
+
+ from isolate.backends import BaseEnvironment, EnvironmentCreationError
+ from isolate.backends.common import (
+     active_python,
+     get_executable,
+     logged_io,
+     optional_import,
+     sha256_digest_of,
+ )
+ from isolate.backends.settings import DEFAULT_SETTINGS, IsolateSettings
+ from isolate.connections import PythonIPC
+ from isolate.logs import LogLevel
+
+ # Specify paths where conda and mamba binaries might reside
+ _CONDA_COMMAND = os.environ.get("CONDA_EXE", "conda")
+ _MAMBA_COMMAND = os.environ.get("MAMBA_EXE", "micromamba")
+ _ISOLATE_CONDA_HOME = os.getenv("ISOLATE_CONDA_HOME")
+ _ISOLATE_MAMBA_HOME = os.getenv("ISOLATE_MAMBA_HOME")
+ _ISOLATE_DEFAULT_RESOLVER = os.getenv("ISOLATE_DEFAULT_RESOLVER", "mamba")
+
+ # Conda accepts the following version specifiers: =, ==, >=, <=, >, <, !=
+ _POSSIBLE_CONDA_VERSION_IDENTIFIERS = (
+     "=",
+     "<",
+     ">",
+     "!",
+ )
+
+
+ @dataclass
+ class CondaEnvironment(BaseEnvironment[Path]):
+     BACKEND_NAME: ClassVar[str] = "conda"
+
+     environment_definition: dict[str, Any] = field(default_factory=dict)
+     python_version: str | None = None
+     tags: list[str] = field(default_factory=list)
+     _exec_home: str | None = _ISOLATE_MAMBA_HOME
+     _exec_command: str | None = _MAMBA_COMMAND
+
+     @classmethod
+     def from_config(
+         cls,
+         config: dict[str, Any],
+         settings: IsolateSettings = DEFAULT_SETTINGS,
+     ) -> BaseEnvironment:
+         processing_config = copy.deepcopy(config)
+         processing_config.setdefault("python_version", active_python())
+         resolver = processing_config.pop("resolver", _ISOLATE_DEFAULT_RESOLVER)
+         if resolver == "conda":
+             _exec_home = _ISOLATE_CONDA_HOME
+             _exec_command = _CONDA_COMMAND
+         elif resolver == "mamba":
+             _exec_home = _ISOLATE_MAMBA_HOME
+             _exec_command = _MAMBA_COMMAND
+         else:
+             raise Exception(f"Conda resolver of type {resolver} is not supported")
+         if "env_dict" in processing_config:
+             definition = processing_config.pop("env_dict")
+         elif "env_yml_str" in processing_config:
+             yaml = optional_import("yaml")
+
+             definition = yaml.safe_load(processing_config.pop("env_yml_str"))
+         elif "packages" in processing_config:
+             definition = {
+                 "dependencies": processing_config.pop("packages"),
+             }
+         else:
+             raise ValueError(
+                 "Either 'env_dict', 'env_yml_str' or 'packages' must be specified"
+             )
+
+         dependencies = definition.setdefault("dependencies", [])
+         if _depends_on(dependencies, "python"):
+             raise ValueError(
+                 "The Python version can not be specified by the environment;",
+                 " it needs to be passed as the `python_version` option instead.",
+             )
+
+         dependencies.append(f"python={processing_config['python_version']}")
+
+         # Extend pip dependencies and channels if they are specified.
+         if "pip" in processing_config:
+             if not _depends_on(dependencies, "pip"):
+                 dependencies.append("pip")
+
+             try:
+                 dependency_group = next(
+                     dependency
+                     for dependency in dependencies
+                     if isinstance(dependency, dict) and "pip" in dependency
+                 )
+             except StopIteration:
+                 dependency_group = {"pip": []}
+                 dependencies.append(dependency_group)
+
+             dependency_group["pip"].extend(processing_config.pop("pip"))
+
+         if "channels" in processing_config:
+             definition.setdefault("channels", [])
+             definition["channels"].extend(processing_config.pop("channels"))
+
+         environment = cls(
+             environment_definition=definition,
+             _exec_home=_exec_home,
+             _exec_command=_exec_command,
+             **processing_config,
+         )
+         environment.apply_settings(settings)
+         return environment
+
+     @property
+     def key(self) -> str:
+         return sha256_digest_of(
+             repr(self.environment_definition),
+             self.python_version,
+             self._exec_command,
+             *sorted(self.tags),
+         )
+
+     def create(self, *, force: bool = False) -> Path:
+         env_path = self.settings.cache_dir_for(self)
+         with self.settings.cache_lock_for(env_path):
+             if env_path.exists() and not force:
+                 return env_path
+
+             self.log(f"Creating the environment at '{env_path}'")
+             with tempfile.NamedTemporaryFile(mode="w", suffix=".yml") as tf:
+                 yaml = optional_import("yaml")
+                 yaml.dump(self.environment_definition, tf)
+                 tf.flush()
+
+                 try:
+                     self._run_create(str(env_path), tf.name)
+                 except subprocess.SubprocessError as exc:
+                     raise EnvironmentCreationError(
+                         f"Failure during 'conda create': {exc}"
+                     )
+
+             self.log(f"New environment cached at '{env_path}'")
+             return env_path
+
+     def destroy(self, connection_key: Path) -> None:
+         with self.settings.cache_lock_for(connection_key):
+             # It might be destroyed already (while we were waiting
+             # for the lock to be released).
+             if not connection_key.exists():
+                 return
+
+             self._run_destroy(str(connection_key))
+
+     def _run_create(self, env_path: str, env_name: str) -> None:
+         if self._exec_command == "conda":
+             self._run_conda(
+                 "env", "create", "--yes", "--prefix", env_path, "-f", env_name
+             )
+         else:
+             self._run_conda("env", "create", "--prefix", env_path, "-f", env_name)
+
+     def _run_destroy(self, connection_key: str) -> None:
+         self._run_conda("remove", "--yes", "--all", "--prefix", connection_key)
+
+     def _run_conda(self, *args: Any) -> None:
+         conda_executable = get_executable(self._exec_command, self._exec_home)
+         with logged_io(partial(self.log, level=LogLevel.INFO)) as (stdout, stderr, _):
+             subprocess.check_call(
+                 [conda_executable, *args],
+                 stdout=stdout,
+                 stderr=stderr,
+             )
+
+     def exists(self) -> bool:
+         path = self.settings.cache_dir_for(self)
+         return path.exists()
+
+     def open_connection(self, connection_key: Path) -> PythonIPC:
+         return PythonIPC(self, connection_key)
+
+
+ def _depends_on(
+     dependencies: list[str | dict[str, list[str]]],
+     package_name: str,
+ ) -> bool:
+     for dependency in dependencies:
+         if isinstance(dependency, dict):
+             # It is a dependency group like pip: [...]
+             continue
+
+         # Get rid of all whitespace characters (python = 3.8 becomes python=3.8)
+         package = dependency.replace(" ", "")
+         if not package.startswith(package_name):
+             continue
+
+         # Ensure that the package name matches perfectly and not only
+         # at the prefix level. Examples:
+         #  - python               # OK
+         #  - python=3.8           # OK
+         #  - python>=3.8          # OK
+         #  - python-user-toolkit  # NOT OK
+         #  - pythonhelp!=1.0      # NOT OK
+         suffix = package[len(package_name) :]
+         if suffix and suffix[0] not in _POSSIBLE_CONDA_VERSION_IDENTIFIERS:
+             continue
+
+         return True
+     return False
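
As the code above shows, CondaEnvironment.from_config accepts exactly one of 'env_dict', 'env_yml_str' or 'packages', pins the interpreter by appending python=<python_version> to the dependencies, and merges any extra 'pip' packages and 'channels' into the definition. A sketch of that transformation (the package names are arbitrary):

from isolate.backends.conda import CondaEnvironment

env = CondaEnvironment.from_config(
    {
        "packages": ["numpy=1.26"],
        "pip": ["pyjokes==0.6.0"],  # merged into a {"pip": [...]} dependency group
        "channels": ["conda-forge"],
        "python_version": "3.11",   # otherwise defaults to the active interpreter
    }
)
# The resulting environment_definition is roughly:
#   {"dependencies": ["numpy=1.26", "python=3.11", "pip",
#                     {"pip": ["pyjokes==0.6.0"]}],
#    "channels": ["conda-forge"]}
print(env.key)  # stable sha256 over the definition, python version and resolver

Note that the resolver defaults to mamba (micromamba) unless ISOLATE_DEFAULT_RESOLVER or the 'resolver' config key selects conda; the actual build only happens when create() is called.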