rappel 0.4.1__py3-none-win_amd64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of rappel might be problematic. Click here for more details.

rappel/bridge.py ADDED
@@ -0,0 +1,228 @@
1
+ import asyncio
2
+ import os
3
+ import shlex
4
+ import subprocess
5
+ import tempfile
6
+ from contextlib import asynccontextmanager
7
+ from dataclasses import dataclass
8
+ from pathlib import Path
9
+ from threading import Lock, RLock
10
+ from typing import AsyncIterator, Optional
11
+ from urllib.parse import urlparse
12
+
13
+ import grpc
14
+ from grpc import aio # type: ignore[attr-defined]
15
+
16
+ from proto import messages_pb2 as pb2
17
+ from proto import messages_pb2_grpc as pb2_grpc
18
+ from rappel.logger import configure as configure_logger
19
+
20
+ DEFAULT_HOST = "127.0.0.1"
21
+ LOGGER = configure_logger("rappel.bridge")
22
+
23
+ _PORT_LOCK = RLock()
24
+ _CACHED_PORT: Optional[int] = None
25
+ _GRPC_TARGET: Optional[str] = None
26
+ _GRPC_CHANNEL: Optional[aio.Channel] = None
27
+ _GRPC_STUB: Optional[pb2_grpc.WorkflowServiceStub] = None
28
+ _BOOT_MUTEX = Lock()
29
+ _ASYNC_BOOT_LOCK: asyncio.Lock = asyncio.Lock()
30
+
31
+
32
@dataclass
class RunInstanceResult:
    """Identifiers reported by the daemon after a workflow is registered and started.

    Both fields come straight from the RegisterWorkflow gRPC response
    (see run_instance below).
    """

    workflow_version_id: str
    workflow_instance_id: str
36
+
37
+
38
def _boot_command() -> list[str]:
    """Build the argv used to launch the singleton boot helper.

    RAPPEL_BOOT_COMMAND (a full shell-style command string) takes precedence;
    otherwise RAPPEL_BOOT_BINARY names a bare executable, defaulting to
    ``boot-rappel-singleton``.
    """
    custom = os.environ.get("RAPPEL_BOOT_COMMAND")
    if not custom:
        executable = os.environ.get("RAPPEL_BOOT_BINARY", "boot-rappel-singleton")
        LOGGER.debug("Using RAPPEL_BOOT_BINARY=%s", executable)
        return [executable]
    LOGGER.debug("Using RAPPEL_BOOT_COMMAND=%s", custom)
    return shlex.split(custom)
46
+
47
+
48
def _remember_port(port: int) -> int:
    """Cache *port* as the singleton's HTTP port and return it unchanged."""
    global _CACHED_PORT
    with _PORT_LOCK:
        _CACHED_PORT = port
    return port
53
+
54
+
55
def _cached_port() -> Optional[int]:
    """Return the last remembered singleton port, or None if none is cached."""
    with _PORT_LOCK:
        port = _CACHED_PORT
    return port
58
+
59
+
60
+ def _env_port_override() -> Optional[int]:
61
+ override = os.environ.get("RAPPEL_SERVER_PORT")
62
+ if not override:
63
+ return None
64
+ try:
65
+ return int(override)
66
+ except ValueError as exc: # pragma: no cover
67
+ raise RuntimeError(f"invalid RAPPEL_SERVER_PORT value: {override}") from exc
68
+
69
+
70
def _boot_singleton_blocking() -> int:
    """Launch the singleton boot helper and return the HTTP port it reports.

    A temp file (not a pipe) is used as the message passer: the launcher hands
    its stdio down to the long-lived webserver subprocess, so a PIPE would
    never be closed and ``subprocess.run`` would block forever waiting on it.

    Raises RuntimeError if the helper cannot be spawned, fails, times out, or
    writes an unreadable port value.
    """
    command = _boot_command()
    # BUGFIX: the previous NamedTemporaryFile context manager deleted the file
    # immediately on __exit__, then the boot helper recreated it at the same
    # path and nothing ever removed it (temp-file leak; also fragile on
    # Windows). mkstemp + finally gives the helper a writable path and
    # guarantees cleanup on every exit path.
    fd, tmp_path = tempfile.mkstemp(suffix=".txt")
    os.close(fd)
    output_file = Path(tmp_path)
    try:
        command.extend(["--output-file", str(output_file)])
        LOGGER.info("Booting rappel singleton via: %s", " ".join(command))

        try:
            subprocess.run(
                command,
                check=True,
                timeout=10,
            )
        except subprocess.TimeoutExpired as exc:  # pragma: no cover
            LOGGER.error("boot command timed out after %s seconds", exc.timeout)
            raise RuntimeError("unable to boot rappel server") from exc
        except subprocess.CalledProcessError as exc:  # pragma: no cover
            LOGGER.error("boot command failed: %s", exc)
            raise RuntimeError("unable to boot rappel server") from exc
        except OSError as exc:  # pragma: no cover
            LOGGER.error("unable to spawn boot command: %s", exc)
            raise RuntimeError("unable to boot rappel server") from exc

        try:
            port_str = output_file.read_text().strip()
            port = int(port_str)
        except (ValueError, FileNotFoundError) as exc:  # pragma: no cover
            raise RuntimeError(f"unable to read port from output file: {exc}") from exc
        LOGGER.info("boot command reported singleton port %s", port)
        return port
    finally:
        output_file.unlink(missing_ok=True)
107
+
108
+
109
def _resolve_port() -> int:
    """Return the singleton's HTTP port, booting the server at most once.

    Resolution order: cached value, RAPPEL_SERVER_PORT override, then a
    double-checked boot under _BOOT_MUTEX so concurrent threads boot only once.
    """
    port = _cached_port()
    if port is not None:
        return port
    override = _env_port_override()
    if override is not None:
        return _remember_port(override)
    with _BOOT_MUTEX:
        # Re-check: another thread may have booted while we waited on the lock.
        port = _cached_port()
        if port is None:
            port = _remember_port(_boot_singleton_blocking())
        return port
122
+
123
+
124
async def _ensure_port_async() -> int:
    """Async counterpart of _resolve_port: resolve or boot the singleton port.

    The blocking boot runs in the default executor so the event loop stays
    responsive; _ASYNC_BOOT_LOCK serializes concurrent coroutines so the boot
    happens exactly once.
    """
    port = _cached_port()
    if port is not None:
        return port
    override = _env_port_override()
    if override is not None:
        return _remember_port(override)
    async with _ASYNC_BOOT_LOCK:
        port = _cached_port()
        if port is not None:
            # Another coroutine finished booting while we awaited the lock.
            return port
        LOGGER.info("No cached singleton found, booting new instance")
        booted = await asyncio.get_running_loop().run_in_executor(
            None, _boot_singleton_blocking
        )
        LOGGER.info("Singleton ready on port %s", booted)
        return _remember_port(booted)
140
+
141
+
142
@asynccontextmanager
async def ensure_singleton() -> AsyncIterator[int]:
    """Yield the HTTP port for the singleton server, booting it exactly once."""
    yield await _ensure_port_async()
147
+
148
+
149
+ def _grpc_target() -> str:
150
+ explicit = os.environ.get("RAPPEL_GRPC_ADDR")
151
+ if explicit:
152
+ return explicit
153
+ http_url = os.environ.get("RAPPEL_SERVER_URL")
154
+ host_from_url = None
155
+ port_from_url = None
156
+ if http_url:
157
+ parsed = urlparse(http_url)
158
+ host_from_url = parsed.hostname
159
+ port_from_url = parsed.port
160
+ host = host_from_url or os.environ.get("RAPPEL_SERVER_HOST", DEFAULT_HOST)
161
+ port_override = os.environ.get("RAPPEL_GRPC_PORT")
162
+ if port_override:
163
+ try:
164
+ port = int(port_override)
165
+ except ValueError as exc: # pragma: no cover
166
+ raise RuntimeError(f"invalid RAPPEL_GRPC_PORT value: {port_override}") from exc
167
+ else:
168
+ http_port = port_from_url if port_from_url is not None else _resolve_port()
169
+ port = http_port + 1
170
+ return f"{host}:{port}"
171
+
172
+
173
async def _workflow_stub() -> pb2_grpc.WorkflowServiceStub:
    """Return a cached WorkflowService stub for the current gRPC target.

    A new channel/stub pair is created (under _PORT_LOCK) whenever the target
    changes; a freshly created channel is awaited until ready before use.
    """
    global _GRPC_TARGET, _GRPC_CHANNEL, _GRPC_STUB
    target = _grpc_target()
    channel_to_wait: Optional[aio.Channel] = None
    with _PORT_LOCK:
        if _GRPC_STUB is not None and _GRPC_TARGET == target:
            return _GRPC_STUB
        channel = aio.insecure_channel(target)
        stub = pb2_grpc.WorkflowServiceStub(channel)
        _GRPC_CHANNEL = channel
        _GRPC_STUB = stub
        _GRPC_TARGET = target
        channel_to_wait = channel
    if channel_to_wait is not None:
        await channel_to_wait.channel_ready()
    # BUGFIX: return the stub created above instead of re-reading the global,
    # which another task could have swapped to a different target while we
    # awaited channel_ready().
    return stub
189
+
190
+
191
async def run_instance(payload: bytes) -> RunInstanceResult:
    """Register a workflow definition and start an instance over the gRPC bridge.

    ``payload`` is a serialized WorkflowRegistration protobuf. Raises
    RuntimeError when the RegisterWorkflow RPC fails.
    """
    async with ensure_singleton():
        stub = await _workflow_stub()
        registration = pb2.WorkflowRegistration()
        registration.ParseFromString(payload)
        request = pb2.RegisterWorkflowRequest(registration=registration)
        try:
            response = await stub.RegisterWorkflow(request, timeout=30.0)
        except aio.AioRpcError as exc:  # pragma: no cover
            raise RuntimeError(f"register_workflow failed: {exc}") from exc
        return RunInstanceResult(
            workflow_version_id=response.workflow_version_id,
            workflow_instance_id=response.workflow_instance_id,
        )
208
+
209
+
210
async def wait_for_instance(
    instance_id: str,
    poll_interval_secs: float = 1.0,
) -> Optional[bytes]:
    """Block until the workflow daemon produces the requested instance payload.

    Returns the payload bytes, or None when the daemon reports NOT_FOUND for
    ``instance_id``. Raises RuntimeError on any other RPC failure.
    """
    async with ensure_singleton():
        stub = await _workflow_stub()
        request = pb2.WaitForInstanceRequest(
            instance_id=instance_id,
            poll_interval_secs=poll_interval_secs,
        )
        try:
            # No client-side deadline: the server decides when to respond.
            response = await stub.WaitForInstance(request, timeout=None)
        except aio.AioRpcError as exc:  # pragma: no cover
            code_getter = exc.code
            # NOT_FOUND means the instance does not exist; surface it as None.
            if callable(code_getter) and code_getter() == grpc.StatusCode.NOT_FOUND:
                return None
            raise RuntimeError(f"wait_for_instance failed: {exc}") from exc
        return bytes(response.payload)
rappel/dependencies.py ADDED
@@ -0,0 +1,135 @@
1
+ from __future__ import annotations
2
+
3
+ import inspect
4
+ from contextlib import AsyncExitStack, asynccontextmanager, contextmanager
5
+ from dataclasses import dataclass
6
+ from typing import Annotated, Any, AsyncIterator, Callable, Optional, get_args, get_origin
7
+
8
+
9
@dataclass(frozen=True)
class Depend:
    """Marker for dependency injection, mirroring FastAPI's Depends syntax."""

    # Callable whose (possibly awaited / context-managed) return value is injected.
    dependency: Optional[Callable[..., Any]] = None
    # When True, a resolved value is reused for repeated requests of the same
    # dependency callable within one resolution pass.
    use_cache: bool = True
15
+
16
+
17
def _depend_from_annotation(annotation: Any) -> Depend | None:
    """Extract a Depend marker from an ``Annotated[...]`` type hint, if present."""
    if get_origin(annotation) is not Annotated:
        return None
    # args[0] is the underlying type; only the metadata entries can carry markers.
    return next(
        (meta for meta in get_args(annotation)[1:] if isinstance(meta, Depend)),
        None,
    )
26
+
27
+
28
def _dependency_marker(parameter: inspect.Parameter) -> Depend | None:
    """Return the Depend marker for a parameter: its default value wins over
    any marker embedded in the parameter's Annotated type hint."""
    default = parameter.default
    if isinstance(default, Depend):
        return default
    return _depend_from_annotation(parameter.annotation)
32
+
33
+
34
class _DependencyResolver:
    """Resolve dependency graphs for a callable, including context manager lifetimes."""

    def __init__(self, initial_kwargs: Optional[dict[str, Any]] = None) -> None:
        # Values already available by parameter name (seeded by the caller,
        # grown as dependencies resolve).
        self._context: dict[str, Any] = dict(initial_kwargs or {})
        # Cache of resolved values, keyed by the dependency callable itself.
        self._cache: dict[Callable[..., Any], Any] = {}
        # Dependencies currently being resolved; used for cycle detection.
        self._active: set[Callable[..., Any]] = set()
        # Owns the lifetimes of every (async) context manager entered during
        # resolution; unwound in close().
        self._stack = AsyncExitStack()

    async def close(self) -> None:
        """Exit every context manager entered while resolving dependencies."""
        await self._stack.aclose()

    async def build_call_kwargs(self, func: Callable[..., Any]) -> dict[str, Any]:
        """Build the keyword arguments needed to invoke ``func``.

        Each parameter is satisfied, in order of preference, from: the current
        context, a Depend marker (default value or Annotated metadata), or the
        parameter's own default. ``*args``/``**kwargs`` parameters are skipped.

        Raises TypeError when a required parameter cannot be satisfied.
        """
        call_kwargs: dict[str, Any] = {}
        signature = inspect.signature(func)
        func_name = func.__name__ if hasattr(func, "__name__") else func.__class__.__name__
        for name, parameter in signature.parameters.items():
            if parameter.kind in (
                inspect.Parameter.VAR_POSITIONAL,
                inspect.Parameter.VAR_KEYWORD,
            ):
                continue
            if name in self._context:
                call_kwargs[name] = self._context[name]
                continue
            marker = _dependency_marker(parameter)
            if marker is not None:
                value = await self._resolve_dependency(marker)
                # Resolved values become visible to later parameters by name.
                self._context[name] = value
                call_kwargs[name] = value
                continue
            if parameter.default is not inspect.Parameter.empty:
                call_kwargs[name] = parameter.default
                self._context.setdefault(name, parameter.default)
                continue
            raise TypeError(f"Missing required parameter '{name}' for {func_name}")
        return call_kwargs

    async def _resolve_dependency(self, marker: Depend) -> Any:
        """Resolve a single Depend marker to its value, honoring ``use_cache``.

        Raises TypeError for a marker with no callable, and RuntimeError when
        a dependency (transitively) depends on itself.
        """
        dependency = marker.dependency
        if dependency is None:
            raise TypeError("Depend requires a dependency callable")
        if marker.use_cache and dependency in self._cache:
            return self._cache[dependency]
        if dependency in self._active:
            name = (
                dependency.__name__
                if hasattr(dependency, "__name__")
                else dependency.__class__.__name__
            )
            raise RuntimeError(f"Circular dependency detected for {name}")
        self._active.add(dependency)
        try:
            # Recursively resolve the dependency's own parameters first.
            kwargs = await self.build_call_kwargs(dependency)
            value = await self._call_dependency(dependency, kwargs)
            if marker.use_cache:
                self._cache[dependency] = value
            return value
        finally:
            self._active.discard(dependency)

    async def _call_dependency(
        self,
        dependency: Callable[..., Any],
        kwargs: dict[str, Any],
    ) -> Any:
        """Invoke a dependency, adapting generators/awaitables/context managers.

        Generator and async-generator dependencies are treated as FastAPI-style
        yield dependencies (wrapped into context managers); plain results are
        awaited if awaitable and entered on the exit stack when they are
        themselves context managers.
        """
        if inspect.isasyncgenfunction(dependency):
            context_manager = asynccontextmanager(dependency)(**kwargs)
            return await self._stack.enter_async_context(context_manager)
        if inspect.isgeneratorfunction(dependency):
            context_manager = contextmanager(dependency)(**kwargs)
            return self._stack.enter_context(context_manager)
        result = dependency(**kwargs)
        resolved = await self._await_if_needed(result)
        return await self._enter_context_if_needed(resolved)

    async def _await_if_needed(self, value: Any) -> Any:
        """Await ``value`` when it is awaitable; otherwise return it as-is."""
        if inspect.isawaitable(value):
            return await value
        return value

    async def _enter_context_if_needed(self, value: Any) -> Any:
        """Enter ``value`` on the exit stack when it is an (async) context manager."""
        if hasattr(value, "__aenter__") and hasattr(value, "__aexit__"):
            return await self._stack.enter_async_context(value)  # type: ignore[arg-type]
        if hasattr(value, "__enter__") and hasattr(value, "__exit__"):
            return self._stack.enter_context(value)  # type: ignore[arg-type]
        return value
121
+
122
+
123
@asynccontextmanager
async def provide_dependencies(
    func: Callable[..., Any],
    kwargs: Optional[dict[str, Any]] = None,
) -> AsyncIterator[dict[str, Any]]:
    """Resolve dependencies for ``func`` and manage their lifetimes.

    Yields the keyword arguments to call ``func`` with; any context-managed
    dependencies stay open for the duration of the ``with`` body and are torn
    down on exit.
    """

    resolver = _DependencyResolver(kwargs)
    try:
        yield await resolver.build_call_kwargs(func)
    finally:
        await resolver.close()
rappel/exceptions.py ADDED
@@ -0,0 +1,11 @@
1
+ """Custom exception types raised by rappel workflows."""
2
+
3
+
4
+ class ExhaustedRetriesError(Exception):
5
+ """Raised when an action exhausts its allotted retry attempts."""
6
+
7
+ def __init__(self, message: str | None = None) -> None:
8
+ super().__init__(message or "action exhausted retries")
9
+
10
+
11
+ ExhaustedRetries = ExhaustedRetriesError
rappel/formatter.py ADDED
@@ -0,0 +1,110 @@
1
+ import os
2
+ import re
3
+ import sys
4
+ from typing import List, Sequence, TextIO
5
+
6
+ RESET = "\033[0m"
7
+
8
+ STYLE_CODES = {
9
+ "bold": "\033[1m",
10
+ "dim": "\033[2m",
11
+ "red": "\033[31m",
12
+ "green": "\033[32m",
13
+ "yellow": "\033[33m",
14
+ "blue": "\033[34m",
15
+ "magenta": "\033[35m",
16
+ "cyan": "\033[36m",
17
+ "white": "\033[37m",
18
+ }
19
+
20
+ _TAG_PATTERN = re.compile(r"\[(/?)([a-zA-Z]+)?\]")
21
+
22
+
23
def supports_color(stream: TextIO) -> bool:
    """Return True if the provided stream likely supports ANSI colors."""

    # Explicit environment overrides win; NO_COLOR is checked first.
    if os.environ.get("NO_COLOR"):
        return False
    if os.environ.get("FORCE_COLOR"):
        return True
    if not stream.isatty():
        return False
    if sys.platform != "win32":
        return True
    # On Windows, only terminals advertising ANSI support (ConEmu/ANSICON,
    # Windows Terminal, VS Code and similar) get colors.
    env = os.environ
    return bool(env.get("ANSICON") or env.get("WT_SESSION") or env.get("TERM_PROGRAM"))
39
+
40
+
41
class Formatter:
    """Minimal Rich-inspired tag markup formatter.

    Kept deliberately tiny: the Python client limits its dependencies to
    grpc plus the standard library, so we cannot pull in Rich itself.
    """

    def __init__(self, enable_colors: bool) -> None:
        self._enable_colors = enable_colors

    @property
    def enable_colors(self) -> bool:
        """Whether ANSI escape codes are emitted when formatting."""
        return self._enable_colors

    def format(self, text: str) -> str:
        """Render markup tags in ``text`` (tags are stripped when colors are off)."""
        return _apply_markup(text, self._enable_colors) if text else text

    def apply_styles(self, text: str, styles: Sequence[str]) -> str:
        """Wrap text with markup tags for the provided style sequence."""

        if not styles:
            return text
        open_tags = "".join(f"[{name}]" for name in styles)
        close_tags = "".join(f"[/{name}]" for name in reversed(styles))
        return f"{open_tags}{text}{close_tags}"
68
+
69
+
70
def _apply_markup(text: str, enable_colors: bool) -> str:
    """Render ``[style]`` markup in ``text`` as ANSI escape sequences.

    When colors are disabled, every tag matched by _TAG_PATTERN is stripped.
    When enabled: an opening tag pushes its ANSI code, ``[/name]`` pops styles
    back to the enclosing ones, a bare ``[/]`` resets everything, and unknown
    opening tags (plus bare ``[]``) are left in the output verbatim.
    """
    if not enable_colors:
        return _TAG_PATTERN.sub("", text)
    result: List[str] = []
    stack: List[str] = []  # currently-open style names, innermost last
    index = 0
    for match in _TAG_PATTERN.finditer(text):
        # Copy the literal text between the previous tag and this one.
        result.append(text[index : match.start()])
        is_closing = match.group(1) == "/"
        tag_name = match.group(2)
        if tag_name is None:
            if is_closing:
                # Bare [/]: close every open style at once.
                if stack:
                    stack.clear()
                    result.append(RESET)
            else:
                # Bare []: not markup; emit it unchanged.
                result.append(match.group(0))
            index = match.end()
            continue
        if is_closing:
            if tag_name in stack:
                # Pop styles down to (and including) the one being closed...
                while stack:
                    name = stack.pop()
                    result.append(RESET)
                    if name == tag_name:
                        break
                # ...then re-apply whatever styles remain open.
                if stack:
                    result.append("".join(STYLE_CODES[name] for name in stack))
            # Closing a tag that was never opened is silently dropped.
            index = match.end()
            continue
        code = STYLE_CODES.get(tag_name)
        if code is None:
            # Unknown style name: keep the literal tag text.
            result.append(match.group(0))
        else:
            stack.append(tag_name)
            result.append(code)
        index = match.end()
    result.append(text[index:])
    if stack:
        # Unclosed tags at end of input: make sure styling doesn't leak.
        result.append(RESET)
    return "".join(result)