rappel 0.5.5__py3-none-manylinux_2_39_aarch64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of rappel might be problematic.

rappel/bridge.py ADDED
@@ -0,0 +1,228 @@
+import asyncio
+import os
+import shlex
+import subprocess
+import tempfile
+from contextlib import asynccontextmanager
+from dataclasses import dataclass
+from pathlib import Path
+from threading import Lock, RLock
+from typing import AsyncIterator, Optional
+
+import grpc
+from grpc import aio  # type: ignore[attr-defined]
+
+from proto import messages_pb2 as pb2
+from proto import messages_pb2_grpc as pb2_grpc
+from rappel.logger import configure as configure_logger
+
+DEFAULT_HOST = "127.0.0.1"
+LOGGER = configure_logger("rappel.bridge")
+
+_PORT_LOCK = RLock()
+_CACHED_GRPC_PORT: Optional[int] = None
+_GRPC_TARGET: Optional[str] = None
+_GRPC_CHANNEL: Optional[aio.Channel] = None
+_GRPC_STUB: Optional[pb2_grpc.WorkflowServiceStub] = None
+_GRPC_LOOP: Optional[asyncio.AbstractEventLoop] = None
+_BOOT_MUTEX = Lock()
+_ASYNC_BOOT_LOCK: asyncio.Lock = asyncio.Lock()
+
+
+@dataclass
+class RunInstanceResult:
+    workflow_version_id: str
+    workflow_instance_id: str
+
+
+def _boot_command() -> list[str]:
+    override = os.environ.get("RAPPEL_BOOT_COMMAND")
+    if override:
+        LOGGER.debug("Using RAPPEL_BOOT_COMMAND=%s", override)
+        return shlex.split(override)
+    binary = os.environ.get("RAPPEL_BOOT_BINARY", "boot-rappel-singleton")
+    LOGGER.debug("Using RAPPEL_BOOT_BINARY=%s", binary)
+    return [binary]
+
+
+def _remember_grpc_port(port: int) -> int:
+    global _CACHED_GRPC_PORT
+    with _PORT_LOCK:
+        _CACHED_GRPC_PORT = port
+    return port
+
+
+def _cached_grpc_port() -> Optional[int]:
+    with _PORT_LOCK:
+        return _CACHED_GRPC_PORT
+
+
+def _env_grpc_port_override() -> Optional[int]:
+    """Check for an explicit gRPC port override via the environment."""
+    override = os.environ.get("RAPPEL_BRIDGE_GRPC_PORT")
+    if not override:
+        return None
+    try:
+        return int(override)
+    except ValueError as exc:  # pragma: no cover
+        raise RuntimeError(f"invalid RAPPEL_BRIDGE_GRPC_PORT value: {override}") from exc
+
+
+def _boot_singleton_blocking() -> int:
+    """Boot the singleton and return the gRPC port it reports."""
+    command = _boot_command()
+    with tempfile.NamedTemporaryFile(mode="w+", suffix=".txt") as f:
+        output_file = Path(f.name)
+
+        command.extend(["--output-file", str(output_file)])
+        LOGGER.info("Booting rappel singleton via: %s", " ".join(command))
+
+        try:
+            subprocess.run(
+                command,
+                check=True,
+                timeout=10,
+            )
+        except subprocess.TimeoutExpired as exc:  # pragma: no cover
+            LOGGER.error("boot command timed out after %s seconds", exc.timeout)
+            raise RuntimeError("unable to boot rappel server") from exc
+        except subprocess.CalledProcessError as exc:  # pragma: no cover
+            LOGGER.error("boot command failed: %s", exc)
+            raise RuntimeError("unable to boot rappel server") from exc
+        except OSError as exc:  # pragma: no cover
+            LOGGER.error("unable to spawn boot command: %s", exc)
+            raise RuntimeError("unable to boot rappel server") from exc
+
+        try:
+            # We use a file as a message passer because handing a PIPE to the singleton
+            # launcher would block our code indefinitely: the singleton spawns the
+            # webserver subprocess so that it inherits the launcher's stdin/stdout,
+            # which means a PIPE would be passed along to that subprocess, the file
+            # descriptor would never be closed, and the process exit status would
+            # never be signalled back to Python.
+            port_str = output_file.read_text().strip()
+            grpc_port = int(port_str)
+            LOGGER.info("boot command reported singleton gRPC port %s", grpc_port)
+            return grpc_port
+        except (ValueError, FileNotFoundError) as exc:  # pragma: no cover
+            raise RuntimeError(f"unable to read port from output file: {exc}") from exc
+
+
+def _resolve_grpc_port() -> int:
+    """Resolve the gRPC port, booting the singleton if necessary."""
+    cached = _cached_grpc_port()
+    if cached is not None:
+        return cached
+    env_port = _env_grpc_port_override()
+    if env_port is not None:
+        return _remember_grpc_port(env_port)
+    with _BOOT_MUTEX:
+        cached = _cached_grpc_port()
+        if cached is not None:
+            return cached
+        port = _boot_singleton_blocking()
+        return _remember_grpc_port(port)
+
+
+async def _ensure_grpc_port_async() -> int:
+    """Ensure we have a gRPC port, booting the singleton if necessary."""
+    cached = _cached_grpc_port()
+    if cached is not None:
+        return cached
+    env_port = _env_grpc_port_override()
+    if env_port is not None:
+        return _remember_grpc_port(env_port)
+    async with _ASYNC_BOOT_LOCK:
+        cached = _cached_grpc_port()
+        if cached is not None:
+            return cached
+        loop = asyncio.get_running_loop()
+        LOGGER.info("No cached singleton found, booting new instance")
+        port = await loop.run_in_executor(None, _boot_singleton_blocking)
+        LOGGER.info("Singleton ready on gRPC port %s", port)
+        return _remember_grpc_port(port)
+
+
+@asynccontextmanager
+async def ensure_singleton() -> AsyncIterator[int]:
+    """Yield the gRPC port for the singleton server, booting it exactly once."""
+    port = await _ensure_grpc_port_async()
+    yield port
+
+
+def _grpc_target() -> str:
+    """Get the gRPC target address for the bridge server."""
+    # Check for an explicit full-address override first.
+    explicit = os.environ.get("RAPPEL_BRIDGE_GRPC_ADDR")
+    if explicit:
+        return explicit
+
+    # Otherwise, combine host + resolved port.
+    host = os.environ.get("RAPPEL_BRIDGE_GRPC_HOST", DEFAULT_HOST)
+    port = _resolve_grpc_port()
+    return f"{host}:{port}"
+
+
+async def _workflow_stub() -> pb2_grpc.WorkflowServiceStub:
+    global _GRPC_TARGET, _GRPC_CHANNEL, _GRPC_STUB, _GRPC_LOOP
+    target = _grpc_target()
+    loop = asyncio.get_running_loop()
+    channel_to_wait: Optional[aio.Channel] = None
+    with _PORT_LOCK:
+        if (
+            _GRPC_STUB is not None
+            and _GRPC_TARGET == target
+            and _GRPC_LOOP is loop
+            and not loop.is_closed()
+        ):
+            return _GRPC_STUB
+        channel = aio.insecure_channel(target)
+        stub = pb2_grpc.WorkflowServiceStub(channel)
+        _GRPC_CHANNEL = channel
+        _GRPC_STUB = stub
+        _GRPC_TARGET = target
+        _GRPC_LOOP = loop
+        channel_to_wait = channel
+    if channel_to_wait is not None:
+        await channel_to_wait.channel_ready()
+    return _GRPC_STUB  # type: ignore[return-value]
+
+
+async def run_instance(payload: bytes) -> RunInstanceResult:
+    """Register a workflow definition and start an instance over the gRPC bridge."""
+    async with ensure_singleton():
+        stub = await _workflow_stub()
+        registration = pb2.WorkflowRegistration()
+        registration.ParseFromString(payload)
+        request = pb2.RegisterWorkflowRequest(
+            registration=registration,
+        )
+        try:
+            response = await stub.RegisterWorkflow(request, timeout=30.0)
+        except aio.AioRpcError as exc:  # pragma: no cover
+            raise RuntimeError(f"register_workflow failed: {exc}") from exc
+        return RunInstanceResult(
+            workflow_version_id=response.workflow_version_id,
+            workflow_instance_id=response.workflow_instance_id,
+        )
+
+
+async def wait_for_instance(
+    instance_id: str,
+    poll_interval_secs: float = 1.0,
+) -> Optional[bytes]:
+    """Block until the workflow daemon produces the requested instance payload."""
+    async with ensure_singleton():
+        stub = await _workflow_stub()
+        request = pb2.WaitForInstanceRequest(
+            instance_id=instance_id,
+            poll_interval_secs=poll_interval_secs,
+        )
+        try:
+            response = await stub.WaitForInstance(request, timeout=None)
+        except aio.AioRpcError as exc:  # pragma: no cover
+            status_fn = exc.code
+            if callable(status_fn) and status_fn() == grpc.StatusCode.NOT_FOUND:
+                return None
+            raise RuntimeError(f"wait_for_instance failed: {exc}") from exc
+        return bytes(response.payload)
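
Usage note: run_instance and wait_for_instance are the public entry points of this bridge. A minimal sketch of a caller, assuming a serialized pb2.WorkflowRegistration is already in hand (build_registration_bytes is a hypothetical helper, not part of this package):

    import asyncio

    from rappel import bridge

    async def main() -> None:
        payload = build_registration_bytes()  # hypothetical: serialized pb2.WorkflowRegistration
        result = await bridge.run_instance(payload)
        print("registered version:", result.workflow_version_id)
        # Poll the daemon for the finished instance; None means NOT_FOUND.
        output = await bridge.wait_for_instance(result.workflow_instance_id)
        print("payload bytes:", None if output is None else len(output))

    asyncio.run(main())

Both entry points go through ensure_singleton, so the first call boots the server (or picks up RAPPEL_BRIDGE_GRPC_PORT / RAPPEL_BRIDGE_GRPC_ADDR from the environment) and later calls reuse the cached channel.
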
rappel/dependencies.py ADDED
@@ -0,0 +1,149 @@
+from __future__ import annotations
+
+import inspect
+from contextlib import AsyncExitStack, asynccontextmanager, contextmanager
+from dataclasses import dataclass
+from typing import Annotated, Any, AsyncIterator, Callable, Optional, get_args, get_origin
+
+
+@dataclass(frozen=True)
+class DependMarker:
+    """Internal marker for dependency injection."""
+
+    dependency: Optional[Callable[..., Any]] = None
+    use_cache: bool = True
+
+
+def Depend(  # noqa: N802
+    dependency: Optional[Callable[..., Any]] = None,
+    *,
+    use_cache: bool = True,
+) -> Any:
+    """Marker for dependency injection, mirroring FastAPI's Depends syntax.
+
+    Returns Any to allow usage as a default parameter value:
+
+        def my_func(service: MyService = Depend(get_service)):
+            ...
+    """
+    return DependMarker(dependency=dependency, use_cache=use_cache)
+
+
+def _depend_from_annotation(annotation: Any) -> DependMarker | None:
+    origin = get_origin(annotation)
+    if origin is not Annotated:
+        return None
+    metadata = get_args(annotation)[1:]
+    for meta in metadata:
+        if isinstance(meta, DependMarker):
+            return meta
+    return None
+
+
+def _dependency_marker(parameter: inspect.Parameter) -> DependMarker | None:
+    if isinstance(parameter.default, DependMarker):
+        return parameter.default
+    return _depend_from_annotation(parameter.annotation)
+
+
+class _DependencyResolver:
+    """Resolve dependency graphs for a callable, including context manager lifetimes."""
+
+    def __init__(self, initial_kwargs: Optional[dict[str, Any]] = None) -> None:
+        self._context: dict[str, Any] = dict(initial_kwargs or {})
+        self._cache: dict[Callable[..., Any], Any] = {}
+        self._active: set[Callable[..., Any]] = set()
+        self._stack = AsyncExitStack()
+
+    async def close(self) -> None:
+        await self._stack.aclose()
+
+    async def build_call_kwargs(self, func: Callable[..., Any]) -> dict[str, Any]:
+        call_kwargs: dict[str, Any] = {}
+        signature = inspect.signature(func)
+        func_name = func.__name__ if hasattr(func, "__name__") else func.__class__.__name__
+        for name, parameter in signature.parameters.items():
+            if parameter.kind in (
+                inspect.Parameter.VAR_POSITIONAL,
+                inspect.Parameter.VAR_KEYWORD,
+            ):
+                continue
+            if name in self._context:
+                call_kwargs[name] = self._context[name]
+                continue
+            marker = _dependency_marker(parameter)
+            if marker is not None:
+                value = await self._resolve_dependency(marker)
+                self._context[name] = value
+                call_kwargs[name] = value
+                continue
+            if parameter.default is not inspect.Parameter.empty:
+                call_kwargs[name] = parameter.default
+                self._context.setdefault(name, parameter.default)
+                continue
+            raise TypeError(f"Missing required parameter '{name}' for {func_name}")
+        return call_kwargs
+
+    async def _resolve_dependency(self, marker: DependMarker) -> Any:
+        dependency = marker.dependency
+        if dependency is None:
+            raise TypeError("Depend requires a dependency callable")
+        if marker.use_cache and dependency in self._cache:
+            return self._cache[dependency]
+        if dependency in self._active:
+            name = (
+                dependency.__name__
+                if hasattr(dependency, "__name__")
+                else dependency.__class__.__name__
+            )
+            raise RuntimeError(f"Circular dependency detected for {name}")
+        self._active.add(dependency)
+        try:
+            kwargs = await self.build_call_kwargs(dependency)
+            value = await self._call_dependency(dependency, kwargs)
+            if marker.use_cache:
+                self._cache[dependency] = value
+            return value
+        finally:
+            self._active.discard(dependency)
+
+    async def _call_dependency(
+        self,
+        dependency: Callable[..., Any],
+        kwargs: dict[str, Any],
+    ) -> Any:
+        if inspect.isasyncgenfunction(dependency):
+            context_manager = asynccontextmanager(dependency)(**kwargs)
+            return await self._stack.enter_async_context(context_manager)
+        if inspect.isgeneratorfunction(dependency):
+            context_manager = contextmanager(dependency)(**kwargs)
+            return self._stack.enter_context(context_manager)
+        result = dependency(**kwargs)
+        resolved = await self._await_if_needed(result)
+        return await self._enter_context_if_needed(resolved)
+
+    async def _await_if_needed(self, value: Any) -> Any:
+        if inspect.isawaitable(value):
+            return await value
+        return value
+
+    async def _enter_context_if_needed(self, value: Any) -> Any:
+        if hasattr(value, "__aenter__") and hasattr(value, "__aexit__"):
+            return await self._stack.enter_async_context(value)  # type: ignore[arg-type]
+        if hasattr(value, "__enter__") and hasattr(value, "__exit__"):
+            return self._stack.enter_context(value)  # type: ignore[arg-type]
+        return value
+
+
+@asynccontextmanager
+async def provide_dependencies(
+    func: Callable[..., Any],
+    kwargs: Optional[dict[str, Any]] = None,
+) -> AsyncIterator[dict[str, Any]]:
+    """Resolve dependencies for ``func`` and manage their lifetimes."""
+
+    resolver = _DependencyResolver(kwargs)
+    try:
+        call_kwargs = await resolver.build_call_kwargs(func)
+        yield call_kwargs
+    finally:
+        await resolver.close()
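
To make the resolver concrete, here is a minimal sketch (all names invented for illustration): a generator dependency is wrapped as a context manager, resolved once, and torn down when provide_dependencies exits, while plain defaults such as retries pass through untouched.

    import asyncio

    from rappel.dependencies import Depend, provide_dependencies

    def get_conn():  # generator dependency: setup, yield, teardown
        conn = {"open": True}
        try:
            yield conn
        finally:
            conn["open"] = False  # runs when the resolver's exit stack closes

    async def handler(conn: dict = Depend(get_conn), retries: int = 3) -> str:
        return f"conn open={conn['open']}, retries={retries}"

    async def main() -> None:
        async with provide_dependencies(handler) as kwargs:
            print(await handler(**kwargs))  # conn open=True, retries=3

    asyncio.run(main())

Because use_cache defaults to True, a second parameter marked Depend(get_conn) in the same resolution would receive the identical conn object rather than a fresh one.
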
rappel/exceptions.py ADDED
@@ -0,0 +1,11 @@
+"""Custom exception types raised by rappel workflows."""
+
+
+class ExhaustedRetriesError(Exception):
+    """Raised when an action exhausts its allotted retry attempts."""
+
+    def __init__(self, message: str | None = None) -> None:
+        super().__init__(message or "action exhausted retries")
+
+
+# Shorter alias for the same exception class.
+ExhaustedRetries = ExhaustedRetriesError
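
A trivial illustration of the default message and the alias (both names resolve to the same class):

    from rappel.exceptions import ExhaustedRetries, ExhaustedRetriesError

    try:
        raise ExhaustedRetriesError()
    except ExhaustedRetries as exc:  # the alias catches the same type
        print(exc)  # prints: action exhausted retries
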
rappel/formatter.py ADDED
@@ -0,0 +1,110 @@
+import os
+import re
+import sys
+from typing import List, Sequence, TextIO
+
+RESET = "\033[0m"
+
+STYLE_CODES = {
+    "bold": "\033[1m",
+    "dim": "\033[2m",
+    "red": "\033[31m",
+    "green": "\033[32m",
+    "yellow": "\033[33m",
+    "blue": "\033[34m",
+    "magenta": "\033[35m",
+    "cyan": "\033[36m",
+    "white": "\033[37m",
+}
+
+_TAG_PATTERN = re.compile(r"\[(/?)([a-zA-Z]+)?\]")
+
+
+def supports_color(stream: TextIO) -> bool:
+    """Return True if the provided stream likely supports ANSI colors."""
+
+    if os.environ.get("NO_COLOR"):
+        return False
+    if os.environ.get("FORCE_COLOR"):
+        return True
+    if stream.isatty():
+        if sys.platform != "win32":
+            return True
+        return bool(
+            os.environ.get("ANSICON")
+            or os.environ.get("WT_SESSION")
+            or os.environ.get("TERM_PROGRAM")
+        )
+    return False
+
+
+class Formatter:
+    """
+    Very small markup formatter inspired by Rich's tag syntax. We keep it in-house to
+    minimize the Python client's dependencies to just grpc plus the standard library.
+    """
+
+    def __init__(self, enable_colors: bool) -> None:
+        self._enable_colors = enable_colors
+
+    @property
+    def enable_colors(self) -> bool:
+        return self._enable_colors
+
+    def format(self, text: str) -> str:
+        if not text:
+            return text
+        return _apply_markup(text, self._enable_colors)
+
+    def apply_styles(self, text: str, styles: Sequence[str]) -> str:
+        """Wrap text with markup tags for the provided style sequence."""
+
+        if not styles:
+            return text
+        opening = "".join(f"[{style}]" for style in styles)
+        closing = "".join(f"[/{style}]" for style in reversed(styles))
+        return f"{opening}{text}{closing}"
+
+
+def _apply_markup(text: str, enable_colors: bool) -> str:
+    if not enable_colors:
+        return _TAG_PATTERN.sub("", text)
+    result: List[str] = []
+    stack: List[str] = []
+    index = 0
+    for match in _TAG_PATTERN.finditer(text):
+        result.append(text[index : match.start()])
+        is_closing = match.group(1) == "/"
+        tag_name = match.group(2)
+        if tag_name is None:
+            if is_closing:
+                if stack:
+                    stack.clear()
+                    result.append(RESET)
+            else:
+                result.append(match.group(0))
+            index = match.end()
+            continue
+        if is_closing:
+            if tag_name in stack:
+                while stack:
+                    name = stack.pop()
+                    result.append(RESET)
+                    if name == tag_name:
+                        break
+                if stack:
+                    result.append("".join(STYLE_CODES[name] for name in stack))
+            index = match.end()
+            continue
+        code = STYLE_CODES.get(tag_name)
+        if code is None:
+            result.append(match.group(0))
+        else:
+            stack.append(tag_name)
+            result.append(code)
+        index = match.end()
+    result.append(text[index:])
+    if stack:
+        result.append(RESET)
+    return "".join(result)
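
A short usage sketch of the formatter (output assumes a color-capable terminal): closing an inner tag restores the enclosing style, unknown tags pass through verbatim, and all markup is stripped when colors are disabled.

    import sys

    from rappel.formatter import Formatter, supports_color

    formatter = Formatter(enable_colors=supports_color(sys.stdout))
    # Closing [/red] resets, then re-applies the still-open bold style.
    print(formatter.format("[bold]task [red]failed[/red] after 3 tries[/bold]"))
    # apply_styles only wraps text in markup; format() renders it.
    print(formatter.format(formatter.apply_styles("hello", ["bold", "cyan"])))

    plain = Formatter(enable_colors=False)
    print(plain.format("[green]ok[/green]"))  # prints: ok
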