rappel-0.5.5-py3-none-manylinux_2_39_aarch64.whl

This diff shows the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of rappel might be problematic.

rappel/logger.py ADDED
@@ -0,0 +1,39 @@
+ """Simple logging helpers for configurable rappel loggers."""
+
+ import logging
+ import os
+ from typing import Optional
+
+ DEFAULT_LEVEL = logging.INFO
+ ENV_VAR = "RAPPEL_LOG_LEVEL"
+
+
+ def _resolve_level(value: Optional[str]) -> int:
+     if not value:
+         return DEFAULT_LEVEL
+     normalized = value.strip().upper()
+     mapping = {
+         "CRITICAL": logging.CRITICAL,
+         "FATAL": logging.FATAL,
+         "ERROR": logging.ERROR,
+         "WARNING": logging.WARNING,
+         "WARN": logging.WARNING,
+         "INFO": logging.INFO,
+         "DEBUG": logging.DEBUG,
+         "NOTSET": logging.NOTSET,
+     }
+     return mapping.get(normalized, DEFAULT_LEVEL)
+
+
+ def configure(name: str) -> logging.Logger:
+     """Return a logger configured from RAPPEL_LOG_LEVEL."""
+
+     logger = logging.getLogger(name)
+     level = _resolve_level(os.environ.get(ENV_VAR))
+     logger.setLevel(level)
+     if not logger.handlers:
+         handler = logging.StreamHandler()
+         handler.setFormatter(logging.Formatter("[%(name)s] %(levelname)s: %(message)s"))
+         handler.setLevel(level)
+         logger.addHandler(handler)
+     return logger
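
A minimal usage sketch for the helper above, assuming the module is importable as rappel.logger (inferred from the file path; the package's public exports are not shown in this diff):

    # Hypothetical usage; configure() reads RAPPEL_LOG_LEVEL at call time.
    from rappel.logger import configure

    log = configure("rappel.demo")
    log.info("worker started")    # emitted at the default INFO level
    log.debug("verbose detail")   # emitted only when RAPPEL_LOG_LEVEL=DEBUG

Note that configure() attaches a stream handler only when the logger has none, so calling it repeatedly for the same name does not duplicate output.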
rappel/registry.py ADDED
@@ -0,0 +1,106 @@
+ from collections.abc import Awaitable, Callable
+ from dataclasses import dataclass
+ from threading import RLock
+ from typing import Any, Optional
+
+ AsyncAction = Callable[..., Awaitable[Any]]
+
+
+ @dataclass
+ class _ActionEntry:
+     module: str
+     name: str
+     func: AsyncAction
+
+
+ def _make_key(module: str, name: str) -> str:
+     """Create a registry key from module and action name."""
+     return f"{module}:{name}"
+
+
+ class ActionRegistry:
+     """In-memory registry of user-defined actions.
+
+     Actions are keyed by (module, name), allowing the same action name
+     to be used in different modules.
+     """
+
+     def __init__(self) -> None:
+         self._actions: dict[str, _ActionEntry] = {}
+         self._lock = RLock()
+
+     def _source_fingerprint(self, func: AsyncAction) -> tuple[str | None, str | None]:
+         func_any: Any = func
+         try:
+             code = func_any.__code__
+         except AttributeError:
+             return (None, None)
+         try:
+             qualname = func_any.__qualname__
+         except AttributeError:
+             qualname = None
+         filename = code.co_filename
+         if not isinstance(filename, str):
+             filename = None
+         if qualname is not None and not isinstance(qualname, str):
+             qualname = None
+         return (filename, qualname)
+
+     def _is_same_action_definition(self, existing: AsyncAction, new: AsyncAction) -> bool:
+         if existing is new:
+             return True
+         existing_fingerprint = self._source_fingerprint(existing)
+         new_fingerprint = self._source_fingerprint(new)
+         if existing_fingerprint == (None, None) or new_fingerprint == (None, None):
+             return False
+         return existing_fingerprint == new_fingerprint
+
+     def register(self, module: str, name: str, func: AsyncAction) -> None:
+         """Register an action with its module and name.
+
+         Args:
+             module: The Python module containing the action.
+             name: The action name (from @action decorator).
+             func: The async function to execute.
+
+         Raises:
+             ValueError: If an action with the same module:name is already registered
+                 with a different implementation.
+         """
+         key = _make_key(module, name)
+         with self._lock:
+             existing = self._actions.get(key)
+             if existing is not None:
+                 if self._is_same_action_definition(existing.func, func):
+                     self._actions[key] = _ActionEntry(module=module, name=name, func=func)
+                     return
+                 raise ValueError(f"action '{module}:{name}' already registered")
+             self._actions[key] = _ActionEntry(module=module, name=name, func=func)
+
+     def get(self, module: str, name: str) -> Optional[AsyncAction]:
+         """Look up an action by module and name.
+
+         Args:
+             module: The Python module containing the action.
+             name: The action name.
+
+         Returns:
+             The action function if found, None otherwise.
+         """
+         key = _make_key(module, name)
+         with self._lock:
+             entry = self._actions.get(key)
+             return entry.func if entry else None
+
+     def names(self) -> list[str]:
+         """Return all registered action keys (module:name format)."""
+         with self._lock:
+             return sorted(self._actions.keys())
+
+     def reset(self) -> None:
+         """Clear all registered actions."""
+         with self._lock:
+             self._actions.clear()
+
+
+ registry = ActionRegistry()
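
A short sketch of how this module-level registry can be exercised directly; the send_email action here is illustrative, not part of the package:

    import asyncio

    from rappel.registry import registry

    async def send_email(to: str) -> str:
        return f"sent to {to}"

    # Keys are module:name, so the same action name may repeat across modules.
    registry.register("app.notifications", "send_email", send_email)

    func = registry.get("app.notifications", "send_email")
    assert func is not None
    print(asyncio.run(func("ops@example.com")))  # sent to ops@example.com
    print(registry.names())                      # ['app.notifications:send_email']

Re-registering the same function object (or one with the same source fingerprint, as after a module reload) is accepted; a different implementation under an existing key raises ValueError.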
rappel/schedule.py ADDED
@@ -0,0 +1,358 @@
+ """
+ Scheduled workflow execution.
+
+ This module provides functions for registering workflows to run on a cron
+ schedule or at fixed intervals.
+ """
+
+ from dataclasses import dataclass
+ from datetime import datetime, timedelta
+ from typing import Any, Dict, List, Literal, Optional, Type, Union
+
+ from grpc import aio  # type: ignore[attr-defined]
+
+ from proto import messages_pb2 as pb2
+
+ from .bridge import _workflow_stub, ensure_singleton
+ from .serialization import build_arguments_from_kwargs
+ from .workflow import Workflow
+
+ ScheduleType = Literal["cron", "interval"]
+ ScheduleStatus = Literal["active", "paused"]
+
+
+ @dataclass
+ class ScheduleInfo:
+     """Information about a registered schedule."""
+
+     id: str
+     workflow_name: str
+     schedule_name: str
+     schedule_type: ScheduleType
+     cron_expression: Optional[str]
+     interval_seconds: Optional[int]
+     jitter_seconds: Optional[int]
+     status: ScheduleStatus
+     next_run_at: Optional[datetime]
+     last_run_at: Optional[datetime]
+     last_instance_id: Optional[str]
+     created_at: datetime
+     updated_at: datetime
+
+
+ async def schedule_workflow(
+     workflow_cls: Type[Workflow],
+     *,
+     schedule_name: str,
+     schedule: Union[str, timedelta],
+     jitter: Optional[timedelta] = None,
+     inputs: Optional[Dict[str, Any]] = None,
+ ) -> str:
+     """
+     Register a schedule for a workflow.
+
+     This function registers both the workflow DAG and the schedule in a single
+     call. When the schedule fires, the registered workflow version will be
+     executed.
+
+     Args:
+         workflow_cls: The Workflow class to schedule.
+         schedule_name: Unique name for this schedule. Allows multiple schedules
+             per workflow with different inputs. Must be unique within
+             a workflow.
+         schedule: Either a cron expression string (e.g., "0 * * * *" for hourly)
+             or a timedelta for interval-based scheduling.
+         jitter: Optional jitter window to add to each scheduled run.
+         inputs: Optional keyword arguments to pass to each scheduled run.
+
+     Returns:
+         The schedule ID.
+
+     Examples:
+         # Run every hour at minute 0
+         await schedule_workflow(
+             MyWorkflow,
+             schedule_name="hourly-run",
+             schedule="0 * * * *"
+         )
+
+         # Run every 5 minutes
+         await schedule_workflow(
+             MyWorkflow,
+             schedule_name="frequent-check",
+             schedule=timedelta(minutes=5)
+         )
+
+         # Multiple schedules with different inputs
+         await schedule_workflow(
+             MyWorkflow,
+             schedule_name="small-batch",
+             schedule="0 0 * * *",
+             inputs={"batch_size": 100}
+         )
+         await schedule_workflow(
+             MyWorkflow,
+             schedule_name="large-batch",
+             schedule="0 12 * * *",
+             inputs={"batch_size": 1000}
+         )
+
+     Raises:
+         ValueError: If the cron expression is invalid, the interval is
+             non-positive, or schedule_name is empty.
+         TypeError: If schedule is neither a str nor a timedelta.
+         RuntimeError: If the gRPC call fails.
+     """
+     if not schedule_name:
+         raise ValueError("schedule_name is required")
+
+     workflow_name = workflow_cls.short_name()
+
+     # Build schedule definition
+     schedule_def = pb2.ScheduleDefinition()
+     if isinstance(schedule, str):
+         schedule_def.type = pb2.SCHEDULE_TYPE_CRON
+         schedule_def.cron_expression = schedule
+     elif isinstance(schedule, timedelta):
+         interval_seconds = int(schedule.total_seconds())
+         if interval_seconds <= 0:
+             raise ValueError("Interval must be positive")
+         schedule_def.type = pb2.SCHEDULE_TYPE_INTERVAL
+         schedule_def.interval_seconds = interval_seconds
+     else:
+         raise TypeError(f"schedule must be str or timedelta, got {type(schedule)}")
+
+     if jitter is not None:
+         jitter_seconds = int(jitter.total_seconds())
+         if jitter_seconds < 0:
+             raise ValueError("jitter must be non-negative")
+         schedule_def.jitter_seconds = jitter_seconds
+
+     # Build the workflow registration payload to ensure the DAG is registered.
+     # This is required for the schedule to execute - the scheduler needs a
+     # registered workflow version to create instances from.
+     registration = workflow_cls._build_registration_payload()
+
+     # Build request with both registration and schedule
+     request = pb2.RegisterScheduleRequest(
+         workflow_name=workflow_name,
+         schedule_name=schedule_name,
+         schedule=schedule_def,
+         registration=registration,
+     )
+
+     # Add inputs if provided
+     if inputs:
+         request.inputs.CopyFrom(build_arguments_from_kwargs(inputs))
+
+     # Send to server
+     async with ensure_singleton():
+         stub = await _workflow_stub()
+
+         try:
+             response = await stub.RegisterSchedule(request, timeout=30.0)
+         except aio.AioRpcError as exc:
+             raise RuntimeError(f"Failed to register schedule: {exc}") from exc
+
+     return response.schedule_id
+
+
+ async def pause_schedule(workflow_cls: Type[Workflow], *, schedule_name: str) -> bool:
+     """
+     Pause a workflow's schedule.
+
+     The schedule will not fire until resumed. Existing running instances
+     are not affected.
+
+     Args:
+         workflow_cls: The Workflow class whose schedule to pause.
+         schedule_name: The name of the schedule to pause.
+
+     Returns:
+         True if a schedule was found and paused, False otherwise.
+
+     Raises:
+         ValueError: If schedule_name is empty.
+         RuntimeError: If the gRPC call fails.
+     """
+     if not schedule_name:
+         raise ValueError("schedule_name is required")
+
+     request = pb2.UpdateScheduleStatusRequest(
+         workflow_name=workflow_cls.short_name(),
+         schedule_name=schedule_name,
+         status=pb2.SCHEDULE_STATUS_PAUSED,
+     )
+     async with ensure_singleton():
+         stub = await _workflow_stub()
+
+         try:
+             response = await stub.UpdateScheduleStatus(request, timeout=30.0)
+         except aio.AioRpcError as exc:
+             raise RuntimeError(f"Failed to pause schedule: {exc}") from exc
+
+     return response.success
+
+
+ async def resume_schedule(workflow_cls: Type[Workflow], *, schedule_name: str) -> bool:
+     """
+     Resume a paused workflow schedule.
+
+     Args:
+         workflow_cls: The Workflow class whose schedule to resume.
+         schedule_name: The name of the schedule to resume.
+
+     Returns:
+         True if a schedule was found and resumed, False otherwise.
+
+     Raises:
+         ValueError: If schedule_name is empty.
+         RuntimeError: If the gRPC call fails.
+     """
+     if not schedule_name:
+         raise ValueError("schedule_name is required")
+
+     request = pb2.UpdateScheduleStatusRequest(
+         workflow_name=workflow_cls.short_name(),
+         schedule_name=schedule_name,
+         status=pb2.SCHEDULE_STATUS_ACTIVE,
+     )
+     async with ensure_singleton():
+         stub = await _workflow_stub()
+
+         try:
+             response = await stub.UpdateScheduleStatus(request, timeout=30.0)
+         except aio.AioRpcError as exc:
+             raise RuntimeError(f"Failed to resume schedule: {exc}") from exc
+
+     return response.success
+
+
+ async def delete_schedule(workflow_cls: Type[Workflow], *, schedule_name: str) -> bool:
+     """
+     Delete a workflow's schedule.
+
+     The schedule is soft-deleted and can be recreated by calling
+     schedule_workflow again.
+
+     Args:
+         workflow_cls: The Workflow class whose schedule to delete.
+         schedule_name: The name of the schedule to delete.
+
+     Returns:
+         True if a schedule was found and deleted, False otherwise.
+
+     Raises:
+         ValueError: If schedule_name is empty.
+         RuntimeError: If the gRPC call fails.
+     """
+     if not schedule_name:
+         raise ValueError("schedule_name is required")
+
+     request = pb2.DeleteScheduleRequest(
+         workflow_name=workflow_cls.short_name(),
+         schedule_name=schedule_name,
+     )
+     async with ensure_singleton():
+         stub = await _workflow_stub()
+
+         try:
+             response = await stub.DeleteSchedule(request, timeout=30.0)
+         except aio.AioRpcError as exc:
+             raise RuntimeError(f"Failed to delete schedule: {exc}") from exc
+
+     return response.success
+
+
+ def _parse_iso_datetime(value: str) -> Optional[datetime]:
+     """Parse an ISO 8601 datetime string, returning None if empty."""
+     if not value:
+         return None
+     return datetime.fromisoformat(value.replace("Z", "+00:00"))
+
+
+ def _proto_schedule_type_to_str(
+     schedule_type: "pb2.ScheduleType.V",
+ ) -> ScheduleType:
+     """Convert protobuf ScheduleType to string literal."""
+     if schedule_type == pb2.SCHEDULE_TYPE_CRON:
+         return "cron"
+     elif schedule_type == pb2.SCHEDULE_TYPE_INTERVAL:
+         return "interval"
+     else:
+         return "cron"  # Default fallback
+
+
+ def _proto_schedule_status_to_str(
+     status: "pb2.ScheduleStatus.V",
+ ) -> ScheduleStatus:
+     """Convert protobuf ScheduleStatus to string literal."""
+     if status == pb2.SCHEDULE_STATUS_ACTIVE:
+         return "active"
+     elif status == pb2.SCHEDULE_STATUS_PAUSED:
+         return "paused"
+     else:
+         return "active"  # Default fallback
+
+
+ async def list_schedules(
+     status_filter: Optional[ScheduleStatus] = None,
+ ) -> List[ScheduleInfo]:
+     """
+     List all registered workflow schedules.
+
+     Args:
+         status_filter: Optional filter by status ("active" or "paused").
+             If None, returns all non-deleted schedules.
+
+     Returns:
+         A list of ScheduleInfo objects containing schedule details.
+
+     Examples:
+         # List all schedules
+         schedules = await list_schedules()
+         for s in schedules:
+             print(f"{s.workflow_name}: {s.status}")
+
+         # List only active schedules
+         active = await list_schedules(status_filter="active")
+
+         # List only paused schedules
+         paused = await list_schedules(status_filter="paused")
+
+     Raises:
+         RuntimeError: If the gRPC call fails.
+     """
+     request = pb2.ListSchedulesRequest()
+     if status_filter is not None:
+         request.status_filter = status_filter
+
+     async with ensure_singleton():
+         stub = await _workflow_stub()
+
+         try:
+             response = await stub.ListSchedules(request, timeout=30.0)
+         except aio.AioRpcError as exc:
+             raise RuntimeError(f"Failed to list schedules: {exc}") from exc
+
+     schedules = []
+     for s in response.schedules:
+         schedules.append(
+             ScheduleInfo(
+                 id=s.id,
+                 workflow_name=s.workflow_name,
+                 schedule_name=s.schedule_name,
+                 schedule_type=_proto_schedule_type_to_str(s.schedule_type),
+                 cron_expression=s.cron_expression if s.cron_expression else None,
+                 interval_seconds=s.interval_seconds if s.interval_seconds else None,
+                 jitter_seconds=s.jitter_seconds if s.jitter_seconds else None,
+                 status=_proto_schedule_status_to_str(s.status),
+                 next_run_at=_parse_iso_datetime(s.next_run_at),
+                 last_run_at=_parse_iso_datetime(s.last_run_at),
+                 last_instance_id=s.last_instance_id if s.last_instance_id else None,
+                 created_at=_parse_iso_datetime(s.created_at),  # type: ignore
+                 updated_at=_parse_iso_datetime(s.updated_at),  # type: ignore
+             )
+         )
+
+     return schedules
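
A sketch of the schedule lifecycle these functions imply, assuming a Workflow subclass named ReportWorkflow (hypothetical) and a reachable rappel server:

    import asyncio
    from datetime import timedelta

    from rappel.schedule import (
        delete_schedule,
        list_schedules,
        pause_schedule,
        resume_schedule,
        schedule_workflow,
    )

    async def main() -> None:
        # Cron schedule: every day at 02:00, with up to 5 minutes of jitter.
        await schedule_workflow(
            ReportWorkflow,
            schedule_name="nightly",
            schedule="0 2 * * *",
            jitter=timedelta(minutes=5),
        )

        await pause_schedule(ReportWorkflow, schedule_name="nightly")
        await resume_schedule(ReportWorkflow, schedule_name="nightly")

        for s in await list_schedules(status_filter="active"):
            print(s.schedule_name, s.next_run_at)

        await delete_schedule(ReportWorkflow, schedule_name="nightly")

    asyncio.run(main())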