rappel 0.10.0__py3-none-win_amd64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of rappel might be problematic. See the advisory on the package registry for more details.

rappel/logger.py ADDED
@@ -0,0 +1,39 @@
1
+ """Simple logging helpers for configurable rappel loggers."""
2
+
3
+ import logging
4
+ import os
5
+ from typing import Optional
6
+
7
+ DEFAULT_LEVEL = logging.INFO
8
+ ENV_VAR = "RAPPEL_LOG_LEVEL"
9
+
10
+
11
+ def _resolve_level(value: Optional[str]) -> int:
12
+ if not value:
13
+ return DEFAULT_LEVEL
14
+ normalized = value.strip().upper()
15
+ mapping = {
16
+ "CRITICAL": logging.CRITICAL,
17
+ "FATAL": logging.FATAL,
18
+ "ERROR": logging.ERROR,
19
+ "WARNING": logging.WARNING,
20
+ "WARN": logging.WARNING,
21
+ "INFO": logging.INFO,
22
+ "DEBUG": logging.DEBUG,
23
+ "NOTSET": logging.NOTSET,
24
+ }
25
+ return mapping.get(normalized, DEFAULT_LEVEL)
26
+
27
+
28
def configure(name: str) -> logging.Logger:
    """Build (or fetch) a named logger whose level comes from RAPPEL_LOG_LEVEL.

    A stream handler using a "[name] LEVEL: message" format is attached the
    first time a given logger is configured; repeat calls only refresh the
    logger's level and leave existing handlers untouched.
    """
    resolved = _resolve_level(os.environ.get(ENV_VAR))
    log = logging.getLogger(name)
    log.setLevel(resolved)
    if log.handlers:
        return log
    stream = logging.StreamHandler()
    stream.setLevel(resolved)
    stream.setFormatter(logging.Formatter("[%(name)s] %(levelname)s: %(message)s"))
    log.addHandler(stream)
    return log
rappel/registry.py ADDED
@@ -0,0 +1,111 @@
1
+ from collections.abc import Awaitable, Callable
2
+ from dataclasses import dataclass
3
+ from threading import RLock
4
+ from typing import Any, Optional
5
+
6
# Signature shared by every registered action: any arguments, awaitable result.
AsyncAction = Callable[..., Awaitable[Any]]


@dataclass
class _ActionEntry:
    """Registry record pairing an action's identity with its callable."""

    module: str  # Python module the action was declared in
    name: str  # action name within that module (from the @action decorator)
    func: AsyncAction  # the async callable executed for this action
14
+
15
+
16
+ def _make_key(module: str, name: str) -> str:
17
+ """Create a registry key from module and action name."""
18
+ return f"{module}:{name}"
19
+
20
+
21
class ActionRegistry:
    """Thread-safe, in-memory table of user-defined actions.

    Entries are stored under a "module:name" key, so identical action names
    may coexist across different modules. All public methods take the
    registry lock, making the registry safe to share between threads.
    """

    def __init__(self) -> None:
        self._actions: dict[str, _ActionEntry] = {}
        self._lock = RLock()

    def _source_fingerprint(self, func: AsyncAction) -> tuple[str | None, str | None]:
        """Best-effort (filename, qualname) identity for a callable.

        Returns (None, None) when the callable has no code object, which
        callers treat as "identity unknown".
        """
        target: Any = func
        if not hasattr(target, "__code__"):
            return (None, None)
        code = target.__code__
        qualname = getattr(target, "__qualname__", None)
        filename = code.co_filename
        # Guard against exotic callables whose attributes are not strings.
        if not isinstance(filename, str):
            filename = None
        if qualname is not None and not isinstance(qualname, str):
            qualname = None
        return (filename, qualname)

    def _is_same_action_definition(self, existing: AsyncAction, new: AsyncAction) -> bool:
        """True when both callables denote the same source definition."""
        if existing is new:
            return True
        fp_old = self._source_fingerprint(existing)
        fp_new = self._source_fingerprint(new)
        # Without a usable fingerprint on both sides we cannot prove equality.
        if (None, None) in (fp_old, fp_new):
            return False
        return fp_old == fp_new

    def register(self, module: str, name: str, func: AsyncAction) -> None:
        """Register an action under its module:name key.

        Re-registering the same source definition (e.g. after a module
        reload) silently replaces the stored callable.

        Args:
            module: Python module that declares the action.
            name: Action name (from the @action decorator).
            func: Async callable to execute.

        Raises:
            ValueError: The key is already taken by a different
                implementation.
        """
        key = module + ":" + name
        entry = _ActionEntry(module=module, name=name, func=func)
        with self._lock:
            current = self._actions.get(key)
            if current is not None and not self._is_same_action_definition(current.func, func):
                raise ValueError(f"action '{module}:{name}' already registered")
            self._actions[key] = entry

    def get(self, module: str, name: str) -> Optional[AsyncAction]:
        """Return the registered callable for module:name, or None.

        Args:
            module: Python module containing the action.
            name: Action name.
        """
        key = module + ":" + name
        with self._lock:
            entry = self._actions.get(key)
        return entry.func if entry is not None else None

    def names(self) -> list[str]:
        """All registered keys, sorted, in module:name form."""
        with self._lock:
            keys = list(self._actions)
        keys.sort()
        return keys

    def entries(self) -> list[_ActionEntry]:
        """Snapshot of every registered entry."""
        with self._lock:
            return [*self._actions.values()]

    def reset(self) -> None:
        """Drop every registered action."""
        with self._lock:
            self._actions.clear()
109
+
110
+
111
# Process-wide singleton registry used by the rest of the package.
registry = ActionRegistry()
rappel/schedule.py ADDED
@@ -0,0 +1,376 @@
1
+ """
2
+ Scheduled workflow execution.
3
+
4
+ This module provides functions for registering workflows to run on a cron
5
+ schedule or at fixed intervals.
6
+ """
7
+
8
+ from dataclasses import dataclass
9
+ from datetime import datetime, timedelta
10
+ from typing import Any, Dict, List, Literal, Optional, Type, Union
11
+
12
+ from grpc import StatusCode, aio # type: ignore[attr-defined]
13
+
14
+ from proto import messages_pb2 as pb2
15
+
16
+ from .bridge import _workflow_stub, ensure_singleton
17
+ from .exceptions import ScheduleAlreadyExistsError
18
+ from .workflow import Workflow
19
+
20
# How a schedule fires: from a cron expression or at a fixed interval.
ScheduleType = Literal["cron", "interval"]
# Whether a schedule is currently firing or suspended.
ScheduleStatus = Literal["active", "paused"]
22
+
23
+
24
@dataclass
class ScheduleInfo:
    """Information about a registered schedule."""

    id: str  # server-assigned schedule ID
    workflow_name: str  # workflow this schedule belongs to
    schedule_name: str  # name unique within the workflow
    schedule_type: ScheduleType  # "cron" or "interval"
    cron_expression: Optional[str]  # set when schedule_type == "cron"
    interval_seconds: Optional[int]  # set when schedule_type == "interval"
    jitter_seconds: Optional[int]  # optional jitter window, in seconds
    status: ScheduleStatus  # "active" or "paused"
    next_run_at: Optional[datetime]  # upcoming fire time, if any
    last_run_at: Optional[datetime]  # most recent fire time, if any
    last_instance_id: Optional[str]  # workflow instance created by the last run
    created_at: datetime
    updated_at: datetime
41
+
42
+
43
async def schedule_workflow(
    workflow_cls: Type[Workflow],
    *,
    schedule_name: str,
    schedule: Union[str, timedelta],
    jitter: Optional[timedelta] = None,
    inputs: Optional[Dict[str, Any]] = None,
    priority: Optional[int] = None,
) -> str:
    """Register a schedule for *workflow_cls*.

    The workflow DAG and the schedule are registered in a single call; when
    the schedule fires, the workflow version registered here is executed.

    Args:
        workflow_cls: Workflow class to run on the schedule.
        schedule_name: Name unique within the workflow. Multiple schedules
            with different inputs may coexist per workflow.
        schedule: Either a cron expression string (e.g. "0 * * * *" for
            hourly) or a timedelta for interval-based scheduling.
        jitter: Optional jitter window added to each scheduled run.
        inputs: Optional keyword arguments passed to each scheduled run.
        priority: Optional queue priority; higher values are processed
            first (default 0).

    Returns:
        The server-assigned schedule ID.

    Examples:
        await schedule_workflow(MyWorkflow, schedule_name="hourly-run",
                                schedule="0 * * * *")
        await schedule_workflow(MyWorkflow, schedule_name="frequent-check",
                                schedule=timedelta(minutes=5))

    Raises:
        ValueError: Empty schedule_name, non-positive interval, or an
            invalid cron expression.
        TypeError: *schedule* is neither str nor timedelta.
        ScheduleAlreadyExistsError: A schedule with this name already exists.
        RuntimeError: Any other gRPC failure.
    """
    if not schedule_name:
        raise ValueError("schedule_name is required")

    workflow_name = workflow_cls.short_name()

    # Translate the Python-level schedule spec into the proto definition.
    definition = pb2.ScheduleDefinition()
    if isinstance(schedule, str):
        definition.type = pb2.SCHEDULE_TYPE_CRON
        definition.cron_expression = schedule
    elif isinstance(schedule, timedelta):
        seconds = int(schedule.total_seconds())
        if seconds <= 0:
            raise ValueError("Interval must be positive")
        definition.type = pb2.SCHEDULE_TYPE_INTERVAL
        definition.interval_seconds = seconds
    else:
        raise TypeError(f"schedule must be str or timedelta, got {type(schedule)}")

    if jitter is not None:
        jitter_total = int(jitter.total_seconds())
        if jitter_total < 0:
            raise ValueError("jitter must be non-negative")
        definition.jitter_seconds = jitter_total

    # The scheduler can only create instances from a registered workflow
    # version, so the DAG registration rides along with the schedule.
    request = pb2.RegisterScheduleRequest(
        workflow_name=workflow_name,
        schedule_name=schedule_name,
        schedule=definition,
        registration=workflow_cls._build_registration_payload(),
    )

    context = workflow_cls._build_initial_context((), inputs or {})
    if context.arguments:
        request.inputs.CopyFrom(context)

    if priority is not None:
        request.priority = priority

    # Ship the combined registration to the server.
    async with ensure_singleton():
        stub = await _workflow_stub()

        try:
            response = await stub.RegisterSchedule(request, timeout=30.0)
        except aio.AioRpcError as exc:
            if exc.code() == StatusCode.ALREADY_EXISTS:
                raise ScheduleAlreadyExistsError(
                    f"schedule already exists: {workflow_name}/{schedule_name}"
                ) from exc
            raise RuntimeError(f"Failed to register schedule: {exc}") from exc

        return response.schedule_id
176
+
177
+
178
async def _set_schedule_status(
    workflow_cls: Type[Workflow],
    schedule_name: str,
    status: "pb2.ScheduleStatus.V",
    verb: str,
) -> bool:
    """Shared RPC driver behind pause_schedule/resume_schedule.

    Args:
        workflow_cls: Workflow whose schedule is updated.
        schedule_name: Schedule to update; must be non-empty.
        status: Target protobuf status value.
        verb: Human-readable action name ("pause"/"resume") used in error
            messages.

    Returns:
        True if a schedule was found and updated, False otherwise.

    Raises:
        ValueError: If schedule_name is empty.
        RuntimeError: If the gRPC call fails.
    """
    if not schedule_name:
        raise ValueError("schedule_name is required")

    request = pb2.UpdateScheduleStatusRequest(
        workflow_name=workflow_cls.short_name(),
        schedule_name=schedule_name,
        status=status,
    )
    async with ensure_singleton():
        stub = await _workflow_stub()

        try:
            response = await stub.UpdateScheduleStatus(request, timeout=30.0)
        except aio.AioRpcError as exc:
            raise RuntimeError(f"Failed to {verb} schedule: {exc}") from exc

        return response.success


async def pause_schedule(workflow_cls: Type[Workflow], *, schedule_name: str) -> bool:
    """Pause a workflow's schedule.

    The schedule will not fire until resumed. Existing running instances
    are not affected.

    Args:
        workflow_cls: The Workflow class whose schedule to pause.
        schedule_name: The name of the schedule to pause.

    Returns:
        True if a schedule was found and paused, False otherwise.

    Raises:
        ValueError: If schedule_name is empty.
        RuntimeError: If the gRPC call fails.
    """
    return await _set_schedule_status(
        workflow_cls, schedule_name, pb2.SCHEDULE_STATUS_PAUSED, "pause"
    )


async def resume_schedule(workflow_cls: Type[Workflow], *, schedule_name: str) -> bool:
    """Resume a paused workflow schedule.

    Args:
        workflow_cls: The Workflow class whose schedule to resume.
        schedule_name: The name of the schedule to resume.

    Returns:
        True if a schedule was found and resumed, False otherwise.

    Raises:
        ValueError: If schedule_name is empty.
        RuntimeError: If the gRPC call fails.
    """
    return await _set_schedule_status(
        workflow_cls, schedule_name, pb2.SCHEDULE_STATUS_ACTIVE, "resume"
    )


async def delete_schedule(workflow_cls: Type[Workflow], *, schedule_name: str) -> bool:
    """Delete a workflow's schedule.

    The schedule is soft-deleted and can be recreated by calling
    schedule_workflow again.

    Args:
        workflow_cls: The Workflow class whose schedule to delete.
        schedule_name: The name of the schedule to delete.

    Returns:
        True if a schedule was found and deleted, False otherwise.

    Raises:
        ValueError: If schedule_name is empty.
        RuntimeError: If the gRPC call fails.
    """
    if not schedule_name:
        raise ValueError("schedule_name is required")

    request = pb2.DeleteScheduleRequest(
        workflow_name=workflow_cls.short_name(),
        schedule_name=schedule_name,
    )
    async with ensure_singleton():
        stub = await _workflow_stub()

        try:
            response = await stub.DeleteSchedule(request, timeout=30.0)
        except aio.AioRpcError as exc:
            raise RuntimeError(f"Failed to delete schedule: {exc}") from exc

        return response.success
283
+
284
+
285
+ def _parse_iso_datetime(value: str) -> Optional[datetime]:
286
+ """Parse an ISO 8601 datetime string, returning None if empty."""
287
+ if not value:
288
+ return None
289
+ return datetime.fromisoformat(value.replace("Z", "+00:00"))
290
+
291
+
292
def _proto_schedule_type_to_str(
    schedule_type: "pb2.ScheduleType.V",
) -> ScheduleType:
    """Map a protobuf ScheduleType value onto its string literal."""
    # Anything other than INTERVAL (including unknown values) maps to the
    # "cron" fallback, matching the server's default type.
    return "interval" if schedule_type == pb2.SCHEDULE_TYPE_INTERVAL else "cron"
302
+
303
+
304
def _proto_schedule_status_to_str(
    status: "pb2.ScheduleStatus.V",
) -> ScheduleStatus:
    """Map a protobuf ScheduleStatus value onto its string literal."""
    # Anything other than PAUSED (including unknown values) maps to the
    # "active" fallback.
    return "paused" if status == pb2.SCHEDULE_STATUS_PAUSED else "active"
314
+
315
+
316
async def list_schedules(
    status_filter: Optional[ScheduleStatus] = None,
) -> List[ScheduleInfo]:
    """Fetch every registered workflow schedule from the server.

    Args:
        status_filter: Restrict results to "active" or "paused" schedules.
            None returns all non-deleted schedules.

    Returns:
        One ScheduleInfo per registered schedule.

    Examples:
        schedules = await list_schedules()
        active = await list_schedules(status_filter="active")

    Raises:
        RuntimeError: If the gRPC call fails.
    """
    request = pb2.ListSchedulesRequest()
    if status_filter is not None:
        request.status_filter = status_filter

    async with ensure_singleton():
        stub = await _workflow_stub()

        try:
            response = await stub.ListSchedules(request, timeout=30.0)
        except aio.AioRpcError as exc:
            raise RuntimeError(f"Failed to list schedules: {exc}") from exc

        # Proto scalar fields default to ""/0; translate those to None so
        # callers see "unset" rather than falsy sentinels.
        return [
            ScheduleInfo(
                id=item.id,
                workflow_name=item.workflow_name,
                schedule_name=item.schedule_name,
                schedule_type=_proto_schedule_type_to_str(item.schedule_type),
                cron_expression=item.cron_expression or None,
                interval_seconds=item.interval_seconds or None,
                jitter_seconds=item.jitter_seconds or None,
                status=_proto_schedule_status_to_str(item.status),
                next_run_at=_parse_iso_datetime(item.next_run_at),
                last_run_at=_parse_iso_datetime(item.last_run_at),
                last_instance_id=item.last_instance_id or None,
                created_at=_parse_iso_datetime(item.created_at),  # type: ignore
                updated_at=_parse_iso_datetime(item.updated_at),  # type: ignore
            )
            for item in response.schedules
        ]