rappel 0.4.1__py3-none-win_amd64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of rappel might be problematic. Click here for more details.

rappel/logger.py ADDED
@@ -0,0 +1,39 @@
1
+ """Simple logging helpers for configurable rappel loggers."""
2
+
3
+ import logging
4
+ import os
5
+ from typing import Optional
6
+
7
+ DEFAULT_LEVEL = logging.INFO
8
+ ENV_VAR = "RAPPEL_LOG_LEVEL"
9
+
10
+
11
+ def _resolve_level(value: Optional[str]) -> int:
12
+ if not value:
13
+ return DEFAULT_LEVEL
14
+ normalized = value.strip().upper()
15
+ mapping = {
16
+ "CRITICAL": logging.CRITICAL,
17
+ "FATAL": logging.FATAL,
18
+ "ERROR": logging.ERROR,
19
+ "WARNING": logging.WARNING,
20
+ "WARN": logging.WARNING,
21
+ "INFO": logging.INFO,
22
+ "DEBUG": logging.DEBUG,
23
+ "NOTSET": logging.NOTSET,
24
+ }
25
+ return mapping.get(normalized, DEFAULT_LEVEL)
26
+
27
+
28
def configure(name: str) -> logging.Logger:
    """Return a logger whose level is taken from RAPPEL_LOG_LEVEL.

    A stream handler with a "[name] LEVEL: message" formatter is attached
    only on the first call for a given logger; later calls just refresh
    the level and reuse the existing handlers.
    """
    level = _resolve_level(os.environ.get(ENV_VAR))
    log = logging.getLogger(name)
    log.setLevel(level)
    if not log.handlers:
        stream_handler = logging.StreamHandler()
        stream_handler.setLevel(level)
        stream_handler.setFormatter(
            logging.Formatter("[%(name)s] %(levelname)s: %(message)s")
        )
        log.addHandler(stream_handler)
    return log
rappel/registry.py ADDED
@@ -0,0 +1,75 @@
1
+ from collections.abc import Awaitable, Callable
2
+ from dataclasses import dataclass
3
+ from threading import RLock
4
+ from typing import Any, Optional
5
+
6
# Signature shared by every registered action: an async callable with
# arbitrary arguments.
AsyncAction = Callable[..., Awaitable[Any]]


@dataclass
class _ActionEntry:
    """Internal record pairing an action's identity with its callable."""

    module: str  # Python module the action belongs to
    name: str  # action name (from the @action decorator)
    func: AsyncAction  # the async callable to execute
14
+
15
+
16
+ def _make_key(module: str, name: str) -> str:
17
+ """Create a registry key from module and action name."""
18
+ return f"{module}:{name}"
19
+
20
+
21
class ActionRegistry:
    """In-memory registry of user-defined actions.

    Actions are keyed by (module, name), allowing the same action name
    to be used in different modules. All public methods take the
    internal lock, so the registry is safe to share across threads.
    """

    def __init__(self) -> None:
        # key "module:name" -> _ActionEntry
        self._actions: dict[str, _ActionEntry] = {}
        self._lock = RLock()

    def register(self, module: str, name: str, func: AsyncAction) -> None:
        """Register an action under its module and name.

        Args:
            module: The Python module containing the action.
            name: The action name (from @action decorator).
            func: The async function to execute.

        Raises:
            ValueError: If an action with the same module:name is already registered.
        """
        key = _make_key(module, name)
        entry = _ActionEntry(module=module, name=name, func=func)
        with self._lock:
            if key in self._actions:
                raise ValueError(f"action '{module}:{name}' already registered")
            self._actions[key] = entry

    def get(self, module: str, name: str) -> Optional[AsyncAction]:
        """Look up an action by module and name.

        Args:
            module: The Python module containing the action.
            name: The action name.

        Returns:
            The action function if found, None otherwise.
        """
        with self._lock:
            entry = self._actions.get(_make_key(module, name))
        return entry.func if entry else None

    def names(self) -> list[str]:
        """Return all registered action keys (module:name format)."""
        with self._lock:
            return sorted(self._actions)

    def reset(self) -> None:
        """Clear all registered actions."""
        with self._lock:
            self._actions.clear()


# Module-level singleton shared by the decorator machinery.
registry = ActionRegistry()
rappel/schedule.py ADDED
@@ -0,0 +1,294 @@
1
+ """
2
+ Scheduled workflow execution.
3
+
4
+ This module provides functions for registering workflows to run on a cron
5
+ schedule or at fixed intervals.
6
+ """
7
+
8
+ from dataclasses import dataclass
9
+ from datetime import datetime, timedelta
10
+ from typing import Any, Dict, List, Literal, Optional, Type, Union
11
+
12
+ from grpc import aio # type: ignore[attr-defined]
13
+
14
+ from proto import messages_pb2 as pb2
15
+
16
+ from .bridge import _workflow_stub, ensure_singleton
17
+ from .serialization import build_arguments_from_kwargs
18
+ from .workflow import Workflow
19
+
20
# String-literal mirrors of the protobuf schedule enums.
ScheduleType = Literal["cron", "interval"]
ScheduleStatus = Literal["active", "paused"]


@dataclass
class ScheduleInfo:
    """Information about a registered schedule."""

    id: str  # server-assigned schedule identifier
    workflow_name: str  # short name of the scheduled workflow
    schedule_type: ScheduleType  # "cron" or "interval"
    cron_expression: Optional[str]  # set when schedule_type == "cron"
    interval_seconds: Optional[int]  # set when schedule_type == "interval"
    status: ScheduleStatus  # "active" or "paused"
    next_run_at: Optional[datetime]  # next firing time, if the server reports one
    last_run_at: Optional[datetime]  # most recent firing, if any
    last_instance_id: Optional[str]  # workflow instance created by the last firing
    created_at: datetime
    updated_at: datetime
39
+
40
+
41
async def schedule_workflow(
    workflow_cls: Type[Workflow],
    *,
    schedule: Union[str, timedelta],
    inputs: Optional[Dict[str, Any]] = None,
) -> str:
    """
    Register a schedule for a workflow.

    This function registers both the workflow DAG and the schedule in a single
    call. When the schedule fires, the registered workflow version will be
    executed.

    Args:
        workflow_cls: The Workflow class to schedule.
        schedule: Either a cron expression string (e.g., "0 * * * *" for hourly)
            or a timedelta for interval-based scheduling.
        inputs: Optional keyword arguments to pass to each scheduled run.

    Returns:
        The schedule ID.

    Examples:
        # Run every hour at minute 0
        await schedule_workflow(MyWorkflow, schedule="0 * * * *")

        # Run every 5 minutes
        await schedule_workflow(MyWorkflow, schedule=timedelta(minutes=5))

        # Run daily at midnight with inputs
        await schedule_workflow(
            MyWorkflow,
            schedule="0 0 * * *",
            inputs={"batch_size": 100}
        )

    Raises:
        ValueError: If the cron expression is empty or the interval is
            non-positive.
        TypeError: If schedule is neither a str nor a timedelta.
        RuntimeError: If the gRPC call fails.
    """
    workflow_name = workflow_cls.short_name()

    # Build the schedule definition from whichever form was supplied.
    schedule_def = pb2.ScheduleDefinition()
    if isinstance(schedule, str):
        # Fix: an empty/blank cron string was previously sent to the server
        # unchecked, despite the documented ValueError contract. Fail fast
        # client-side instead.
        if not schedule.strip():
            raise ValueError("Cron expression must not be empty")
        schedule_def.type = pb2.SCHEDULE_TYPE_CRON
        schedule_def.cron_expression = schedule
    elif isinstance(schedule, timedelta):
        interval_seconds = int(schedule.total_seconds())
        if interval_seconds <= 0:
            raise ValueError("Interval must be positive")
        schedule_def.type = pb2.SCHEDULE_TYPE_INTERVAL
        schedule_def.interval_seconds = interval_seconds
    else:
        raise TypeError(f"schedule must be str or timedelta, got {type(schedule)}")

    # The scheduler needs a registered workflow version to create instances
    # from, so the DAG registration payload is bundled with the schedule.
    registration = workflow_cls._build_registration_payload()

    request = pb2.RegisterScheduleRequest(
        workflow_name=workflow_name,
        schedule=schedule_def,
        registration=registration,
    )

    # Attach run inputs only when provided (an empty dict is treated as absent).
    if inputs:
        request.inputs.CopyFrom(build_arguments_from_kwargs(inputs))

    # Send to the server; ensure_singleton guards bridge setup.
    async with ensure_singleton():
        stub = await _workflow_stub()
        try:
            response = await stub.RegisterSchedule(request, timeout=30.0)
        except aio.AioRpcError as exc:
            raise RuntimeError(f"Failed to register schedule: {exc}") from exc

    return response.schedule_id
123
+
124
+
125
async def pause_schedule(workflow_cls: Type[Workflow]) -> bool:
    """
    Pause a workflow's schedule.

    A paused schedule does not fire until resumed; instances that are
    already running are left untouched.

    Args:
        workflow_cls: The Workflow class whose schedule to pause.

    Returns:
        True if a schedule was found and paused, False otherwise.
    """
    request = pb2.UpdateScheduleStatusRequest(
        workflow_name=workflow_cls.short_name(),
        status=pb2.SCHEDULE_STATUS_PAUSED,
    )
    async with ensure_singleton():
        stub = await _workflow_stub()
        try:
            response = await stub.UpdateScheduleStatus(request, timeout=30.0)
        except aio.AioRpcError as exc:
            raise RuntimeError(f"Failed to pause schedule: {exc}") from exc
    return response.success
151
+
152
+
153
async def resume_schedule(workflow_cls: Type[Workflow]) -> bool:
    """
    Resume a paused workflow schedule.

    Args:
        workflow_cls: The Workflow class whose schedule to resume.

    Returns:
        True if a schedule was found and resumed, False otherwise.
    """
    request = pb2.UpdateScheduleStatusRequest(
        workflow_name=workflow_cls.short_name(),
        status=pb2.SCHEDULE_STATUS_ACTIVE,
    )
    async with ensure_singleton():
        stub = await _workflow_stub()
        try:
            response = await stub.UpdateScheduleStatus(request, timeout=30.0)
        except aio.AioRpcError as exc:
            raise RuntimeError(f"Failed to resume schedule: {exc}") from exc
    return response.success
176
+
177
+
178
async def delete_schedule(workflow_cls: Type[Workflow]) -> bool:
    """
    Delete a workflow's schedule.

    The delete is soft: calling schedule_workflow again recreates the
    schedule.

    Args:
        workflow_cls: The Workflow class whose schedule to delete.

    Returns:
        True if a schedule was found and deleted, False otherwise.
    """
    request = pb2.DeleteScheduleRequest(
        workflow_name=workflow_cls.short_name(),
    )
    async with ensure_singleton():
        stub = await _workflow_stub()
        try:
            response = await stub.DeleteSchedule(request, timeout=30.0)
        except aio.AioRpcError as exc:
            raise RuntimeError(f"Failed to delete schedule: {exc}") from exc
    return response.success
203
+
204
+
205
+ def _parse_iso_datetime(value: str) -> Optional[datetime]:
206
+ """Parse an ISO 8601 datetime string, returning None if empty."""
207
+ if not value:
208
+ return None
209
+ return datetime.fromisoformat(value.replace("Z", "+00:00"))
210
+
211
+
212
def _proto_schedule_type_to_str(
    schedule_type: "pb2.ScheduleType.V",
) -> ScheduleType:
    """Convert a protobuf ScheduleType to its string-literal form."""
    if schedule_type == pb2.SCHEDULE_TYPE_INTERVAL:
        return "interval"
    # Everything else — including unknown enum values — maps to the
    # default schedule type.
    return "cron"
222
+
223
+
224
def _proto_schedule_status_to_str(
    status: "pb2.ScheduleStatus.V",
) -> ScheduleStatus:
    """Convert a protobuf ScheduleStatus to its string-literal form."""
    if status == pb2.SCHEDULE_STATUS_PAUSED:
        return "paused"
    # Everything else — including unknown enum values — is reported active.
    return "active"
234
+
235
+
236
async def list_schedules(
    status_filter: Optional[ScheduleStatus] = None,
) -> List[ScheduleInfo]:
    """
    List all registered workflow schedules.

    Args:
        status_filter: Optional filter by status ("active" or "paused").
            If None, returns all non-deleted schedules.

    Returns:
        A list of ScheduleInfo objects containing schedule details.

    Examples:
        # List all schedules
        schedules = await list_schedules()
        for s in schedules:
            print(f"{s.workflow_name}: {s.status}")

        # List only active schedules
        active = await list_schedules(status_filter="active")

    Raises:
        RuntimeError: If the gRPC call fails.
    """
    request = pb2.ListSchedulesRequest()
    if status_filter is not None:
        request.status_filter = status_filter

    async with ensure_singleton():
        stub = await _workflow_stub()
        try:
            response = await stub.ListSchedules(request, timeout=30.0)
        except aio.AioRpcError as exc:
            raise RuntimeError(f"Failed to list schedules: {exc}") from exc

    # Proto string/int fields default to ""/0; normalize those to None.
    return [
        ScheduleInfo(
            id=item.id,
            workflow_name=item.workflow_name,
            schedule_type=_proto_schedule_type_to_str(item.schedule_type),
            cron_expression=item.cron_expression or None,
            interval_seconds=item.interval_seconds or None,
            status=_proto_schedule_status_to_str(item.status),
            next_run_at=_parse_iso_datetime(item.next_run_at),
            last_run_at=_parse_iso_datetime(item.last_run_at),
            last_instance_id=item.last_instance_id or None,
            created_at=_parse_iso_datetime(item.created_at),  # type: ignore
            updated_at=_parse_iso_datetime(item.updated_at),  # type: ignore
        )
        for item in response.schedules
    ]
@@ -0,0 +1,205 @@
1
+ import dataclasses
2
+ import importlib
3
+ import traceback
4
+ from typing import Any
5
+
6
+ from google.protobuf import json_format, struct_pb2
7
+ from pydantic import BaseModel
8
+
9
+ from proto import messages_pb2 as pb2
10
+
11
# Protobuf's NullValue enum member, used to encode Python None.
NULL_VALUE = struct_pb2.NULL_VALUE  # type: ignore[attr-defined]

# Scalar types serialized via PrimitiveWorkflowArgument.
PRIMITIVE_TYPES = (str, int, float, bool, type(None))


def dumps(value: Any) -> pb2.WorkflowArgumentValue:
    """Serialize a Python value into a WorkflowArgumentValue message.

    Thin public wrapper over the recursive _to_argument_value encoder.
    """

    return _to_argument_value(value)
20
+
21
+
22
def loads(data: Any) -> Any:
    """Deserialize a workflow argument payload into a Python object.

    Accepts either a WorkflowArgumentValue message or its JSON-dict form.
    """
    if isinstance(data, pb2.WorkflowArgumentValue):
        return _from_argument_value(data)
    if isinstance(data, dict):
        message = pb2.WorkflowArgumentValue()
        json_format.ParseDict(data, message)
        return _from_argument_value(message)
    raise TypeError("argument value payload must be a dict or ArgumentValue message")
33
+
34
+
35
def build_arguments_from_kwargs(kwargs: dict[str, Any]) -> pb2.WorkflowArguments:
    """Pack a kwargs mapping into a WorkflowArguments message."""
    arguments = pb2.WorkflowArguments()
    for key, value in kwargs.items():
        item = arguments.arguments.add()
        item.key = key
        item.value.CopyFrom(dumps(value))
    return arguments
42
+
43
+
44
def arguments_to_kwargs(arguments: pb2.WorkflowArguments | None) -> dict[str, Any]:
    """Unpack a WorkflowArguments message back into a kwargs dict."""
    if arguments is None:
        return {}
    return {entry.key: loads(entry.value) for entry in arguments.arguments}
51
+
52
+
53
def _to_argument_value(value: Any) -> pb2.WorkflowArgumentValue:
    # Recursively encode a Python value into the WorkflowArgumentValue oneof.
    # Branch order is load-bearing: primitives (incl. bool/None) first, then
    # exceptions, pydantic models, dataclasses, and finally plain containers.
    argument = pb2.WorkflowArgumentValue()
    if isinstance(value, PRIMITIVE_TYPES):
        argument.primitive.CopyFrom(_serialize_primitive(value))
        return argument
    if isinstance(value, BaseException):
        # Exceptions are captured as metadata (type/module/message/traceback);
        # _from_argument_value returns this as a plain dict, not a re-raised
        # exception.
        argument.exception.type = value.__class__.__name__
        argument.exception.module = value.__class__.__module__
        argument.exception.message = str(value)
        tb_text = "".join(traceback.format_exception(type(value), value, value.__traceback__))
        argument.exception.traceback = tb_text
        return argument
    if _is_base_model(value):
        model_class = value.__class__
        model_data = _serialize_model_data(value)
        # module + qualname let the receiving side re-import the class.
        argument.basemodel.module = model_class.__module__
        argument.basemodel.name = model_class.__qualname__
        # Serialize as dict to preserve types (Struct converts all numbers to float)
        for key, item in model_data.items():
            entry = argument.basemodel.data.entries.add()
            entry.key = key
            entry.value.CopyFrom(_to_argument_value(item))
        return argument
    if _is_dataclass_instance(value):
        # Dataclasses use the same basemodel serialization path as Pydantic models
        dc_class = value.__class__
        dc_data = dataclasses.asdict(value)
        argument.basemodel.module = dc_class.__module__
        argument.basemodel.name = dc_class.__qualname__
        for key, item in dc_data.items():
            entry = argument.basemodel.data.entries.add()
            entry.key = key
            entry.value.CopyFrom(_to_argument_value(item))
        return argument
    if isinstance(value, dict):
        # SetInParent marks the (possibly empty) dict branch as the active oneof.
        argument.dict_value.SetInParent()
        for key, item in value.items():
            if not isinstance(key, str):
                raise TypeError("workflow dict keys must be strings")
            entry = argument.dict_value.entries.add()
            entry.key = key
            entry.value.CopyFrom(_to_argument_value(item))
        return argument
    if isinstance(value, list):
        argument.list_value.SetInParent()
        for item in value:
            item_value = argument.list_value.items.add()
            item_value.CopyFrom(_to_argument_value(item))
        return argument
    if isinstance(value, tuple):
        # Tuples get their own branch so round-tripping preserves tuple-ness.
        argument.tuple_value.SetInParent()
        for item in value:
            item_value = argument.tuple_value.items.add()
            item_value.CopyFrom(_to_argument_value(item))
        return argument
    raise TypeError(f"unsupported value type {type(value)!r}")
109
+
110
+
111
def _from_argument_value(argument: pb2.WorkflowArgumentValue) -> Any:
    """Decode a WorkflowArgumentValue message back into a Python object."""
    kind = argument.WhichOneof("kind")  # type: ignore[attr-defined]
    if kind == "primitive":
        return _primitive_to_python(argument.primitive)
    if kind == "basemodel":
        # Rebuild the field dict entry-by-entry to preserve value types.
        payload = {
            entry.key: _from_argument_value(entry.value)
            for entry in argument.basemodel.data.entries
        }
        return _instantiate_serialized_model(
            argument.basemodel.module, argument.basemodel.name, payload
        )
    if kind == "exception":
        exc = argument.exception
        return {
            "type": exc.type,
            "module": exc.module,
            "message": exc.message,
            "traceback": exc.traceback,
        }
    if kind == "list_value":
        return [_from_argument_value(item) for item in argument.list_value.items]
    if kind == "tuple_value":
        return tuple(_from_argument_value(item) for item in argument.tuple_value.items)
    if kind == "dict_value":
        return {
            entry.key: _from_argument_value(entry.value)
            for entry in argument.dict_value.entries
        }
    raise ValueError("argument value missing kind discriminator")
140
+
141
+
142
def _serialize_model_data(model: BaseModel) -> dict[str, Any]:
    """Dump a pydantic model to a plain dict, supporting v2 and v1 APIs."""
    dump = getattr(model, "model_dump", None)  # pydantic v2
    if dump is not None:
        return dump(mode="python")
    legacy = getattr(model, "dict", None)  # pydantic v1
    if legacy is not None:
        return legacy()
    return model.__dict__
148
+
149
+
150
def _serialize_primitive(value: Any) -> pb2.PrimitiveWorkflowArgument:
    """Encode a scalar (None/bool/int/float/str) into the primitive oneof."""
    primitive = pb2.PrimitiveWorkflowArgument()
    if value is None:
        primitive.null_value = NULL_VALUE
    elif isinstance(value, bool):
        # bool must be tested before int: bool is an int subclass.
        primitive.bool_value = value
    elif isinstance(value, int):
        primitive.int_value = value
    elif isinstance(value, float):
        primitive.double_value = value
    elif isinstance(value, str):
        primitive.string_value = value
    else:  # pragma: no cover - unreachable given PRIMITIVE_TYPES
        raise TypeError(f"unsupported primitive type {type(value)!r}")
    return primitive
165
+
166
+
167
def _primitive_to_python(primitive: pb2.PrimitiveWorkflowArgument) -> Any:
    """Decode a PrimitiveWorkflowArgument back into a Python scalar."""
    kind = primitive.WhichOneof("kind")  # type: ignore[attr-defined]
    if kind == "null_value":
        return None
    if kind in ("string_value", "double_value", "int_value", "bool_value"):
        # Field name and oneof discriminator coincide for the scalar cases.
        return getattr(primitive, kind)
    raise ValueError("primitive argument missing kind discriminator")
180
+
181
+
182
def _instantiate_serialized_model(module: str, name: str, model_data: dict[str, Any]) -> Any:
    """Rebuild a model/dataclass instance from its serialized field dict."""
    cls = _import_symbol(module, name)
    validator = getattr(cls, "model_validate", None)  # pydantic v2 path
    if validator is not None:
        return validator(model_data)
    # Dataclasses and pydantic v1 models construct from keyword fields.
    return cls(**model_data)
187
+
188
+
189
def _is_base_model(value: Any) -> bool:
    """Check if value is a pydantic BaseModel instance."""
    return isinstance(value, BaseModel)
191
+
192
+
193
+ def _is_dataclass_instance(value: Any) -> bool:
194
+ """Check if value is a dataclass instance (not a class)."""
195
+ return dataclasses.is_dataclass(value) and not isinstance(value, type)
196
+
197
+
198
+ def _import_symbol(module: str, qualname: str) -> Any:
199
+ module_obj = importlib.import_module(module)
200
+ attr: Any = module_obj
201
+ for part in qualname.split("."):
202
+ attr = getattr(attr, part)
203
+ if not isinstance(attr, type):
204
+ raise ValueError(f"{qualname} from {module} is not a class")
205
+ return attr