rappel 0.4.1-py3-none-win_amd64.whl → 0.8.1-py3-none-win_amd64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

rappel/registry.py CHANGED
@@ -29,6 +29,32 @@ class ActionRegistry:
         self._actions: dict[str, _ActionEntry] = {}
         self._lock = RLock()
 
+    def _source_fingerprint(self, func: AsyncAction) -> tuple[str | None, str | None]:
+        func_any: Any = func
+        try:
+            code = func_any.__code__
+        except AttributeError:
+            return (None, None)
+        try:
+            qualname = func_any.__qualname__
+        except AttributeError:
+            qualname = None
+        filename = code.co_filename
+        if not isinstance(filename, str):
+            filename = None
+        if qualname is not None and not isinstance(qualname, str):
+            qualname = None
+        return (filename, qualname)
+
+    def _is_same_action_definition(self, existing: AsyncAction, new: AsyncAction) -> bool:
+        if existing is new:
+            return True
+        existing_fingerprint = self._source_fingerprint(existing)
+        new_fingerprint = self._source_fingerprint(new)
+        if existing_fingerprint == (None, None) or new_fingerprint == (None, None):
+            return False
+        return existing_fingerprint == new_fingerprint
+
     def register(self, module: str, name: str, func: AsyncAction) -> None:
         """Register an action with its module and name.
 
@@ -38,11 +64,16 @@ class ActionRegistry:
             func: The async function to execute.
 
         Raises:
-            ValueError: If an action with the same module:name is already registered.
+            ValueError: If an action with the same module:name is already registered
+                with a different implementation.
        """
         key = _make_key(module, name)
         with self._lock:
-            if key in self._actions:
+            existing = self._actions.get(key)
+            if existing is not None:
+                if self._is_same_action_definition(existing.func, func):
+                    self._actions[key] = _ActionEntry(module=module, name=name, func=func)
+                    return
                 raise ValueError(f"action '{module}:{name}' already registered")
             self._actions[key] = _ActionEntry(module=module, name=name, func=func)
 
@@ -66,6 +97,11 @@ class ActionRegistry:
         with self._lock:
             return sorted(self._actions.keys())
 
+    def entries(self) -> list[_ActionEntry]:
+        """Return all registered action entries."""
+        with self._lock:
+            return list(self._actions.values())
+
     def reset(self) -> None:
         """Clear all registered actions."""
         with self._lock:
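
The practical effect of the fingerprint check: re-registering the same function (for example, when a module is imported twice) now refreshes the entry instead of failing, while a different implementation under the same module:name key still raises. A minimal sketch of the new behavior, assuming ActionRegistry is importable from rappel.registry (import path assumed):

from rappel.registry import ActionRegistry  # import path assumed

registry = ActionRegistry()

async def send_email(to: str) -> None: ...

# First registration, then a repeat with the same function object:
# the repeat is treated as a refresh, not an error.
registry.register("notifications", "send_email", send_email)
registry.register("notifications", "send_email", send_email)

async def send_email_v2(to: str) -> None: ...

# A different function under the same module:name still raises, because
# its (co_filename, __qualname__) fingerprint differs from the existing one.
try:
    registry.register("notifications", "send_email", send_email_v2)
except ValueError as exc:
    print(exc)  # action 'notifications:send_email' already registered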
rappel/schedule.py CHANGED
@@ -9,12 +9,12 @@ from dataclasses import dataclass
 from datetime import datetime, timedelta
 from typing import Any, Dict, List, Literal, Optional, Type, Union
 
-from grpc import aio  # type: ignore[attr-defined]
+from grpc import StatusCode, aio  # type: ignore[attr-defined]
 
 from proto import messages_pb2 as pb2
 
 from .bridge import _workflow_stub, ensure_singleton
-from .serialization import build_arguments_from_kwargs
+from .exceptions import ScheduleAlreadyExistsError
 from .workflow import Workflow
 
 ScheduleType = Literal["cron", "interval"]
@@ -27,9 +27,11 @@ class ScheduleInfo:
 
     id: str
     workflow_name: str
+    schedule_name: str
     schedule_type: ScheduleType
     cron_expression: Optional[str]
     interval_seconds: Optional[int]
+    jitter_seconds: Optional[int]
     status: ScheduleStatus
     next_run_at: Optional[datetime]
     last_run_at: Optional[datetime]
@@ -41,7 +43,9 @@
 async def schedule_workflow(
     workflow_cls: Type[Workflow],
     *,
+    schedule_name: str,
     schedule: Union[str, timedelta],
+    jitter: Optional[timedelta] = None,
     inputs: Optional[Dict[str, Any]] = None,
 ) -> str:
     """
@@ -53,8 +57,12 @@ async def schedule_workflow(
 
     Args:
         workflow_cls: The Workflow class to schedule.
+        schedule_name: Unique name for this schedule. Allows multiple schedules
+            per workflow with different inputs. Must be unique within
+            a workflow.
         schedule: Either a cron expression string (e.g., "0 * * * *" for hourly)
             or a timedelta for interval-based scheduling.
+        jitter: Optional jitter window to add to each scheduled run.
         inputs: Optional keyword arguments to pass to each scheduled run.
 
     Returns:
@@ -62,22 +70,42 @@ async def schedule_workflow(
 
     Examples:
         # Run every hour at minute 0
-        await schedule_workflow(MyWorkflow, schedule="0 * * * *")
+        await schedule_workflow(
+            MyWorkflow,
+            schedule_name="hourly-run",
+            schedule="0 * * * *"
+        )
 
         # Run every 5 minutes
-        await schedule_workflow(MyWorkflow, schedule=timedelta(minutes=5))
+        await schedule_workflow(
+            MyWorkflow,
+            schedule_name="frequent-check",
+            schedule=timedelta(minutes=5)
+        )
 
-        # Run daily at midnight with inputs
+        # Multiple schedules with different inputs
         await schedule_workflow(
             MyWorkflow,
+            schedule_name="small-batch",
             schedule="0 0 * * *",
             inputs={"batch_size": 100}
         )
+        await schedule_workflow(
+            MyWorkflow,
+            schedule_name="large-batch",
+            schedule="0 12 * * *",
+            inputs={"batch_size": 1000}
+        )
 
     Raises:
-        ValueError: If the cron expression is invalid or interval is non-positive.
+        ValueError: If the cron expression is invalid, interval is non-positive,
+            or schedule_name is empty.
+        ScheduleAlreadyExistsError: If a schedule with the same name already exists.
        RuntimeError: If the gRPC call fails.
    """
+    if not schedule_name:
+        raise ValueError("schedule_name is required")
+
     workflow_name = workflow_cls.short_name()
 
     # Build schedule definition
@@ -94,6 +122,12 @@ async def schedule_workflow(
     else:
         raise TypeError(f"schedule must be str or timedelta, got {type(schedule)}")
 
+    if jitter is not None:
+        jitter_seconds = int(jitter.total_seconds())
+        if jitter_seconds < 0:
+            raise ValueError("jitter must be non-negative")
+        schedule_def.jitter_seconds = jitter_seconds
+
     # Build the workflow registration payload to ensure the DAG is registered
     # This is required for the schedule to execute - the scheduler needs a
     # registered workflow version to create instances from.
@@ -102,13 +136,14 @@ async def schedule_workflow(
     # Build request with both registration and schedule
     request = pb2.RegisterScheduleRequest(
         workflow_name=workflow_name,
+        schedule_name=schedule_name,
         schedule=schedule_def,
         registration=registration,
     )
 
-    # Add inputs if provided
-    if inputs:
-        request.inputs.CopyFrom(build_arguments_from_kwargs(inputs))
+    initial_context = workflow_cls._build_initial_context((), inputs or {})
+    if initial_context.arguments:
+        request.inputs.CopyFrom(initial_context)
 
     # Send to server
     async with ensure_singleton():
@@ -117,12 +152,16 @@
         try:
             response = await stub.RegisterSchedule(request, timeout=30.0)
         except aio.AioRpcError as exc:
+            if exc.code() == StatusCode.ALREADY_EXISTS:
+                raise ScheduleAlreadyExistsError(
+                    f"schedule already exists: {workflow_name}/{schedule_name}"
+                ) from exc
             raise RuntimeError(f"Failed to register schedule: {exc}") from exc
 
     return response.schedule_id
 
 
-async def pause_schedule(workflow_cls: Type[Workflow]) -> bool:
+async def pause_schedule(workflow_cls: Type[Workflow], *, schedule_name: str) -> bool:
     """
     Pause a workflow's schedule.
 
@@ -131,12 +170,21 @@ async def pause_schedule(workflow_cls: Type[Workflow]) -> bool:
 
     Args:
         workflow_cls: The Workflow class whose schedule to pause.
+        schedule_name: The name of the schedule to pause.
 
     Returns:
         True if a schedule was found and paused, False otherwise.
+
+    Raises:
+        ValueError: If schedule_name is empty.
+        RuntimeError: If the gRPC call fails.
     """
+    if not schedule_name:
+        raise ValueError("schedule_name is required")
+
     request = pb2.UpdateScheduleStatusRequest(
         workflow_name=workflow_cls.short_name(),
+        schedule_name=schedule_name,
         status=pb2.SCHEDULE_STATUS_PAUSED,
     )
     async with ensure_singleton():
@@ -150,18 +198,27 @@ async def pause_schedule(workflow_cls: Type[Workflow]) -> bool:
     return response.success
 
 
-async def resume_schedule(workflow_cls: Type[Workflow]) -> bool:
+async def resume_schedule(workflow_cls: Type[Workflow], *, schedule_name: str) -> bool:
     """
     Resume a paused workflow schedule.
 
     Args:
         workflow_cls: The Workflow class whose schedule to resume.
+        schedule_name: The name of the schedule to resume.
 
     Returns:
         True if a schedule was found and resumed, False otherwise.
+
+    Raises:
+        ValueError: If schedule_name is empty.
+        RuntimeError: If the gRPC call fails.
     """
+    if not schedule_name:
+        raise ValueError("schedule_name is required")
+
     request = pb2.UpdateScheduleStatusRequest(
         workflow_name=workflow_cls.short_name(),
+        schedule_name=schedule_name,
         status=pb2.SCHEDULE_STATUS_ACTIVE,
     )
     async with ensure_singleton():
@@ -175,7 +232,7 @@ async def resume_schedule(workflow_cls: Type[Workflow]) -> bool:
     return response.success
 
 
-async def delete_schedule(workflow_cls: Type[Workflow]) -> bool:
+async def delete_schedule(workflow_cls: Type[Workflow], *, schedule_name: str) -> bool:
     """
     Delete a workflow's schedule.
 
@@ -184,12 +241,21 @@ async def delete_schedule(workflow_cls: Type[Workflow]) -> bool:
 
     Args:
         workflow_cls: The Workflow class whose schedule to delete.
+        schedule_name: The name of the schedule to delete.
 
     Returns:
         True if a schedule was found and deleted, False otherwise.
+
+    Raises:
+        ValueError: If schedule_name is empty.
+        RuntimeError: If the gRPC call fails.
     """
+    if not schedule_name:
+        raise ValueError("schedule_name is required")
+
     request = pb2.DeleteScheduleRequest(
         workflow_name=workflow_cls.short_name(),
+        schedule_name=schedule_name,
     )
     async with ensure_singleton():
         stub = await _workflow_stub()
@@ -279,9 +345,11 @@ async def list_schedules(
         ScheduleInfo(
             id=s.id,
             workflow_name=s.workflow_name,
+            schedule_name=s.schedule_name,
             schedule_type=_proto_schedule_type_to_str(s.schedule_type),
             cron_expression=s.cron_expression if s.cron_expression else None,
             interval_seconds=s.interval_seconds if s.interval_seconds else None,
+            jitter_seconds=s.jitter_seconds if s.jitter_seconds else None,
             status=_proto_schedule_status_to_str(s.status),
             next_run_at=_parse_iso_datetime(s.next_run_at),
             last_run_at=_parse_iso_datetime(s.last_run_at),
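
Taken together, schedules are now keyed by (workflow, schedule_name), and a duplicate registration surfaces as a typed error. A usage sketch, assuming MyWorkflow is any @workflow-decorated class and that the helpers live in rappel.schedule and rappel.exceptions as the imports above suggest:

from datetime import timedelta

from rappel.exceptions import ScheduleAlreadyExistsError
from rappel.schedule import delete_schedule, pause_schedule, schedule_workflow

try:
    schedule_id = await schedule_workflow(
        MyWorkflow,                    # any @workflow-decorated class
        schedule_name="nightly-batch",
        schedule="0 0 * * *",
        jitter=timedelta(minutes=5),   # each run jittered within a 5-minute window
        inputs={"batch_size": 100},
    )
except ScheduleAlreadyExistsError:
    pass  # same (workflow, schedule_name) already registered on the server

# Lifecycle operations now take the schedule name as a required keyword.
await pause_schedule(MyWorkflow, schedule_name="nightly-batch")
await delete_schedule(MyWorkflow, schedule_name="nightly-batch")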
rappel/serialization.py CHANGED
@@ -1,7 +1,13 @@
 import dataclasses
 import importlib
 import traceback
+from base64 import b64encode
+from datetime import date, datetime, time, timedelta
+from decimal import Decimal
+from enum import Enum
+from pathlib import PurePath
 from typing import Any
+from uuid import UUID
 
 from google.protobuf import json_format, struct_pb2
 from pydantic import BaseModel
@@ -55,12 +61,68 @@ def _to_argument_value(value: Any) -> pb2.WorkflowArgumentValue:
     if isinstance(value, PRIMITIVE_TYPES):
         argument.primitive.CopyFrom(_serialize_primitive(value))
         return argument
+    if isinstance(value, UUID):
+        # Serialize UUID as string primitive
+        argument.primitive.CopyFrom(_serialize_primitive(str(value)))
+        return argument
+    if isinstance(value, datetime):
+        # Serialize datetime as ISO format string
+        argument.primitive.CopyFrom(_serialize_primitive(value.isoformat()))
+        return argument
+    if isinstance(value, date):
+        # Serialize date as ISO format string (must come after datetime check)
+        argument.primitive.CopyFrom(_serialize_primitive(value.isoformat()))
+        return argument
+    if isinstance(value, time):
+        # Serialize time as ISO format string
+        argument.primitive.CopyFrom(_serialize_primitive(value.isoformat()))
+        return argument
+    if isinstance(value, timedelta):
+        # Serialize timedelta as total seconds
+        argument.primitive.CopyFrom(_serialize_primitive(value.total_seconds()))
+        return argument
+    if isinstance(value, Decimal):
+        # Serialize Decimal as string to preserve precision
+        argument.primitive.CopyFrom(_serialize_primitive(str(value)))
+        return argument
+    if isinstance(value, Enum):
+        # Serialize Enum as its value
+        return _to_argument_value(value.value)
+    if isinstance(value, bytes):
+        # Serialize bytes as base64 string
+        argument.primitive.CopyFrom(_serialize_primitive(b64encode(value).decode("ascii")))
+        return argument
+    if isinstance(value, PurePath):
+        # Serialize Path as string
+        argument.primitive.CopyFrom(_serialize_primitive(str(value)))
+        return argument
+    if isinstance(value, (set, frozenset)):
+        # Serialize sets as lists
+        argument.list_value.SetInParent()
+        for item in value:
+            item_value = argument.list_value.items.add()
+            item_value.CopyFrom(_to_argument_value(item))
+        return argument
     if isinstance(value, BaseException):
         argument.exception.type = value.__class__.__name__
         argument.exception.module = value.__class__.__module__
         argument.exception.message = str(value)
         tb_text = "".join(traceback.format_exception(type(value), value, value.__traceback__))
         argument.exception.traceback = tb_text
+        # Include the exception class hierarchy (MRO) for proper except matching.
+        # This allows `except LookupError:` to catch KeyError, etc.
+        for cls in value.__class__.__mro__:
+            if cls is object:
+                continue  # Skip 'object' as it's not useful for exception matching
+            argument.exception.type_hierarchy.append(cls.__name__)
+        values = _serialize_exception_values(value)
+        for key, item in values.items():
+            entry = argument.exception.values.entries.add()
+            entry.key = key
+            try:
+                entry.value.CopyFrom(_to_argument_value(item))
+            except TypeError:
+                entry.value.CopyFrom(_to_argument_value(str(item)))
         return argument
     if _is_base_model(value):
         model_class = value.__class__
@@ -121,11 +183,16 @@ def _from_argument_value(argument: pb2.WorkflowArgumentValue) -> Any:
             data[entry.key] = _from_argument_value(entry.value)
         return _instantiate_serialized_model(module, name, data)
     if kind == "exception":
+        values: dict[str, Any] = {}
+        if argument.exception.HasField("values"):
+            for entry in argument.exception.values.entries:
+                values[entry.key] = _from_argument_value(entry.value)
         return {
             "type": argument.exception.type,
            "module": argument.exception.module,
             "message": argument.exception.message,
             "traceback": argument.exception.traceback,
+            "values": values,
         }
     if kind == "list_value":
         return [_from_argument_value(item) for item in argument.list_value.items]
@@ -141,12 +208,19 @@ def _from_argument_value(argument: pb2.WorkflowArgumentValue) -> Any:
 
 def _serialize_model_data(model: BaseModel) -> dict[str, Any]:
     if hasattr(model, "model_dump"):
-        return model.model_dump(mode="python")  # type: ignore[attr-defined]
+        return model.model_dump(mode="json")  # type: ignore[attr-defined]
     if hasattr(model, "dict"):
         return model.dict()  # type: ignore[attr-defined]
     return model.__dict__
 
 
+def _serialize_exception_values(exc: BaseException) -> dict[str, Any]:
+    values = dict(vars(exc))
+    if "args" not in values:
+        values["args"] = exc.args
+    return values
+
+
 def _serialize_primitive(value: Any) -> pb2.PrimitiveWorkflowArgument:
     primitive = pb2.PrimitiveWorkflowArgument()
     if value is None:
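
With these branches, a kwargs payload containing rich Python values can cross the protobuf boundary without custom encoders. A sketch of what each branch produces, assuming build_arguments_from_kwargs (used elsewhere in the package) funnels each value through _to_argument_value; import path assumed:

from datetime import datetime, timedelta
from decimal import Decimal
from pathlib import PurePath
from uuid import uuid4

from rappel.serialization import build_arguments_from_kwargs  # import path assumed

args = build_arguments_from_kwargs({
    "job_id": uuid4(),                     # UUID -> string primitive
    "started": datetime(2024, 1, 1),       # datetime -> "2024-01-01T00:00:00"
    "timeout": timedelta(minutes=5),       # timedelta -> 300.0 (total seconds)
    "price": Decimal("19.99"),             # Decimal -> "19.99" (precision preserved)
    "report": PurePath("out/report.csv"),  # path -> "out/report.csv"
    "tags": {"a", "b"},                    # set -> list value (order not preserved)
})

Note that deserialization stays type-driven: _from_argument_value hands back the wire forms (strings, floats, lists), and the action runner's coercion layer shown later in this diff restores the annotated types.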
rappel/workflow.py CHANGED
@@ -60,9 +60,34 @@ class Workflow:
     _ir_lock: ClassVar[RLock] = RLock()
     _workflow_version_id: ClassVar[Optional[str]] = None
 
-    async def run(self) -> Any:
+    async def run(self, *args: Any, _blocking: bool = True, **kwargs: Any) -> Any:
         raise NotImplementedError
 
+    @classmethod
+    def _normalize_run_inputs(cls, args: tuple[Any, ...], kwargs: dict[str, Any]) -> dict[str, Any]:
+        try:
+            run_impl = cls.__workflow_run_impl__  # type: ignore[attr-defined]
+        except AttributeError:
+            run_impl = cls.run
+        sig = inspect.signature(run_impl)
+        params = list(sig.parameters.keys())[1:]  # Skip 'self'
+
+        normalized = dict(kwargs)
+        for i, arg in enumerate(args):
+            if i < len(params):
+                normalized[params[i]] = arg
+
+        bound = sig.bind_partial(None, **normalized)
+        bound.apply_defaults()
+        return {key: value for key, value in bound.arguments.items() if key != "self"}
+
+    @classmethod
+    def _build_initial_context(
+        cls, args: tuple[Any, ...], kwargs: dict[str, Any]
+    ) -> pb2.WorkflowArguments:
+        initial_kwargs = cls._normalize_run_inputs(args, kwargs)
+        return build_arguments_from_kwargs(initial_kwargs)
+
     async def run_action(
         self,
         awaitable: Awaitable[TResult],
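
_normalize_run_inputs folds positional arguments into keyword form and applies the run() signature's defaults, so the serialized initial context is complete even for omitted parameters. An illustrative sketch calling the private classmethod directly (class and values hypothetical):

class MyWorkflow(Workflow):
    async def run(self, batch_size: int = 100, region: str = "us-east-1") -> None: ...

# Positional args map onto parameter names, then defaults fill the rest:
MyWorkflow._normalize_run_inputs((250,), {})
# -> {"batch_size": 250, "region": "us-east-1"}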
@@ -163,26 +188,20 @@ def workflow(cls: type[TWorkflow]) -> type[TWorkflow]:
         raise TypeError("workflow run() must be defined with 'async def'")
 
     @wraps(run_impl)
-    async def run_public(self: Workflow, *args: Any, **kwargs: Any) -> Any:
+    async def run_public(self: Workflow, *args: Any, _blocking: bool = True, **kwargs: Any) -> Any:
         if _running_under_pytest():
             cls.workflow_ir()
             return await run_impl(self, *args, **kwargs)
 
-        # Get the signature of run() to map positional args to parameter names
-        sig = inspect.signature(run_impl)
-        params = list(sig.parameters.keys())[1:]  # Skip 'self'
-
-        # Convert positional args to kwargs
-        for i, arg in enumerate(args):
-            if i < len(params):
-                kwargs[params[i]] = arg
-
-        # Serialize kwargs using common logic
-        initial_context = build_arguments_from_kwargs(kwargs)
+        initial_context = cls._build_initial_context(args, kwargs)
 
         payload = cls._build_registration_payload(initial_context)
         run_result = await bridge.run_instance(payload.SerializeToString())
         cls._workflow_version_id = run_result.workflow_version_id
+
+        if not _blocking:
+            return run_result.workflow_instance_id
+
         if _skip_wait_for_instance():
             logger.info(
                 "Skipping wait_for_instance for workflow %s due to RAPPEL_SKIP_WAIT_FOR_INSTANCE",
@@ -6,8 +6,13 @@ action dispatch commands from the Rust scheduler.
 
 import asyncio
 import dataclasses
+from base64 import b64decode
 from dataclasses import dataclass
-from typing import Any, Dict, get_type_hints
+from datetime import date, datetime, time, timedelta
+from decimal import Decimal
+from pathlib import Path, PurePath
+from typing import Any, Dict, get_args, get_origin, get_type_hints
+from uuid import UUID
 
 from pydantic import BaseModel
 
@@ -45,6 +50,87 @@ def _is_dataclass_type(cls: type) -> bool:
     return dataclasses.is_dataclass(cls) and isinstance(cls, type)
 
 
+def _coerce_primitive(value: Any, target_type: type) -> Any:
+    """Coerce a value to a primitive type based on target_type.
+
+    Handles conversion of serialized values (strings, floats) back to their
+    native Python types (UUID, datetime, etc.).
+    """
+    # Handle None
+    if value is None:
+        return None
+
+    # UUID from string
+    if target_type is UUID:
+        if isinstance(value, UUID):
+            return value
+        if isinstance(value, str):
+            return UUID(value)
+        return value
+
+    # datetime from ISO string
+    if target_type is datetime:
+        if isinstance(value, datetime):
+            return value
+        if isinstance(value, str):
+            return datetime.fromisoformat(value)
+        return value
+
+    # date from ISO string
+    if target_type is date:
+        if isinstance(value, date):
+            return value
+        if isinstance(value, str):
+            return date.fromisoformat(value)
+        return value
+
+    # time from ISO string
+    if target_type is time:
+        if isinstance(value, time):
+            return value
+        if isinstance(value, str):
+            return time.fromisoformat(value)
+        return value
+
+    # timedelta from total seconds
+    if target_type is timedelta:
+        if isinstance(value, timedelta):
+            return value
+        if isinstance(value, (int, float)):
+            return timedelta(seconds=value)
+        return value
+
+    # Decimal from string
+    if target_type is Decimal:
+        if isinstance(value, Decimal):
+            return value
+        if isinstance(value, (str, int, float)):
+            return Decimal(str(value))
+        return value
+
+    # bytes from base64 string
+    if target_type is bytes:
+        if isinstance(value, bytes):
+            return value
+        if isinstance(value, str):
+            return b64decode(value)
+        return value
+
+    # Path from string
+    if target_type is Path or target_type is PurePath:
+        if isinstance(value, PurePath):
+            return value
+        if isinstance(value, str):
+            return Path(value)
+        return value
+
+    return value
+
+
+# Types that can be coerced from serialized form
+COERCIBLE_TYPES = (UUID, datetime, date, time, timedelta, Decimal, bytes, Path, PurePath)
+
+
 def _coerce_dict_to_model(value: Any, target_type: type) -> Any:
     """Convert a dict to a Pydantic model or dataclass if needed.
 
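Each branch of _coerce_primitive mirrors a serializer branch in rappel/serialization.py: the wire form maps back to the annotated type. A few illustrative pairs, calling the private helper directly (values hypothetical):

from datetime import timedelta
from decimal import Decimal
from uuid import UUID

assert _coerce_primitive("550e8400-e29b-41d4-a716-446655440000", UUID) == UUID("550e8400-e29b-41d4-a716-446655440000")
assert _coerce_primitive(300.0, timedelta) == timedelta(minutes=5)
assert _coerce_primitive("19.99", Decimal) == Decimal("19.99")
# Already-native or unrecognized values pass through unchanged.
assert _coerce_primitive(b"raw", bytes) == b"raw"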
@@ -67,12 +153,76 @@ def _coerce_dict_to_model(value: Any, target_type: type) -> Any:
     return value
 
 
+def _coerce_value(value: Any, target_type: type) -> Any:
+    """Coerce a value to the target type.
+
+    Handles:
+    - Primitive types (UUID, datetime, etc.)
+    - Pydantic models and dataclasses (from dicts)
+    - Generic collections like list[UUID], set[datetime]
+    """
+    # Handle None
+    if value is None:
+        return None
+
+    # Check for coercible primitive types
+    if isinstance(target_type, type) and issubclass(target_type, COERCIBLE_TYPES):
+        return _coerce_primitive(value, target_type)
+
+    # Check for Pydantic models or dataclasses
+    if isinstance(value, dict):
+        coerced = _coerce_dict_to_model(value, target_type)
+        if coerced is not value:
+            return coerced
+
+    # Handle generic types like list[UUID], set[datetime]
+    origin = get_origin(target_type)
+    if origin is not None:
+        args = get_args(target_type)
+
+        # Handle list[T]
+        if origin is list and isinstance(value, list) and args:
+            item_type = args[0]
+            return [_coerce_value(item, item_type) for item in value]
+
+        # Handle set[T] (serialized as list)
+        if origin is set and isinstance(value, list) and args:
+            item_type = args[0]
+            return {_coerce_value(item, item_type) for item in value}
+
+        # Handle frozenset[T] (serialized as list)
+        if origin is frozenset and isinstance(value, list) and args:
+            item_type = args[0]
+            return frozenset(_coerce_value(item, item_type) for item in value)
+
+        # Handle tuple[T, ...] (serialized as list)
+        if origin is tuple and isinstance(value, (list, tuple)) and args:
+            # Variable length tuple like tuple[int, ...]
+            if len(args) == 2 and args[1] is ...:
+                item_type = args[0]
+                return tuple(_coerce_value(item, item_type) for item in value)
+            # Fixed length tuple like tuple[int, str, UUID]
+            return tuple(
+                _coerce_value(item, item_type) for item, item_type in zip(value, args, strict=False)
+            )
+
+        # Handle dict[K, V]
+        if origin is dict and isinstance(value, dict) and len(args) == 2:
+            key_type, val_type = args
+            return {
+                _coerce_value(k, key_type): _coerce_value(v, val_type) for k, v in value.items()
+            }
+
+    return value
+
+
 def _coerce_kwargs_to_type_hints(handler: Any, kwargs: Dict[str, Any]) -> Dict[str, Any]:
-    """Coerce dict kwargs to Pydantic models or dataclasses based on type hints.
+    """Coerce kwargs to expected types based on handler's type hints.
 
-    When the IR converts a Pydantic model or dataclass constructor call to a dict,
-    the action runner needs to convert that dict back to the expected type based
-    on the handler's type annotations.
+    Handles:
+    - Pydantic models and dataclasses (from dicts)
+    - Primitive types like UUID, datetime, Decimal, etc.
+    - Generic collections like list[UUID], dict[str, datetime]
     """
     try:
         type_hints = get_type_hints(handler)
@@ -84,7 +234,7 @@ def _coerce_kwargs_to_type_hints(handler: Any, kwargs: Dict[str, Any]) -> Dict[str, Any]:
     for key, value in kwargs.items():
         if key in type_hints:
             target_type = type_hints[key]
-            coerced[key] = _coerce_dict_to_model(value, target_type)
+            coerced[key] = _coerce_value(value, target_type)
         else:
             coerced[key] = value
     return coerced
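
End to end, the handler's annotations drive deserialization: wire-format kwargs (strings, floats, lists) come back as the annotated types, including inside generic containers. An illustrative sketch calling the module's private helper directly (handler and values hypothetical):

from datetime import datetime, timedelta
from uuid import UUID

async def process_batch(job_id: UUID, started: datetime, timeout: timedelta, ids: list[UUID]) -> None: ...

wire_kwargs = {
    "job_id": "550e8400-e29b-41d4-a716-446655440000",  # UUID serialized as string
    "started": "2024-01-01T00:00:00",                  # datetime as ISO string
    "timeout": 300.0,                                  # timedelta as total seconds
    "ids": ["550e8400-e29b-41d4-a716-446655440000"],   # list[UUID] as list of strings
}

coerced = _coerce_kwargs_to_type_hints(process_batch, wire_kwargs)
assert isinstance(coerced["job_id"], UUID)
assert coerced["timeout"] == timedelta(minutes=5)
assert all(isinstance(i, UUID) for i in coerced["ids"])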