rappel-0.4.1-py3-none-win_amd64.whl → rappel-0.8.1-py3-none-win_amd64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of rappel might be problematic.

proto/messages_pb2.pyi CHANGED
@@ -648,10 +648,23 @@ class WorkflowErrorValue(google.protobuf.message.Message):
     MODULE_FIELD_NUMBER: builtins.int
     MESSAGE_FIELD_NUMBER: builtins.int
     TRACEBACK_FIELD_NUMBER: builtins.int
+    VALUES_FIELD_NUMBER: builtins.int
+    TYPE_HIERARCHY_FIELD_NUMBER: builtins.int
     type: builtins.str
     module: builtins.str
     message: builtins.str
     traceback: builtins.str
+    @property
+    def values(self) -> Global___WorkflowDictArgument: ...
+    @property
+    def type_hierarchy(
+        self,
+    ) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]:
+        """Exception class hierarchy (MRO) for proper except matching.
+        e.g., for KeyError: ["KeyError", "LookupError", "Exception", "BaseException"]
+        This allows `except LookupError:` to catch KeyError.
+        """
+
     def __init__(
         self,
         *,
@@ -659,11 +672,25 @@ class WorkflowErrorValue(google.protobuf.message.Message):
         module: builtins.str = ...,
         message: builtins.str = ...,
         traceback: builtins.str = ...,
+        values: Global___WorkflowDictArgument | None = ...,
+        type_hierarchy: collections.abc.Iterable[builtins.str] | None = ...,
     ) -> None: ...
+    def HasField(self, field_name: typing.Literal["values", b"values"]) -> builtins.bool: ...
     def ClearField(
         self,
         field_name: typing.Literal[
-            "message", b"message", "module", b"module", "traceback", b"traceback", "type", b"type"
+            "message",
+            b"message",
+            "module",
+            b"module",
+            "traceback",
+            b"traceback",
+            "type",
+            b"type",
+            "type_hierarchy",
+            b"type_hierarchy",
+            "values",
+            b"values",
         ],
     ) -> None: ...
 
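
Why `type_hierarchy` matters: previously the error value carried only the concrete exception type, so a remote KeyError could never be caught by a broader `except LookupError:` block on the receiving side. Shipping the MRO as a list of names lets the catching side match any ancestor class. A minimal sketch (not code from the package) of how such a list can be built and matched by name:

    def exception_hierarchy(exc: BaseException) -> list[str]:
        # KeyError("k") -> ["KeyError", "LookupError", "Exception", "BaseException"],
        # matching the docstring example above (object is omitted).
        return [cls.__name__ for cls in type(exc).__mro__ if cls is not object]

    def hierarchy_matches(hierarchy: list[str], handler: type[BaseException]) -> bool:
        # `except handler:` should catch the remote error iff the handler's
        # name appears anywhere in the transmitted MRO list.
        return handler.__name__ in hierarchy

    assert hierarchy_matches(exception_hierarchy(KeyError("k")), LookupError)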
@@ -875,17 +902,21 @@ class ScheduleDefinition(google.protobuf.message.Message):
     TYPE_FIELD_NUMBER: builtins.int
     CRON_EXPRESSION_FIELD_NUMBER: builtins.int
     INTERVAL_SECONDS_FIELD_NUMBER: builtins.int
+    JITTER_SECONDS_FIELD_NUMBER: builtins.int
     type: Global___ScheduleType.ValueType
     cron_expression: builtins.str
     """For cron: the cron expression (e.g., "0 * * * *")"""
     interval_seconds: builtins.int
     """For interval: duration in seconds"""
+    jitter_seconds: builtins.int
+    """Optional: jitter window in seconds (random 0..jitter_seconds)"""
     def __init__(
         self,
         *,
         type: Global___ScheduleType.ValueType = ...,
         cron_expression: builtins.str = ...,
         interval_seconds: builtins.int = ...,
+        jitter_seconds: builtins.int = ...,
     ) -> None: ...
     def ClearField(
         self,
@@ -894,6 +925,8 @@ class ScheduleDefinition(google.protobuf.message.Message):
             b"cron_expression",
             "interval_seconds",
             b"interval_seconds",
+            "jitter_seconds",
+            b"jitter_seconds",
             "type",
             b"type",
         ],
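
The new `jitter_seconds` field delays each firing by a random amount in `[0, jitter_seconds]`, spreading out workflows that would otherwise fire at the same instant (e.g., many schedules sharing one cron expression). A sketch of the implied computation, assuming a uniform draw as the "random 0..jitter_seconds" comment suggests (the server-side scheduler is not part of this diff):

    import random

    def apply_jitter(next_run_at: float, jitter_seconds: int) -> float:
        # Delay the nominal fire time by a random offset in [0, jitter_seconds].
        return next_run_at + random.uniform(0, jitter_seconds)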
@@ -909,7 +942,12 @@ class RegisterScheduleRequest(google.protobuf.message.Message):
     SCHEDULE_FIELD_NUMBER: builtins.int
     INPUTS_FIELD_NUMBER: builtins.int
     REGISTRATION_FIELD_NUMBER: builtins.int
+    SCHEDULE_NAME_FIELD_NUMBER: builtins.int
     workflow_name: builtins.str
+    schedule_name: builtins.str
+    """Required: unique name for this schedule. Allows multiple schedules per workflow
+    with different inputs. Must be unique within a workflow.
+    """
     @property
     def schedule(self) -> Global___ScheduleDefinition: ...
     @property
@@ -931,6 +969,7 @@ class RegisterScheduleRequest(google.protobuf.message.Message):
         schedule: Global___ScheduleDefinition | None = ...,
         inputs: Global___WorkflowArguments | None = ...,
         registration: Global___WorkflowRegistration | None = ...,
+        schedule_name: builtins.str = ...,
     ) -> None: ...
     def HasField(
         self,
@@ -947,6 +986,8 @@ class RegisterScheduleRequest(google.protobuf.message.Message):
             b"registration",
             "schedule",
             b"schedule",
+            "schedule_name",
+            b"schedule_name",
             "workflow_name",
             b"workflow_name",
         ],
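
With `schedule_name`, a single workflow can carry several schedules with different inputs. A hypothetical pair of requests (workflow and schedule names invented for illustration; the `type` field is omitted because the enum member names are not visible in this stub):

    from proto import messages_pb2 as pb2

    # Two registrations for the same workflow, distinguished by schedule_name.
    morning = pb2.RegisterScheduleRequest(
        workflow_name="send_digest",
        schedule_name="morning-digest",
        schedule=pb2.ScheduleDefinition(cron_expression="0 8 * * *", jitter_seconds=300),
    )
    evening = pb2.RegisterScheduleRequest(
        workflow_name="send_digest",
        schedule_name="evening-digest",
        schedule=pb2.ScheduleDefinition(cron_expression="0 20 * * *", jitter_seconds=300),
    )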
@@ -982,16 +1023,28 @@ class UpdateScheduleStatusRequest(google.protobuf.message.Message):
 
     WORKFLOW_NAME_FIELD_NUMBER: builtins.int
     STATUS_FIELD_NUMBER: builtins.int
+    SCHEDULE_NAME_FIELD_NUMBER: builtins.int
     workflow_name: builtins.str
     status: Global___ScheduleStatus.ValueType
+    schedule_name: builtins.str
+    """Required: name of the schedule to update."""
     def __init__(
         self,
         *,
         workflow_name: builtins.str = ...,
         status: Global___ScheduleStatus.ValueType = ...,
+        schedule_name: builtins.str = ...,
     ) -> None: ...
     def ClearField(
-        self, field_name: typing.Literal["status", b"status", "workflow_name", b"workflow_name"]
+        self,
+        field_name: typing.Literal[
+            "schedule_name",
+            b"schedule_name",
+            "status",
+            b"status",
+            "workflow_name",
+            b"workflow_name",
+        ],
     ) -> None: ...
 
 Global___UpdateScheduleStatusRequest: typing_extensions.TypeAlias = UpdateScheduleStatusRequest
@@ -1016,13 +1069,22 @@ class DeleteScheduleRequest(google.protobuf.message.Message):
     DESCRIPTOR: google.protobuf.descriptor.Descriptor
 
     WORKFLOW_NAME_FIELD_NUMBER: builtins.int
+    SCHEDULE_NAME_FIELD_NUMBER: builtins.int
     workflow_name: builtins.str
+    schedule_name: builtins.str
+    """Required: name of the schedule to delete."""
     def __init__(
         self,
         *,
         workflow_name: builtins.str = ...,
+        schedule_name: builtins.str = ...,
+    ) -> None: ...
+    def ClearField(
+        self,
+        field_name: typing.Literal[
+            "schedule_name", b"schedule_name", "workflow_name", b"workflow_name"
+        ],
     ) -> None: ...
-    def ClearField(self, field_name: typing.Literal["workflow_name", b"workflow_name"]) -> None: ...
 
 Global___DeleteScheduleRequest: typing_extensions.TypeAlias = DeleteScheduleRequest
 
@@ -1086,6 +1148,8 @@ class ScheduleInfo(google.protobuf.message.Message):
     LAST_INSTANCE_ID_FIELD_NUMBER: builtins.int
     CREATED_AT_FIELD_NUMBER: builtins.int
     UPDATED_AT_FIELD_NUMBER: builtins.int
+    SCHEDULE_NAME_FIELD_NUMBER: builtins.int
+    JITTER_SECONDS_FIELD_NUMBER: builtins.int
     id: builtins.str
     workflow_name: builtins.str
     schedule_type: Global___ScheduleType.ValueType
@@ -1104,6 +1168,10 @@ class ScheduleInfo(google.protobuf.message.Message):
     """ISO 8601 timestamp"""
     updated_at: builtins.str
     """ISO 8601 timestamp"""
+    schedule_name: builtins.str
+    """Name of this schedule (allows multiple per workflow)"""
+    jitter_seconds: builtins.int
+    """0 if no jitter configured"""
     def __init__(
         self,
         *,
@@ -1118,6 +1186,8 @@ class ScheduleInfo(google.protobuf.message.Message):
         last_instance_id: builtins.str = ...,
         created_at: builtins.str = ...,
         updated_at: builtins.str = ...,
+        schedule_name: builtins.str = ...,
+        jitter_seconds: builtins.int = ...,
     ) -> None: ...
     def ClearField(
         self,
@@ -1130,12 +1200,16 @@ class ScheduleInfo(google.protobuf.message.Message):
             b"id",
             "interval_seconds",
             b"interval_seconds",
+            "jitter_seconds",
+            b"jitter_seconds",
             "last_instance_id",
             b"last_instance_id",
             "last_run_at",
             b"last_run_at",
             "next_run_at",
             b"next_run_at",
+            "schedule_name",
+            b"schedule_name",
             "schedule_type",
             b"schedule_type",
             "status",
rappel/__init__.py CHANGED
@@ -10,7 +10,11 @@ from .actions import (
     serialize_result_payload,
 )
 from .dependencies import Depend, provide_dependencies
-from .exceptions import ExhaustedRetries, ExhaustedRetriesError
+from .exceptions import (
+    ExhaustedRetries,
+    ExhaustedRetriesError,
+    ScheduleAlreadyExistsError,
+)
 from .ir_builder import UnsupportedPatternError, build_workflow_ir
 from .registry import registry
 from .schedule import (
@@ -45,6 +49,7 @@ __all__ = [
     "bridge",
     "ExhaustedRetries",
     "ExhaustedRetriesError",
+    "ScheduleAlreadyExistsError",
     "UnsupportedPatternError",
     # Schedule functions
     "schedule_workflow",
rappel/actions.py CHANGED
@@ -1,9 +1,11 @@
 import inspect
 from dataclasses import dataclass
+from functools import wraps
 from typing import Any, Callable, Optional, TypeVar, overload
 
 from proto import messages_pb2 as pb2
 
+from .dependencies import provide_dependencies
 from .registry import AsyncAction, registry
 from .serialization import dumps, loads
 
@@ -13,7 +15,7 @@ TAsync = TypeVar("TAsync", bound=AsyncAction)
 @dataclass
 class ActionResultPayload:
     result: Any | None
-    error: dict[str, str] | None
+    error: dict[str, Any] | None
 
 
 def serialize_result_payload(value: Any) -> pb2.WorkflowArguments:
@@ -64,17 +66,42 @@ def action(
     *,
     name: Optional[str] = None,
 ) -> Callable[[TAsync], TAsync] | TAsync:
-    """Decorator for registering async actions."""
+    """Decorator for registering async actions.
+
+    Actions decorated with @action will automatically resolve Depend() markers
+    when called directly (e.g., during pytest runs where workflows bypass the
+    gRPC bridge).
+    """
 
     def decorator(target: TAsync) -> TAsync:
         if not inspect.iscoroutinefunction(target):
             raise TypeError(f"action '{target.__name__}' must be defined with 'async def'")
         action_name = name or target.__name__
         action_module = target.__module__
+
+        @wraps(target)
+        async def wrapper(*args: Any, **kwargs: Any) -> Any:
+            # Convert positional args to kwargs based on the signature
+            sig = inspect.signature(target)
+            params = list(sig.parameters.keys())
+            for i, arg in enumerate(args):
+                if i < len(params):
+                    kwargs[params[i]] = arg
+
+            # Resolve dependencies using the same mechanism as execute_action
+            async with provide_dependencies(target, kwargs) as call_kwargs:
+                return await target(**call_kwargs)
+
+        # Copy over the original function's attributes for introspection
+        wrapper.__wrapped__ = target  # type: ignore[attr-defined]
+        wrapper.__rappel_action_name__ = action_name  # type: ignore[attr-defined]
+        wrapper.__rappel_action_module__ = action_module  # type: ignore[attr-defined]
+
+        # Register the original function (not the wrapper) so execute_action
+        # doesn't double-resolve dependencies
         registry.register(action_module, action_name, target)
-        target.__rappel_action_name__ = action_name
-        target.__rappel_action_module__ = action_module
-        return target
+
+        return wrapper  # type: ignore[return-value]
 
     if func is not None:
         return decorator(func)
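
The practical effect of the new wrapper: an `@action` coroutine can be awaited directly (e.g., from pytest) and still have its `Depend()` parameters resolved, which previously only happened on the bridge's `execute_action` path. A sketch with a made-up dependency provider:

    from rappel import Depend
    from rappel.actions import action

    async def get_db() -> dict:
        return {"users": []}  # hypothetical provider

    @action
    async def list_users(db: dict = Depend(get_db)) -> list:
        return db["users"]

    # In a test, `await list_users()` returns [] because the wrapper resolves
    # Depend(get_db) via provide_dependencies before calling the original function.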
Binary file CHANGED
Binary file CHANGED
Binary file CHANGED
rappel/bridge.py CHANGED
@@ -8,7 +8,6 @@ from dataclasses import dataclass
 from pathlib import Path
 from threading import Lock, RLock
 from typing import AsyncIterator, Optional
-from urllib.parse import urlparse
 
 import grpc
 from grpc import aio  # type: ignore[attr-defined]
@@ -21,10 +20,11 @@ DEFAULT_HOST = "127.0.0.1"
 LOGGER = configure_logger("rappel.bridge")
 
 _PORT_LOCK = RLock()
-_CACHED_PORT: Optional[int] = None
+_CACHED_GRPC_PORT: Optional[int] = None
 _GRPC_TARGET: Optional[str] = None
 _GRPC_CHANNEL: Optional[aio.Channel] = None
 _GRPC_STUB: Optional[pb2_grpc.WorkflowServiceStub] = None
+_GRPC_LOOP: Optional[asyncio.AbstractEventLoop] = None
 _BOOT_MUTEX = Lock()
 _ASYNC_BOOT_LOCK: asyncio.Lock = asyncio.Lock()
 
@@ -45,29 +45,31 @@ def _boot_command() -> list[str]:
     return [binary]
 
 
-def _remember_port(port: int) -> int:
-    global _CACHED_PORT
+def _remember_grpc_port(port: int) -> int:
+    global _CACHED_GRPC_PORT
     with _PORT_LOCK:
-        _CACHED_PORT = port
+        _CACHED_GRPC_PORT = port
     return port
 
 
-def _cached_port() -> Optional[int]:
+def _cached_grpc_port() -> Optional[int]:
     with _PORT_LOCK:
-        return _CACHED_PORT
+        return _CACHED_GRPC_PORT
 
 
-def _env_port_override() -> Optional[int]:
-    override = os.environ.get("RAPPEL_SERVER_PORT")
+def _env_grpc_port_override() -> Optional[int]:
+    """Check for explicit gRPC port override via environment."""
+    override = os.environ.get("RAPPEL_BRIDGE_GRPC_PORT")
     if not override:
         return None
     try:
         return int(override)
     except ValueError as exc:  # pragma: no cover
-        raise RuntimeError(f"invalid RAPPEL_SERVER_PORT value: {override}") from exc
+        raise RuntimeError(f"invalid RAPPEL_BRIDGE_GRPC_PORT value: {override}") from exc
 
 
 def _boot_singleton_blocking() -> int:
+    """Boot the singleton and return the gRPC port."""
     command = _boot_command()
     with tempfile.NamedTemporaryFile(mode="w+", suffix=".txt") as f:
         output_file = Path(f.name)
@@ -99,89 +101,87 @@ def _boot_singleton_blocking() -> int:
         # pipe to the subprocess and therefore never correctly close the file descriptor and signal
         # exit process status to Python.
         port_str = output_file.read_text().strip()
-        port = int(port_str)
-        LOGGER.info("boot command reported singleton port %s", port)
-        return port
+        grpc_port = int(port_str)
+        LOGGER.info("boot command reported singleton gRPC port %s", grpc_port)
+        return grpc_port
     except (ValueError, FileNotFoundError) as exc:  # pragma: no cover
         raise RuntimeError(f"unable to read port from output file: {exc}") from exc
 
 
-def _resolve_port() -> int:
-    cached = _cached_port()
+def _resolve_grpc_port() -> int:
+    """Resolve the gRPC port, booting singleton if necessary."""
+    cached = _cached_grpc_port()
     if cached is not None:
         return cached
-    env_port = _env_port_override()
+    env_port = _env_grpc_port_override()
     if env_port is not None:
-        return _remember_port(env_port)
+        return _remember_grpc_port(env_port)
     with _BOOT_MUTEX:
-        cached = _cached_port()
+        cached = _cached_grpc_port()
         if cached is not None:
             return cached
         port = _boot_singleton_blocking()
-        return _remember_port(port)
+        return _remember_grpc_port(port)
 
 
-async def _ensure_port_async() -> int:
-    cached = _cached_port()
+async def _ensure_grpc_port_async() -> int:
+    """Ensure we have a gRPC port, booting singleton if necessary."""
+    cached = _cached_grpc_port()
     if cached is not None:
         return cached
-    env_port = _env_port_override()
+    env_port = _env_grpc_port_override()
     if env_port is not None:
-        return _remember_port(env_port)
+        return _remember_grpc_port(env_port)
     async with _ASYNC_BOOT_LOCK:
-        cached = _cached_port()
+        cached = _cached_grpc_port()
         if cached is not None:
             return cached
         loop = asyncio.get_running_loop()
         LOGGER.info("No cached singleton found, booting new instance")
         port = await loop.run_in_executor(None, _boot_singleton_blocking)
-        LOGGER.info("Singleton ready on port %s", port)
-        return _remember_port(port)
+        LOGGER.info("Singleton ready on gRPC port %s", port)
+        return _remember_grpc_port(port)
 
 
 @asynccontextmanager
 async def ensure_singleton() -> AsyncIterator[int]:
-    """Yield the HTTP port for the singleton server, booting it exactly once."""
-    port = await _ensure_port_async()
+    """Yield the gRPC port for the singleton server, booting it exactly once."""
+    port = await _ensure_grpc_port_async()
     yield port
 
 
 def _grpc_target() -> str:
-    explicit = os.environ.get("RAPPEL_GRPC_ADDR")
+    """Get the gRPC target address for the bridge server."""
+    # Check for explicit full address override
+    explicit = os.environ.get("RAPPEL_BRIDGE_GRPC_ADDR")
     if explicit:
         return explicit
-    http_url = os.environ.get("RAPPEL_SERVER_URL")
-    host_from_url = None
-    port_from_url = None
-    if http_url:
-        parsed = urlparse(http_url)
-        host_from_url = parsed.hostname
-        port_from_url = parsed.port
-    host = host_from_url or os.environ.get("RAPPEL_SERVER_HOST", DEFAULT_HOST)
-    port_override = os.environ.get("RAPPEL_GRPC_PORT")
-    if port_override:
-        try:
-            port = int(port_override)
-        except ValueError as exc:  # pragma: no cover
-            raise RuntimeError(f"invalid RAPPEL_GRPC_PORT value: {port_override}") from exc
-    else:
-        http_port = port_from_url if port_from_url is not None else _resolve_port()
-        port = http_port + 1
+
+    # Otherwise, use host + port
+    host = os.environ.get("RAPPEL_BRIDGE_GRPC_HOST", DEFAULT_HOST)
+    port = _resolve_grpc_port()
     return f"{host}:{port}"
 
 
 async def _workflow_stub() -> pb2_grpc.WorkflowServiceStub:
-    global _GRPC_TARGET, _GRPC_CHANNEL, _GRPC_STUB
+    global _GRPC_TARGET, _GRPC_CHANNEL, _GRPC_STUB, _GRPC_LOOP
     target = _grpc_target()
+    loop = asyncio.get_running_loop()
     channel_to_wait: Optional[aio.Channel] = None
     with _PORT_LOCK:
-        if _GRPC_STUB is not None and _GRPC_TARGET == target:
+        if (
+            _GRPC_STUB is not None
+            and _GRPC_TARGET == target
+            and _GRPC_LOOP is loop
+            and not loop.is_closed()
+        ):
             return _GRPC_STUB
         channel = aio.insecure_channel(target)
         stub = pb2_grpc.WorkflowServiceStub(channel)
         _GRPC_CHANNEL = channel
         _GRPC_STUB = stub
         _GRPC_TARGET = target
+        _GRPC_LOOP = loop
         channel_to_wait = channel
     if channel_to_wait is not None:
         await channel_to_wait.channel_ready()
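
The `_GRPC_LOOP` bookkeeping is needed because `grpc.aio` channels are bound to the event loop they were created on; if a different loop is running (pytest-asyncio, for example, creates a fresh loop per test), the cached stub must be discarded and rebuilt. The general pattern, as a standalone sketch:

    import asyncio
    from typing import Callable, Optional, Tuple

    _cached: Optional[Tuple[asyncio.AbstractEventLoop, object]] = None

    def loop_scoped(make: Callable[[], object]) -> object:
        # Reuse the cached resource only if the current running loop is the
        # one it was created on; otherwise rebuild it for the new loop.
        global _cached
        loop = asyncio.get_running_loop()
        if _cached is not None and _cached[0] is loop and not loop.is_closed():
            return _cached[1]
        resource = make()
        _cached = (loop, resource)
        return resource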
rappel/dependencies.py CHANGED
@@ -7,26 +7,40 @@ from typing import Annotated, Any, AsyncIterator, Callable, Optional, get_args,
 
 
 @dataclass(frozen=True)
-class Depend:
-    """Marker for dependency injection, mirroring FastAPI's Depends syntax."""
+class DependMarker:
+    """Internal marker for dependency injection."""
 
     dependency: Optional[Callable[..., Any]] = None
     use_cache: bool = True
 
 
-def _depend_from_annotation(annotation: Any) -> Depend | None:
+def Depend(  # noqa: N802
+    dependency: Optional[Callable[..., Any]] = None,
+    *,
+    use_cache: bool = True,
+) -> Any:
+    """Marker for dependency injection, mirroring FastAPI's Depends syntax.
+
+    Returns Any to allow usage as a default parameter value:
+        def my_func(service: MyService = Depend(get_service)):
+            ...
+    """
+    return DependMarker(dependency=dependency, use_cache=use_cache)
+
+
+def _depend_from_annotation(annotation: Any) -> DependMarker | None:
     origin = get_origin(annotation)
     if origin is not Annotated:
         return None
     metadata = get_args(annotation)[1:]
     for meta in metadata:
-        if isinstance(meta, Depend):
+        if isinstance(meta, DependMarker):
             return meta
     return None
 
 
-def _dependency_marker(parameter: inspect.Parameter) -> Depend | None:
-    if isinstance(parameter.default, Depend):
+def _dependency_marker(parameter: inspect.Parameter) -> DependMarker | None:
+    if isinstance(parameter.default, DependMarker):
         return parameter.default
     return _depend_from_annotation(parameter.annotation)
 
@@ -69,7 +83,7 @@ class _DependencyResolver:
             raise TypeError(f"Missing required parameter '{name}' for {func_name}")
         return call_kwargs
 
-    async def _resolve_dependency(self, marker: Depend) -> Any:
+    async def _resolve_dependency(self, marker: DependMarker) -> Any:
         dependency = marker.dependency
         if dependency is None:
             raise TypeError("Depend requires a dependency callable")
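
Turning `Depend` from a dataclass into a factory that returns `Any` is the same trick FastAPI uses for `Depends`: the call `Depend(get_settings)` can then sit as the default of a parameter annotated `dict` without a type-checker error, while the resolver still receives a `DependMarker`. Both supported spellings, shown with a made-up provider:

    from typing import Annotated
    from rappel.dependencies import Depend, DependMarker

    async def get_settings() -> dict:
        return {"debug": True}  # hypothetical provider

    async def by_default(settings: dict = Depend(get_settings)) -> dict:
        return settings

    async def by_annotation(settings: Annotated[dict, Depend(get_settings)]) -> dict:
        return settings

    assert isinstance(Depend(get_settings), DependMarker)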
rappel/exceptions.py CHANGED
@@ -9,3 +9,10 @@ class ExhaustedRetriesError(Exception):
 
 
 ExhaustedRetries = ExhaustedRetriesError
+
+
+class ScheduleAlreadyExistsError(Exception):
+    """Raised when a schedule name is already registered."""
+
+    def __init__(self, message: str | None = None) -> None:
+        super().__init__(message or "schedule already exists")
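
Callers can branch on the new exception when registering schedules; the default message comes from the `__init__` above:

    from rappel import ScheduleAlreadyExistsError

    try:
        raise ScheduleAlreadyExistsError()  # e.g., re-raised by schedule registration
    except ScheduleAlreadyExistsError as exc:
        assert str(exc) == "schedule already exists"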