hatchet-sdk 1.16.4__py3-none-any.whl → 1.17.0__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.

Potentially problematic release: this version of hatchet-sdk might be problematic.
Files changed (38)
  1. hatchet_sdk/__init__.py +2 -1
  2. hatchet_sdk/clients/events.py +5 -2
  3. hatchet_sdk/clients/rest/__init__.py +32 -0
  4. hatchet_sdk/clients/rest/api/__init__.py +1 -0
  5. hatchet_sdk/clients/rest/api/webhook_api.py +1551 -0
  6. hatchet_sdk/clients/rest/models/__init__.py +31 -0
  7. hatchet_sdk/clients/rest/models/v1_create_webhook_request.py +215 -0
  8. hatchet_sdk/clients/rest/models/v1_create_webhook_request_api_key.py +126 -0
  9. hatchet_sdk/clients/rest/models/v1_create_webhook_request_api_key_all_of_auth_type.py +82 -0
  10. hatchet_sdk/clients/rest/models/v1_create_webhook_request_base.py +98 -0
  11. hatchet_sdk/clients/rest/models/v1_create_webhook_request_basic_auth.py +126 -0
  12. hatchet_sdk/clients/rest/models/v1_create_webhook_request_basic_auth_all_of_auth_type.py +82 -0
  13. hatchet_sdk/clients/rest/models/v1_create_webhook_request_hmac.py +126 -0
  14. hatchet_sdk/clients/rest/models/v1_create_webhook_request_hmac_all_of_auth_type.py +82 -0
  15. hatchet_sdk/clients/rest/models/v1_event.py +7 -0
  16. hatchet_sdk/clients/rest/models/v1_webhook.py +126 -0
  17. hatchet_sdk/clients/rest/models/v1_webhook_api_key_auth.py +90 -0
  18. hatchet_sdk/clients/rest/models/v1_webhook_auth_type.py +38 -0
  19. hatchet_sdk/clients/rest/models/v1_webhook_basic_auth.py +86 -0
  20. hatchet_sdk/clients/rest/models/v1_webhook_hmac_algorithm.py +39 -0
  21. hatchet_sdk/clients/rest/models/v1_webhook_hmac_auth.py +115 -0
  22. hatchet_sdk/clients/rest/models/v1_webhook_hmac_encoding.py +38 -0
  23. hatchet_sdk/clients/rest/models/v1_webhook_list.py +110 -0
  24. hatchet_sdk/clients/rest/models/v1_webhook_receive200_response.py +83 -0
  25. hatchet_sdk/clients/rest/models/v1_webhook_source_name.py +38 -0
  26. hatchet_sdk/context/context.py +31 -2
  27. hatchet_sdk/exceptions.py +70 -5
  28. hatchet_sdk/hatchet.py +29 -11
  29. hatchet_sdk/rate_limit.py +1 -21
  30. hatchet_sdk/runnables/task.py +109 -19
  31. hatchet_sdk/runnables/workflow.py +23 -8
  32. hatchet_sdk/utils/typing.py +27 -0
  33. hatchet_sdk/worker/runner/runner.py +27 -19
  34. hatchet_sdk/worker/runner/utils/capture_logs.py +24 -11
  35. {hatchet_sdk-1.16.4.dist-info → hatchet_sdk-1.17.0.dist-info}/METADATA +2 -3
  36. {hatchet_sdk-1.16.4.dist-info → hatchet_sdk-1.17.0.dist-info}/RECORD +38 -19
  37. {hatchet_sdk-1.16.4.dist-info → hatchet_sdk-1.17.0.dist-info}/WHEEL +0 -0
  38. {hatchet_sdk-1.16.4.dist-info → hatchet_sdk-1.17.0.dist-info}/entry_points.txt +0 -0
hatchet_sdk/hatchet.py CHANGED
@@ -3,7 +3,7 @@ import logging
  from collections.abc import Callable
  from datetime import timedelta
  from functools import cached_property
- from typing import Any, cast, overload
+ from typing import Any, Concatenate, ParamSpec, cast, overload
 
  from hatchet_sdk import Context, DurableContext
  from hatchet_sdk.client import Client
@@ -40,6 +40,8 @@ from hatchet_sdk.utils.timedelta_to_expression import Duration
  from hatchet_sdk.utils.typing import CoroutineLike
  from hatchet_sdk.worker.worker import LifespanFn, Worker
 
+ P = ParamSpec("P")
+
 
  class Hatchet:
      """
@@ -346,7 +348,7 @@ class Hatchet:
          backoff_max_seconds: int | None = None,
          default_filters: list[DefaultFilter] | None = None,
      ) -> Callable[
-         [Callable[[EmptyModel, Context], R | CoroutineLike[R]]],
+         [Callable[Concatenate[EmptyModel, Context, P], R | CoroutineLike[R]]],
          Standalone[EmptyModel, R],
      ]: ...
 
@@ -372,7 +374,7 @@ class Hatchet:
          backoff_max_seconds: int | None = None,
          default_filters: list[DefaultFilter] | None = None,
      ) -> Callable[
-         [Callable[[TWorkflowInput, Context], R | CoroutineLike[R]]],
+         [Callable[Concatenate[TWorkflowInput, Context, P], R | CoroutineLike[R]]],
          Standalone[TWorkflowInput, R],
      ]: ...
 
@@ -398,11 +400,11 @@ class Hatchet:
          default_filters: list[DefaultFilter] | None = None,
      ) -> (
          Callable[
-             [Callable[[EmptyModel, Context], R | CoroutineLike[R]]],
+             [Callable[Concatenate[EmptyModel, Context, P], R | CoroutineLike[R]]],
              Standalone[EmptyModel, R],
          ]
          | Callable[
-             [Callable[[TWorkflowInput, Context], R | CoroutineLike[R]]],
+             [Callable[Concatenate[TWorkflowInput, Context, P], R | CoroutineLike[R]]],
              Standalone[TWorkflowInput, R],
          ]
      ):
@@ -447,7 +449,9 @@ class Hatchet:
          """
 
          def inner(
-             func: Callable[[TWorkflowInput, Context], R | CoroutineLike[R]],
+             func: Callable[
+                 Concatenate[TWorkflowInput, Context, P], R | CoroutineLike[R]
+             ],
          ) -> Standalone[TWorkflowInput, R]:
              inferred_name = name or func.__name__
 
@@ -518,7 +522,7 @@ class Hatchet:
          backoff_max_seconds: int | None = None,
          default_filters: list[DefaultFilter] | None = None,
      ) -> Callable[
-         [Callable[[EmptyModel, DurableContext], R | CoroutineLike[R]]],
+         [Callable[Concatenate[EmptyModel, DurableContext, P], R | CoroutineLike[R]]],
          Standalone[EmptyModel, R],
      ]: ...
 
@@ -544,7 +548,11 @@ class Hatchet:
          backoff_max_seconds: int | None = None,
          default_filters: list[DefaultFilter] | None = None,
      ) -> Callable[
-         [Callable[[TWorkflowInput, DurableContext], R | CoroutineLike[R]]],
+         [
+             Callable[
+                 Concatenate[TWorkflowInput, DurableContext, P], R | CoroutineLike[R]
+             ]
+         ],
          Standalone[TWorkflowInput, R],
      ]: ...
 
@@ -570,11 +578,19 @@ class Hatchet:
          default_filters: list[DefaultFilter] | None = None,
      ) -> (
          Callable[
-             [Callable[[EmptyModel, DurableContext], R | CoroutineLike[R]]],
+             [
+                 Callable[
+                     Concatenate[EmptyModel, DurableContext, P], R | CoroutineLike[R]
+                 ]
+             ],
              Standalone[EmptyModel, R],
          ]
          | Callable[
-             [Callable[[TWorkflowInput, DurableContext], R | CoroutineLike[R]]],
+             [
+                 Callable[
+                     Concatenate[TWorkflowInput, DurableContext, P], R | CoroutineLike[R]
+                 ]
+             ],
              Standalone[TWorkflowInput, R],
          ]
      ):
@@ -619,7 +635,9 @@ class Hatchet:
          """
 
          def inner(
-             func: Callable[[TWorkflowInput, DurableContext], R | CoroutineLike[R]],
+             func: Callable[
+                 Concatenate[TWorkflowInput, DurableContext, P], R | CoroutineLike[R]
+             ],
          ) -> Standalone[TWorkflowInput, R]:
              inferred_name = name or func.__name__
              workflow = Workflow[TWorkflowInput](
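
The practical effect of the `Concatenate`/`ParamSpec` rewrite above: `hatchet.task` and `hatchet.durable_task` no longer require decorated functions to take exactly `(input, ctx)`, so extra trailing parameters now type-check. A minimal sketch of what this permits; the `greeting` parameter and its default are illustrative, not from this diff:

```python
from hatchet_sdk import Context, EmptyModel, Hatchet

hatchet = Hatchet()

# Pre-1.17.0, the decorator's type only accepted (input, ctx).
# Concatenate[EmptyModel, Context, P] additionally allows trailing
# parameters, which the dependency injection added in this release builds on.
@hatchet.task(name="greet")
def greet(input: EmptyModel, ctx: Context, greeting: str = "Hello") -> str:
    return greeting
```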
hatchet_sdk/rate_limit.py CHANGED
@@ -1,20 +1,10 @@
  from enum import Enum
 
- from celpy import CELEvalError, Environment  # type: ignore
  from pydantic import BaseModel, model_validator
 
  from hatchet_sdk.contracts.v1.workflows_pb2 import CreateTaskRateLimit
 
 
- def validate_cel_expression(expr: str) -> bool:
-     env = Environment()
-     try:
-         env.compile(expr)
-         return True
-     except CELEvalError:
-         return False
-
-
  class RateLimitDuration(str, Enum):
      SECOND = "SECOND"
      MINUTE = "MINUTE"
@@ -72,17 +62,7 @@ class RateLimit(BaseModel):
          if self.dynamic_key and self.static_key:
              raise ValueError("Cannot have both static key and dynamic key set")
 
-         if self.dynamic_key and not validate_cel_expression(self.dynamic_key):
-             raise ValueError(f"Invalid CEL expression: {self.dynamic_key}")
-
-         if not isinstance(self.units, int) and not validate_cel_expression(self.units):
-             raise ValueError(f"Invalid CEL expression: {self.units}")
-
-         if (
-             self.limit
-             and not isinstance(self.limit, int)
-             and not validate_cel_expression(self.limit)
-         ):
+         if self.limit and not isinstance(self.limit, int):
              raise ValueError(f"Invalid CEL expression: {self.limit}")
 
          if self.dynamic_key and not self.limit:
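
Behaviorally, this removal means the SDK no longer compiles `dynamic_key`, `units`, or `limit` CEL expressions locally (the `celpy` dependency is gone): a malformed expression is now rejected by the Hatchet engine rather than at model construction. A sketch of a dynamic rate limit that previously went through `validate_cel_expression`; the key expression is illustrative:

```python
from hatchet_sdk.rate_limit import RateLimit

# As of 1.17.0 this CEL expression is sent to the engine unvalidated;
# typos surface at registration/execution time instead of here.
rate_limit = RateLimit(dynamic_key="input.user_id", units=1, limit=10)
```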
hatchet_sdk/runnables/task.py CHANGED
@@ -1,5 +1,21 @@
+ import asyncio
  from collections.abc import Callable
- from typing import TYPE_CHECKING, Any, Generic, cast, get_type_hints
+ from inspect import Parameter, iscoroutinefunction, signature
+ from typing import (
+     TYPE_CHECKING,
+     Annotated,
+     Any,
+     Concatenate,
+     Generic,
+     ParamSpec,
+     TypeVar,
+     cast,
+     get_args,
+     get_origin,
+     get_type_hints,
+ )
+
+ from pydantic import BaseModel, ConfigDict
 
  from hatchet_sdk.conditions import (
      Action,
@@ -18,6 +34,7 @@ from hatchet_sdk.contracts.v1.workflows_pb2 import (
      CreateTaskRateLimit,
      DesiredWorkerLabels,
  )
+ from hatchet_sdk.exceptions import InvalidDependencyError
  from hatchet_sdk.runnables.types import (
      ConcurrencyExpression,
      EmptyModel,
@@ -25,7 +42,6 @@ from hatchet_sdk.runnables.types import (
      StepType,
      TWorkflowInput,
      is_async_fn,
-     is_durable_sync_fn,
      is_sync_fn,
  )
  from hatchet_sdk.utils.timedelta_to_expression import Duration, timedelta_to_expr
@@ -41,16 +57,45 @@ from hatchet_sdk.worker.runner.utils.capture_logs import AsyncLogSender
  if TYPE_CHECKING:
      from hatchet_sdk.runnables.workflow import Workflow
 
+ T = TypeVar("T")
+ P = ParamSpec("P")
+
+
+ class Depends(Generic[T, TWorkflowInput]):
+     def __init__(
+         self, fn: Callable[[TWorkflowInput, Context], T | CoroutineLike[T]]
+     ) -> None:
+         sig = signature(fn)
+         params = list(sig.parameters.values())
+
+         if len(params) != 2:
+             raise InvalidDependencyError(
+                 f"Dependency function {fn.__name__} must have exactly two parameters: input and ctx."
+             )
+
+         self.fn = fn
+
+
+ class DependencyToInject(BaseModel):
+     model_config = ConfigDict(arbitrary_types_allowed=True)
+
+     name: str
+     value: Any
+
 
  class Task(Generic[TWorkflowInput, R]):
      def __init__(
          self,
          _fn: (
-             Callable[[TWorkflowInput, Context], R | CoroutineLike[R]]
-             | Callable[[TWorkflowInput, Context], AwaitableLike[R]]
+             Callable[Concatenate[TWorkflowInput, Context, P], R | CoroutineLike[R]]
+             | Callable[Concatenate[TWorkflowInput, Context, P], AwaitableLike[R]]
              | (
-                 Callable[[TWorkflowInput, DurableContext], R | CoroutineLike[R]]
-                 | Callable[[TWorkflowInput, DurableContext], AwaitableLike[R]]
+                 Callable[
+                     Concatenate[TWorkflowInput, DurableContext, P], R | CoroutineLike[R]
+                 ]
+                 | Callable[
+                     Concatenate[TWorkflowInput, DurableContext, P], AwaitableLike[R]
+                 ]
              )
          ),
          is_durable: bool,
@@ -100,33 +145,74 @@ class Task(Generic[TWorkflowInput, R]):
              step_output=return_type if is_basemodel_subclass(return_type) else None,
          )
 
-     def call(self, ctx: Context | DurableContext) -> R:
+     async def _parse_parameter(
+         self,
+         name: str,
+         param: Parameter,
+         input: TWorkflowInput,
+         ctx: Context | DurableContext,
+     ) -> DependencyToInject | None:
+         annotation = param.annotation
+
+         if get_origin(annotation) is Annotated:
+             args = get_args(annotation)
+
+             if len(args) < 2:
+                 return None
+
+             metadata = args[1:]
+
+             for item in metadata:
+                 if isinstance(item, Depends):
+                     if iscoroutinefunction(item.fn):
+                         return DependencyToInject(
+                             name=name, value=await item.fn(input, ctx)
+                         )
+
+                     return DependencyToInject(
+                         name=name, value=await asyncio.to_thread(item.fn, input, ctx)
+                     )
+
+         return None
+
+     async def _unpack_dependencies(
+         self, ctx: Context | DurableContext
+     ) -> dict[str, Any]:
+         sig = signature(self.fn)
+         input = self.workflow._get_workflow_input(ctx)
+         return {
+             parsed.name: parsed.value
+             for n, p in sig.parameters.items()
+             if (parsed := await self._parse_parameter(n, p, input, ctx)) is not None
+         }
+
+     def call(
+         self, ctx: Context | DurableContext, dependencies: dict[str, Any] | None = None
+     ) -> R:
          if self.is_async_function:
              raise TypeError(f"{self.name} is not a sync function. Use `acall` instead.")
 
          workflow_input = self.workflow._get_workflow_input(ctx)
+         dependencies = dependencies or {}
 
-         if self.is_durable:
-             fn = cast(Callable[[TWorkflowInput, DurableContext], R], self.fn)
-             if is_durable_sync_fn(fn):
-                 return fn(workflow_input, cast(DurableContext, ctx))
-         else:
-             fn = cast(Callable[[TWorkflowInput, Context], R], self.fn)
-             if is_sync_fn(fn):
-                 return fn(workflow_input, ctx)
+         if is_sync_fn(self.fn):  # type: ignore
+             return self.fn(workflow_input, cast(Context, ctx), **dependencies)  # type: ignore
 
          raise TypeError(f"{self.name} is not a sync function. Use `acall` instead.")
 
-     async def aio_call(self, ctx: Context | DurableContext) -> R:
+     async def aio_call(
+         self, ctx: Context | DurableContext, dependencies: dict[str, Any] | None = None
+     ) -> R:
          if not self.is_async_function:
              raise TypeError(
                  f"{self.name} is not an async function. Use `call` instead."
              )
 
          workflow_input = self.workflow._get_workflow_input(ctx)
+         dependencies = dependencies or {}
 
          if is_async_fn(self.fn):  # type: ignore
-             return await self.fn(workflow_input, cast(Context, ctx))  # type: ignore
+             return await self.fn(workflow_input, cast(Context, ctx), **dependencies)  # type: ignore
 
          raise TypeError(f"{self.name} is not an async function. Use `call` instead.")
 
@@ -255,6 +341,7 @@ class Task(Generic[TWorkflowInput, R]):
          parent_outputs: dict[str, JSONSerializableMapping] | None = None,
          retry_count: int = 0,
          lifespan: Any = None,
+         dependencies: dict[str, Any] | None = None,
      ) -> R:
          """
          Mimic the execution of a task. This method is intended to be used to unit test
@@ -266,6 +353,7 @@ class Task(Generic[TWorkflowInput, R]):
          :param parent_outputs: Outputs from parent tasks, if any. This is useful for mimicking DAG functionality. For instance, if you have a task `step_2` that has a `parent` which is `step_1`, you can pass `parent_outputs={"step_1": {"result": "Hello, world!"}}` to `step_2.mock_run()` to be able to access `ctx.task_output(step_1)` in `step_2`.
          :param retry_count: The number of times the task has been retried.
          :param lifespan: The lifespan to be used in the task, which is useful if one was set on the worker. This will allow you to access `ctx.lifespan` inside of your task.
+         :param dependencies: Dependencies to be injected into the task. This is useful for tasks that have dependencies defined using `Depends`. **IMPORTANT**: You must pass the dependencies _directly_, **not** the `Depends` objects themselves. For example, if you have a task that has a dependency `config: Annotated[str, Depends(get_config)]`, you should pass `dependencies={"config": "config_value"}` to `aio_mock_run`.
 
          :return: The output of the task.
          :raises TypeError: If the task is an async function and `mock_run` is called, or if the task is a sync function and `aio_mock_run` is called.
@@ -280,7 +368,7 @@ class Task(Generic[TWorkflowInput, R]):
              input, additional_metadata, parent_outputs, retry_count, lifespan
          )
 
-         return self.call(ctx)
+         return self.call(ctx, dependencies)
 
      async def aio_mock_run(
          self,
@@ -289,6 +377,7 @@ class Task(Generic[TWorkflowInput, R]):
          parent_outputs: dict[str, JSONSerializableMapping] | None = None,
          retry_count: int = 0,
          lifespan: Any = None,
+         dependencies: dict[str, Any] | None = None,
      ) -> R:
          """
          Mimic the execution of a task. This method is intended to be used to unit test
@@ -300,6 +389,7 @@ class Task(Generic[TWorkflowInput, R]):
          :param parent_outputs: Outputs from parent tasks, if any. This is useful for mimicking DAG functionality. For instance, if you have a task `step_2` that has a `parent` which is `step_1`, you can pass `parent_outputs={"step_1": {"result": "Hello, world!"}}` to `step_2.mock_run()` to be able to access `ctx.task_output(step_1)` in `step_2`.
          :param retry_count: The number of times the task has been retried.
          :param lifespan: The lifespan to be used in the task, which is useful if one was set on the worker. This will allow you to access `ctx.lifespan` inside of your task.
+         :param dependencies: Dependencies to be injected into the task. This is useful for tasks that have dependencies defined using `Depends`. **IMPORTANT**: You must pass the dependencies _directly_, **not** the `Depends` objects themselves. For example, if you have a task that has a dependency `config: Annotated[str, Depends(get_config)]`, you should pass `dependencies={"config": "config_value"}` to `aio_mock_run`.
 
          :return: The output of the task.
          :raises TypeError: If the task is an async function and `mock_run` is called, or if the task is a sync function and `aio_mock_run` is called.
@@ -318,4 +408,4 @@ class Task(Generic[TWorkflowInput, R]):
              lifespan,
          )
 
-         return await self.aio_call(ctx)
+         return await self.aio_call(ctx, dependencies)
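
Taken together, these additions implement FastAPI-style dependency injection: `Depends` wraps a two-argument provider, `_parse_parameter` finds `Annotated[..., Depends(...)]` parameters (awaiting async providers, running sync ones via `asyncio.to_thread`), and `call`/`aio_call` splat the resolved values into the task. A hedged usage sketch; the import path follows this diff's file layout (`Depends` may also be re-exported at the package root), and all names are illustrative:

```python
from typing import Annotated

from hatchet_sdk import Context, EmptyModel, Hatchet
from hatchet_sdk.runnables.task import Depends

hatchet = Hatchet()

# Providers must take exactly (input, ctx); Depends.__init__ raises
# InvalidDependencyError otherwise.
def get_config(input: EmptyModel, ctx: Context) -> str:
    return "config_value"

@hatchet.task(name="uses-config")
def uses_config(
    input: EmptyModel,
    ctx: Context,
    config: Annotated[str, Depends(get_config)],
) -> str:
    return config

# Unit tests pass resolved values directly, never Depends objects:
result = uses_config.mock_run(
    input=EmptyModel(), dependencies={"config": "config_value"}
)
assert result == "config_value"
```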
hatchet_sdk/runnables/workflow.py CHANGED
@@ -5,8 +5,10 @@ from functools import cached_property
  from typing import (
      TYPE_CHECKING,
      Any,
+     Concatenate,
      Generic,
      Literal,
+     ParamSpec,
      TypeVar,
      cast,
      get_type_hints,
@@ -60,6 +62,7 @@ if TYPE_CHECKING:
 
 
  T = TypeVar("T")
+ P = ParamSpec("P")
 
 
  def fall_back_to_default(value: T, param_default: T, fallback_value: T | None) -> T:
@@ -800,7 +803,7 @@ class Workflow(BaseWorkflow[TWorkflowInput]):
          skip_if: list[Condition | OrGroup] | None = None,
          cancel_if: list[Condition | OrGroup] | None = None,
      ) -> Callable[
-         [Callable[[TWorkflowInput, Context], R | CoroutineLike[R]]],
+         [Callable[Concatenate[TWorkflowInput, Context, P], R | CoroutineLike[R]]],
          Task[TWorkflowInput, R],
      ]:
          """
@@ -845,7 +848,9 @@ class Workflow(BaseWorkflow[TWorkflowInput]):
          )
 
          def inner(
-             func: Callable[[TWorkflowInput, Context], R | CoroutineLike[R]],
+             func: Callable[
+                 Concatenate[TWorkflowInput, Context, P], R | CoroutineLike[R]
+             ],
          ) -> Task[TWorkflowInput, R]:
              task = Task(
                  _fn=func,
@@ -892,7 +897,11 @@ class Workflow(BaseWorkflow[TWorkflowInput]):
          skip_if: list[Condition | OrGroup] | None = None,
          cancel_if: list[Condition | OrGroup] | None = None,
      ) -> Callable[
-         [Callable[[TWorkflowInput, DurableContext], R | CoroutineLike[R]]],
+         [
+             Callable[
+                 Concatenate[TWorkflowInput, DurableContext, P], R | CoroutineLike[R]
+             ]
+         ],
          Task[TWorkflowInput, R],
      ]:
          """
@@ -941,7 +950,9 @@ class Workflow(BaseWorkflow[TWorkflowInput]):
          )
 
          def inner(
-             func: Callable[[TWorkflowInput, DurableContext], R | CoroutineLike[R]],
+             func: Callable[
+                 Concatenate[TWorkflowInput, DurableContext, P], R | CoroutineLike[R]
+             ],
          ) -> Task[TWorkflowInput, R]:
              task = Task(
                  _fn=func,
@@ -983,7 +994,7 @@ class Workflow(BaseWorkflow[TWorkflowInput]):
          backoff_max_seconds: int | None = None,
          concurrency: list[ConcurrencyExpression] | None = None,
      ) -> Callable[
-         [Callable[[TWorkflowInput, Context], R | CoroutineLike[R]]],
+         [Callable[Concatenate[TWorkflowInput, Context, P], R | CoroutineLike[R]]],
          Task[TWorkflowInput, R],
      ]:
          """
@@ -1009,7 +1020,9 @@ class Workflow(BaseWorkflow[TWorkflowInput]):
          """
 
          def inner(
-             func: Callable[[TWorkflowInput, Context], R | CoroutineLike[R]],
+             func: Callable[
+                 Concatenate[TWorkflowInput, Context, P], R | CoroutineLike[R]
+             ],
          ) -> Task[TWorkflowInput, R]:
              task = Task(
                  is_durable=False,
@@ -1051,7 +1064,7 @@ class Workflow(BaseWorkflow[TWorkflowInput]):
          backoff_max_seconds: int | None = None,
          concurrency: list[ConcurrencyExpression] | None = None,
      ) -> Callable[
-         [Callable[[TWorkflowInput, Context], R | CoroutineLike[R]]],
+         [Callable[Concatenate[TWorkflowInput, Context, P], R | CoroutineLike[R]]],
          Task[TWorkflowInput, R],
      ]:
          """
@@ -1077,7 +1090,9 @@ class Workflow(BaseWorkflow[TWorkflowInput]):
          """
 
          def inner(
-             func: Callable[[TWorkflowInput, Context], R | CoroutineLike[R]],
+             func: Callable[
+                 Concatenate[TWorkflowInput, Context, P], R | CoroutineLike[R]
+             ],
          ) -> Task[TWorkflowInput, R]:
              task = Task(
                  is_durable=False,
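
The same `Concatenate` treatment applies to `Workflow.task`, `Workflow.durable_task`, and the on-failure/on-success variants, so workflow-scoped tasks can use injection too, including async providers (which `_parse_parameter` awaits directly). A brief sketch under the same assumptions as above; all names are illustrative:

```python
from typing import Annotated

from hatchet_sdk import Context, EmptyModel, Hatchet
from hatchet_sdk.runnables.task import Depends

hatchet = Hatchet()
wf = hatchet.workflow(name="injected-workflow")

# Async providers are awaited by the runner before the task executes.
async def get_db_url(input: EmptyModel, ctx: Context) -> str:
    return "postgres://localhost:5432/app"

@wf.task()
def query(
    input: EmptyModel,
    ctx: Context,
    db_url: Annotated[str, Depends(get_db_url)],
) -> dict[str, str]:
    return {"db_url": db_url}
```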
hatchet_sdk/utils/typing.py CHANGED
@@ -1,5 +1,6 @@
  import sys
  from collections.abc import Awaitable, Coroutine, Generator
+ from enum import Enum
  from typing import Any, Literal, TypeAlias, TypeGuard, TypeVar
 
  from pydantic import BaseModel
@@ -31,3 +32,29 @@ else:
 
  STOP_LOOP_TYPE = Literal["STOP_LOOP"]
  STOP_LOOP: STOP_LOOP_TYPE = "STOP_LOOP"  # Sentinel object to stop the loop
+
+
+ class LogLevel(str, Enum):
+     DEBUG = "DEBUG"
+     INFO = "INFO"
+     WARN = "WARN"
+     ERROR = "ERROR"
+
+     @classmethod
+     def from_levelname(cls, levelname: str) -> "LogLevel":
+         levelname = levelname.upper()
+
+         if levelname == "DEBUG":
+             return cls.DEBUG
+
+         if levelname == "INFO":
+             return cls.INFO
+
+         if levelname in ["WARNING", "WARN"]:
+             return cls.WARN
+
+         if levelname == "ERROR":
+             return cls.ERROR
+
+         # fall back to INFO
+         return cls.INFO
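
`from_levelname` normalizes stdlib logging level names, including the `WARNING` spelling, and deliberately falls back to `INFO` for anything unrecognized:

```python
from hatchet_sdk.utils.typing import LogLevel

assert LogLevel.from_levelname("warning") is LogLevel.WARN
assert LogLevel.from_levelname("DEBUG") is LogLevel.DEBUG
assert LogLevel.from_levelname("CRITICAL") is LogLevel.INFO  # unknown names fall back to INFO
```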
hatchet_sdk/worker/runner/runner.py CHANGED
@@ -166,22 +166,22 @@ class Runner:
          except Exception as e:
              should_not_retry = isinstance(e, NonRetryableException)
 
-             exc = TaskRunError.from_exception(e)
+             exc = TaskRunError.from_exception(e, action.step_run_id)
 
              # This except is coming from the application itself, so we want to send that to the Hatchet instance
              self.event_queue.put(
                  ActionEvent(
                      action=action,
                      type=STEP_EVENT_TYPE_FAILED,
-                     payload=exc.serialize(),
+                     payload=exc.serialize(include_metadata=True),
                      should_not_retry=should_not_retry,
                  )
              )
 
-             log_with_level = logger.info if should_not_retry else logger.error
+             log_with_level = logger.info if should_not_retry else logger.exception
 
              log_with_level(
-                 f"failed step run: {action.action_id}/{action.step_run_id}\n{exc.serialize()}"
+                 f"failed step run: {action.action_id}/{action.step_run_id}\n{exc.serialize(include_metadata=False)}"
              )
 
              return
@@ -198,18 +198,18 @@ class Runner:
                  )
              )
          except IllegalTaskOutputError as e:
-             exc = TaskRunError.from_exception(e)
+             exc = TaskRunError.from_exception(e, action.step_run_id)
              self.event_queue.put(
                  ActionEvent(
                      action=action,
                      type=STEP_EVENT_TYPE_FAILED,
-                     payload=exc.serialize(),
+                     payload=exc.serialize(include_metadata=True),
                      should_not_retry=False,
                  )
              )
 
-             logger.error(
-                 f"failed step run: {action.action_id}/{action.step_run_id}\n{exc.serialize()}"
+             logger.exception(
+                 f"failed step run: {action.action_id}/{action.step_run_id}\n{exc.serialize(include_metadata=False)}"
              )
 
              return
@@ -230,19 +230,19 @@ class Runner:
          try:
              output = task.result()
          except Exception as e:
-             exc = TaskRunError.from_exception(e)
+             exc = TaskRunError.from_exception(e, action.step_run_id)
 
              self.event_queue.put(
                  ActionEvent(
                      action=action,
                      type=GROUP_KEY_EVENT_TYPE_FAILED,
-                     payload=exc.serialize(),
+                     payload=exc.serialize(include_metadata=True),
                      should_not_retry=False,
                  )
              )
 
-             logger.error(
-                 f"failed step run: {action.action_id}/{action.step_run_id}\n{exc.serialize()}"
+             logger.exception(
+                 f"failed step run: {action.action_id}/{action.step_run_id}\n{exc.serialize(include_metadata=False)}"
              )
 
              return
@@ -259,18 +259,18 @@ class Runner:
                  )
              )
          except IllegalTaskOutputError as e:
-             exc = TaskRunError.from_exception(e)
+             exc = TaskRunError.from_exception(e, action.step_run_id)
              self.event_queue.put(
                  ActionEvent(
                      action=action,
                      type=STEP_EVENT_TYPE_FAILED,
-                     payload=exc.serialize(),
+                     payload=exc.serialize(include_metadata=True),
                      should_not_retry=False,
                  )
              )
 
-             logger.error(
-                 f"failed step run: {action.action_id}/{action.step_run_id}\n{exc.serialize()}"
+             logger.exception(
+                 f"failed step run: {action.action_id}/{action.step_run_id}\n{exc.serialize(include_metadata=False)}"
              )
 
              return
@@ -280,12 +280,16 @@ class Runner:
          return inner_callback
 
      def thread_action_func(
-         self, ctx: Context, task: Task[TWorkflowInput, R], action: Action
+         self,
+         ctx: Context,
+         task: Task[TWorkflowInput, R],
+         action: Action,
+         dependencies: dict[str, Any],
      ) -> R:
          if action.step_run_id or action.get_group_key_run_id:
              self.threads[action.key] = current_thread()
 
-         return task.call(ctx)
+         return task.call(ctx, dependencies)
 
      # We wrap all actions in an async func
      async def async_wrapped_action_func(
@@ -300,9 +304,12 @@ class Runner:
          ctx_action_key.set(action.key)
          ctx_additional_metadata.set(action.additional_metadata)
 
+         dependencies = await task._unpack_dependencies(ctx)
+
          try:
              if task.is_async_function:
-                 return await task.aio_call(ctx)
+                 return await task.aio_call(ctx, dependencies)
+
              pfunc = functools.partial(
                  # we must copy the context vars to the new thread, as only asyncio natively supports
                  # contextvars
@@ -343,6 +350,7 @@ class Runner:
              ctx,
              task,
              action,
+             dependencies,
          )
 
          loop = asyncio.get_event_loop()
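
Beyond threading `dependencies` through to `call`/`aio_call`, the runner changes adjust failure reporting: `TaskRunError.from_exception` now receives the `step_run_id`, payloads sent to the engine use `serialize(include_metadata=True)` while local logs omit the metadata, and retryable failures log via `logger.exception` (with traceback) instead of `logger.error`. Non-retryable failures are still logged at info level. A sketch of triggering that path; the `NonRetryableException` import location is assumed from this diff's references, and the task name and retry count are illustrative:

```python
from hatchet_sdk import Context, EmptyModel, Hatchet
from hatchet_sdk.exceptions import NonRetryableException

hatchet = Hatchet()

@hatchet.task(name="no-retry", retries=3)
def no_retry(input: EmptyModel, ctx: Context) -> None:
    # The runner checks isinstance(e, NonRetryableException): the failure is
    # reported with should_not_retry=True and logged at INFO, not as an error,
    # even though retries are configured.
    raise NonRetryableException("permanent failure")
```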