hatchet-sdk 1.0.0__py3-none-any.whl → 1.0.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of hatchet-sdk might be problematic.

Files changed (73)
  1. hatchet_sdk/__init__.py +32 -16
  2. hatchet_sdk/client.py +25 -63
  3. hatchet_sdk/clients/admin.py +203 -142
  4. hatchet_sdk/clients/dispatcher/action_listener.py +42 -42
  5. hatchet_sdk/clients/dispatcher/dispatcher.py +18 -16
  6. hatchet_sdk/clients/durable_event_listener.py +327 -0
  7. hatchet_sdk/clients/rest/__init__.py +12 -1
  8. hatchet_sdk/clients/rest/api/log_api.py +258 -0
  9. hatchet_sdk/clients/rest/api/task_api.py +32 -6
  10. hatchet_sdk/clients/rest/api/workflow_runs_api.py +626 -0
  11. hatchet_sdk/clients/rest/models/__init__.py +12 -1
  12. hatchet_sdk/clients/rest/models/v1_log_line.py +94 -0
  13. hatchet_sdk/clients/rest/models/v1_log_line_level.py +39 -0
  14. hatchet_sdk/clients/rest/models/v1_log_line_list.py +110 -0
  15. hatchet_sdk/clients/rest/models/v1_task_summary.py +80 -64
  16. hatchet_sdk/clients/rest/models/v1_trigger_workflow_run_request.py +95 -0
  17. hatchet_sdk/clients/rest/models/v1_workflow_run_display_name.py +98 -0
  18. hatchet_sdk/clients/rest/models/v1_workflow_run_display_name_list.py +114 -0
  19. hatchet_sdk/clients/rest/models/workflow_run_shape_item_for_workflow_run_details.py +9 -4
  20. hatchet_sdk/clients/rest/models/workflow_runs_metrics.py +5 -1
  21. hatchet_sdk/clients/run_event_listener.py +0 -1
  22. hatchet_sdk/clients/v1/api_client.py +81 -0
  23. hatchet_sdk/context/context.py +86 -159
  24. hatchet_sdk/contracts/dispatcher_pb2_grpc.py +1 -1
  25. hatchet_sdk/contracts/events_pb2.py +2 -2
  26. hatchet_sdk/contracts/events_pb2_grpc.py +1 -1
  27. hatchet_sdk/contracts/v1/dispatcher_pb2.py +36 -0
  28. hatchet_sdk/contracts/v1/dispatcher_pb2.pyi +38 -0
  29. hatchet_sdk/contracts/v1/dispatcher_pb2_grpc.py +145 -0
  30. hatchet_sdk/contracts/v1/shared/condition_pb2.py +39 -0
  31. hatchet_sdk/contracts/v1/shared/condition_pb2.pyi +72 -0
  32. hatchet_sdk/contracts/v1/shared/condition_pb2_grpc.py +29 -0
  33. hatchet_sdk/contracts/v1/workflows_pb2.py +67 -0
  34. hatchet_sdk/contracts/v1/workflows_pb2.pyi +228 -0
  35. hatchet_sdk/contracts/v1/workflows_pb2_grpc.py +234 -0
  36. hatchet_sdk/contracts/workflows_pb2_grpc.py +1 -1
  37. hatchet_sdk/features/cron.py +91 -121
  38. hatchet_sdk/features/logs.py +16 -0
  39. hatchet_sdk/features/metrics.py +75 -0
  40. hatchet_sdk/features/rate_limits.py +45 -0
  41. hatchet_sdk/features/runs.py +221 -0
  42. hatchet_sdk/features/scheduled.py +114 -131
  43. hatchet_sdk/features/workers.py +41 -0
  44. hatchet_sdk/features/workflows.py +55 -0
  45. hatchet_sdk/hatchet.py +463 -165
  46. hatchet_sdk/opentelemetry/instrumentor.py +8 -13
  47. hatchet_sdk/rate_limit.py +33 -39
  48. hatchet_sdk/runnables/contextvars.py +12 -0
  49. hatchet_sdk/runnables/standalone.py +192 -0
  50. hatchet_sdk/runnables/task.py +144 -0
  51. hatchet_sdk/runnables/types.py +138 -0
  52. hatchet_sdk/runnables/workflow.py +771 -0
  53. hatchet_sdk/utils/aio_utils.py +0 -79
  54. hatchet_sdk/utils/proto_enums.py +0 -7
  55. hatchet_sdk/utils/timedelta_to_expression.py +23 -0
  56. hatchet_sdk/utils/typing.py +2 -2
  57. hatchet_sdk/v0/clients/rest_client.py +9 -0
  58. hatchet_sdk/v0/worker/action_listener_process.py +18 -2
  59. hatchet_sdk/waits.py +120 -0
  60. hatchet_sdk/worker/action_listener_process.py +64 -30
  61. hatchet_sdk/worker/runner/run_loop_manager.py +35 -26
  62. hatchet_sdk/worker/runner/runner.py +72 -55
  63. hatchet_sdk/worker/runner/utils/capture_logs.py +3 -11
  64. hatchet_sdk/worker/worker.py +155 -118
  65. hatchet_sdk/workflow_run.py +4 -5
  66. {hatchet_sdk-1.0.0.dist-info → hatchet_sdk-1.0.1.dist-info}/METADATA +1 -2
  67. {hatchet_sdk-1.0.0.dist-info → hatchet_sdk-1.0.1.dist-info}/RECORD +69 -43
  68. {hatchet_sdk-1.0.0.dist-info → hatchet_sdk-1.0.1.dist-info}/entry_points.txt +2 -0
  69. hatchet_sdk/clients/rest_client.py +0 -636
  70. hatchet_sdk/semver.py +0 -30
  71. hatchet_sdk/worker/runner/utils/error_with_traceback.py +0 -6
  72. hatchet_sdk/workflow.py +0 -527
  73. {hatchet_sdk-1.0.0.dist-info → hatchet_sdk-1.0.1.dist-info}/WHEEL +0 -0
hatchet_sdk/hatchet.py CHANGED
@@ -1,46 +1,39 @@
  import asyncio
  import logging
- from typing import Any, Callable, Type, TypeVar, cast, overload
+ from typing import Any, Callable, Type, cast, overload

- from hatchet_sdk.client import Client, new_client, new_client_raw
- from hatchet_sdk.clients.admin import AdminClient
+ from hatchet_sdk import Context, DurableContext
+ from hatchet_sdk.client import Client
  from hatchet_sdk.clients.dispatcher.dispatcher import DispatcherClient
  from hatchet_sdk.clients.events import EventClient
- from hatchet_sdk.clients.rest_client import RestApi
  from hatchet_sdk.clients.run_event_listener import RunEventListenerClient
  from hatchet_sdk.config import ClientConfig
- from hatchet_sdk.context.context import Context
- from hatchet_sdk.contracts.workflows_pb2 import DesiredWorkerLabels
  from hatchet_sdk.features.cron import CronClient
+ from hatchet_sdk.features.logs import LogsClient
+ from hatchet_sdk.features.metrics import MetricsClient
+ from hatchet_sdk.features.rate_limits import RateLimitsClient
+ from hatchet_sdk.features.runs import RunsClient
  from hatchet_sdk.features.scheduled import ScheduledClient
+ from hatchet_sdk.features.workers import WorkersClient
+ from hatchet_sdk.features.workflows import WorkflowsClient
  from hatchet_sdk.labels import DesiredWorkerLabel
  from hatchet_sdk.logger import logger
  from hatchet_sdk.rate_limit import RateLimit
- from hatchet_sdk.worker.worker import Worker
- from hatchet_sdk.workflow import (
+ from hatchet_sdk.runnables.standalone import Standalone
+ from hatchet_sdk.runnables.types import (
+     DEFAULT_EXECUTION_TIMEOUT,
+     DEFAULT_SCHEDULE_TIMEOUT,
      ConcurrencyExpression,
      EmptyModel,
-     Step,
-     StepType,
+     R,
      StickyStrategy,
-     Task,
+     TaskDefaults,
      TWorkflowInput,
      WorkflowConfig,
-     WorkflowDeclaration,
  )
-
- R = TypeVar("R")
-
-
- def transform_desired_worker_label(d: DesiredWorkerLabel) -> DesiredWorkerLabels:
-     value = d.value
-     return DesiredWorkerLabels(
-         strValue=value if not isinstance(value, int) else None,
-         intValue=value if isinstance(value, int) else None,
-         required=d.required,
-         weight=d.weight,
-         comparator=d.comparator,  # type: ignore[arg-type]
-     )
+ from hatchet_sdk.runnables.workflow import BaseWorkflow, Workflow
+ from hatchet_sdk.utils.timedelta_to_expression import Duration
+ from hatchet_sdk.worker.worker import Worker


  class Hatchet:
@@ -59,20 +52,6 @@ class Hatchet:
          rest (RestApi): Interface for REST API operations.
      """

-     _client: Client
-     cron: CronClient
-     scheduled: ScheduledClient
-
-     @classmethod
-     def from_environment(
-         cls, defaults: ClientConfig = ClientConfig(), **kwargs: Any
-     ) -> "Hatchet":
-         return cls(client=new_client(defaults), **kwargs)
-
-     @classmethod
-     def from_config(cls, config: ClientConfig, **kwargs: Any) -> "Hatchet":
-         return cls(client=new_client_raw(config), **kwargs)
-
      def __init__(
          self,
          debug: bool = False,
@@ -82,22 +61,52 @@ class Hatchet:
          """
          Initialize a new Hatchet instance.

-         Args:
-             debug (bool, optional): Enable debug logging. Defaults to False.
-             client (Optional[Client], optional): A pre-configured Client instance. Defaults to None.
-             config (ClientConfig, optional): Configuration for creating a new Client. Defaults to ClientConfig().
+         :param debug: Enable debug logging. Default: `False`
+         :type debug: bool
+
+         :param client: A pre-configured `Client` instance. Default: `None`.
+         :type client: Client | None
+
+         :param config: Configuration for creating a new Client. Defaults to ClientConfig()
+         :type config: ClientConfig
          """

          if debug:
              logger.setLevel(logging.DEBUG)

-         self._client = client if client else new_client(config, debug)
-         self.cron = CronClient(self._client)
-         self.scheduled = ScheduledClient(self._client)
+         self._client = client if client else Client(config=config, debug=debug)
+
+     @property
+     def cron(self) -> CronClient:
+         return self._client.cron
+
+     @property
+     def logs(self) -> LogsClient:
+         return self._client.logs
+
+     @property
+     def metrics(self) -> MetricsClient:
+         return self._client.metrics
+
+     @property
+     def rate_limits(self) -> RateLimitsClient:
+         return self._client.rate_limits
+
+     @property
+     def runs(self) -> RunsClient:
+         return self._client.runs
+
+     @property
+     def scheduled(self) -> ScheduledClient:
+         return self._client.scheduled

      @property
-     def admin(self) -> AdminClient:
-         return self._client.admin
+     def workers(self) -> WorkersClient:
+         return self._client.workers
+
+     @property
+     def workflows(self) -> WorkflowsClient:
+         return self._client.workflows

      @property
      def dispatcher(self) -> DispatcherClient:
@@ -107,10 +116,6 @@ class Hatchet:
      def event(self) -> EventClient:
          return self._client.event

-     @property
-     def rest(self) -> RestApi:
-         return self._client.rest
-
      @property
      def listener(self) -> RunEventListenerClient:
          return self._client.listener
@@ -123,185 +128,478 @@ class Hatchet:
      def tenant_id(self) -> str:
          return self._client.config.tenant_id

-     def step(
+     def worker(
+         self,
+         name: str,
+         slots: int = 100,
+         labels: dict[str, str | int] = {},
+         workflows: list[BaseWorkflow[Any]] = [],
+     ) -> Worker:
+         """
+         Create a Hatchet worker on which to run workflows.
+
+         :param name: The name of the worker.
+         :type name: str
+
+         :param slots: The number of workflow slots on the worker. In other words, the number of concurrent tasks the worker can run at any point in time. Default: 100
+         :type slots: int
+
+         :param labels: A dictionary of labels to assign to the worker. For more details, view examples on affinity and worker labels. Defaults to an empty dictionary (no labels)
+         :type labels: dict[str, str | int]
+
+         :param workflows: A list of workflows to register on the worker, as a shorthand for calling `register_workflow` on each or `register_workflows` on all of them. Defaults to an empty list
+         :type workflows: list[Workflow]
+
+
+         :returns: The created `Worker` object, which exposes an instance method `start` which can be called to start the worker.
+         :rtype: Worker
+         """
+
+         try:
+             loop = asyncio.get_running_loop()
+         except RuntimeError:
+             loop = None
+
+         return Worker(
+             name=name,
+             slots=slots,
+             labels=labels,
+             config=self._client.config,
+             debug=self._client.debug,
+             owned_loop=loop is None,
+             workflows=workflows,
+         )
+
+     @overload
+     def workflow(
          self,
-         name: str = "",
-         timeout: str = "60m",
-         parents: list[str] = [],
+         *,
+         name: str,
+         description: str | None = None,
+         input_validator: None = None,
+         on_events: list[str] = [],
+         on_crons: list[str] = [],
+         version: str | None = None,
+         sticky: StickyStrategy | None = None,
+         default_priority: int = 1,
+         concurrency: ConcurrencyExpression | None = None,
+         task_defaults: TaskDefaults = TaskDefaults(),
+     ) -> Workflow[EmptyModel]: ...
+
+     @overload
+     def workflow(
+         self,
+         *,
+         name: str,
+         description: str | None = None,
+         input_validator: Type[TWorkflowInput],
+         on_events: list[str] = [],
+         on_crons: list[str] = [],
+         version: str | None = None,
+         sticky: StickyStrategy | None = None,
+         default_priority: int = 1,
+         concurrency: ConcurrencyExpression | None = None,
+         task_defaults: TaskDefaults = TaskDefaults(),
+     ) -> Workflow[TWorkflowInput]: ...
+
+     def workflow(
+         self,
+         *,
+         name: str,
+         description: str | None = None,
+         input_validator: Type[TWorkflowInput] | None = None,
+         on_events: list[str] = [],
+         on_crons: list[str] = [],
+         version: str | None = None,
+         sticky: StickyStrategy | None = None,
+         default_priority: int = 1,
+         concurrency: ConcurrencyExpression | None = None,
+         task_defaults: TaskDefaults = TaskDefaults(),
+     ) -> Workflow[EmptyModel] | Workflow[TWorkflowInput]:
+         """
+         Define a Hatchet workflow, which can then declare `task`s and be `run`, `schedule`d, and so on.
+
+         :param name: The name of the workflow.
+         :type name: str
+
+         :param description: A description for the workflow. Default: None
+         :type description: str | None
+
+         :param version: A version for the workflow. Default: None
+         :type version: str | None
+
+         :param input_validator: A Pydantic model to use as a validator for the `input` to the tasks in the workflow. If no validator is provided, defaults to an `EmptyModel` under the hood. The `EmptyModel` is a Pydantic model with no fields specified, and with the `extra` config option set to `"allow"`.
+         :type input_validator: Type[BaseModel]
+
+         :param on_events: A list of event triggers for the workflow - events which cause the workflow to be run. Defaults to an empty list, meaning the workflow will not be run on any event pushes.
+         :type on_events: list[str]
+
+         :param on_crons: A list of cron triggers for the workflow. Defaults to an empty list, meaning the workflow will not be run on any cron schedules.
+         :type on_crons: list[str]
+
+         :param sticky: A sticky strategy for the workflow. Default: `None`
+         :type sticky: StickyStategy
+
+         :param default_priority: The priority of the workflow. Higher values will cause this workflow to have priority in scheduling over other, lower priority ones. Default: `1`
+         :type default_priority: int
+
+         :param concurrency: A concurrency object controlling the concurrency settings for this workflow.
+         :type concurrency: ConcurrencyExpression | None
+
+         :param task_defaults: A `TaskDefaults` object controlling the default task settings for this workflow.
+         :type task_defaults: TaskDefaults
+
+         :returns: The created `Workflow` object, which can be used to declare tasks, run the workflow, and so on.
+         :rtype: Workflow
+         """
+
+         return Workflow[TWorkflowInput](
+             WorkflowConfig(
+                 name=name,
+                 version=version,
+                 description=description,
+                 on_events=on_events,
+                 on_crons=on_crons,
+                 sticky=sticky,
+                 concurrency=concurrency,
+                 input_validator=input_validator
+                 or cast(Type[TWorkflowInput], EmptyModel),
+                 task_defaults=task_defaults,
+             ),
+             self,
+         )
+
+     @overload
+     def task(
+         self,
+         *,
+         name: str,
+         description: str | None = None,
+         input_validator: None = None,
+         on_events: list[str] = [],
+         on_crons: list[str] = [],
+         version: str | None = None,
+         sticky: StickyStrategy | None = None,
+         default_priority: int = 1,
+         concurrency: ConcurrencyExpression | None = None,
+         schedule_timeout: Duration = DEFAULT_SCHEDULE_TIMEOUT,
+         execution_timeout: Duration = DEFAULT_EXECUTION_TIMEOUT,
          retries: int = 0,
          rate_limits: list[RateLimit] = [],
          desired_worker_labels: dict[str, DesiredWorkerLabel] = {},
          backoff_factor: float | None = None,
          backoff_max_seconds: int | None = None,
-     ) -> Callable[[Callable[[Any, Context], R]], Step[R]]:
-         def inner(func: Callable[[Any, Context], R]) -> Step[R]:
-             return Step(
-                 fn=func,
-                 type=StepType.DEFAULT,
-                 name=name.lower() or str(func.__name__).lower(),
-                 timeout=timeout,
-                 parents=parents,
-                 retries=retries,
-                 rate_limits=[r for rate_limit in rate_limits if (r := rate_limit._req)],
-                 desired_worker_labels={
-                     key: transform_desired_worker_label(d)
-                     for key, d in desired_worker_labels.items()
-                 },
-                 backoff_factor=backoff_factor,
-                 backoff_max_seconds=backoff_max_seconds,
-             )
+     ) -> Callable[[Callable[[EmptyModel, Context], R]], Standalone[EmptyModel, R]]: ...

-         return inner
-
-     def on_failure_step(
+     @overload
+     def task(
          self,
-         name: str = "",
-         timeout: str = "60m",
-         parents: list[str] = [],
+         *,
+         name: str,
+         description: str | None = None,
+         input_validator: Type[TWorkflowInput],
+         on_events: list[str] = [],
+         on_crons: list[str] = [],
+         version: str | None = None,
+         sticky: StickyStrategy | None = None,
+         default_priority: int = 1,
+         concurrency: ConcurrencyExpression | None = None,
+         schedule_timeout: Duration = DEFAULT_SCHEDULE_TIMEOUT,
+         execution_timeout: Duration = DEFAULT_EXECUTION_TIMEOUT,
          retries: int = 0,
          rate_limits: list[RateLimit] = [],
          desired_worker_labels: dict[str, DesiredWorkerLabel] = {},
          backoff_factor: float | None = None,
          backoff_max_seconds: int | None = None,
-     ) -> Callable[[Callable[[Any, Context], R]], Step[R]]:
-         def inner(func: Callable[[Any, Context], R]) -> Step[R]:
-             return Step(
-                 fn=func,
-                 type=StepType.ON_FAILURE,
-                 name=name.lower() or str(func.__name__).lower(),
-                 timeout=timeout,
-                 parents=parents,
-                 retries=retries,
-                 rate_limits=[r for rate_limit in rate_limits if (r := rate_limit._req)],
-                 desired_worker_labels={
-                     key: transform_desired_worker_label(d)
-                     for key, d in desired_worker_labels.items()
-                 },
-                 backoff_factor=backoff_factor,
-                 backoff_max_seconds=backoff_max_seconds,
-             )
-
-         return inner
+     ) -> Callable[
+         [Callable[[TWorkflowInput, Context], R]], Standalone[TWorkflowInput, R]
+     ]: ...

      def task(
          self,
-         name: str = "",
+         *,
+         name: str,
+         description: str | None = None,
+         input_validator: Type[TWorkflowInput] | None = None,
          on_events: list[str] = [],
          on_crons: list[str] = [],
-         version: str = "",
-         timeout: str = "60m",
-         schedule_timeout: str = "5m",
+         version: str | None = None,
          sticky: StickyStrategy | None = None,
+         default_priority: int = 1,
+         concurrency: ConcurrencyExpression | None = None,
+         schedule_timeout: Duration = DEFAULT_SCHEDULE_TIMEOUT,
+         execution_timeout: Duration = DEFAULT_EXECUTION_TIMEOUT,
          retries: int = 0,
          rate_limits: list[RateLimit] = [],
          desired_worker_labels: dict[str, DesiredWorkerLabel] = {},
-         concurrency: ConcurrencyExpression | None = None,
-         on_failure: Task[Any, Any] | None = None,
-         default_priority: int = 1,
-         input_validator: Type[TWorkflowInput] | None = None,
          backoff_factor: float | None = None,
          backoff_max_seconds: int | None = None,
-     ) -> Callable[[Callable[[Context], R]], Task[R, TWorkflowInput]]:
-         def inner(func: Callable[[Context], R]) -> Task[R, TWorkflowInput]:
-             return Task[R, TWorkflowInput](
-                 func,
-                 hatchet=self,
+     ) -> (
+         Callable[[Callable[[EmptyModel, Context], R]], Standalone[EmptyModel, R]]
+         | Callable[
+             [Callable[[TWorkflowInput, Context], R]], Standalone[TWorkflowInput, R]
+         ]
+     ):
+         """
+         A decorator to transform a function into a standalone Hatchet task that runs as part of a workflow.
+
+         :param name: The name of the task. If not specified, defaults to the name of the function being wrapped by the `task` decorator.
+         :type name: str
+
+         :param description: An optional description for the task. Default: None
+         :type description: str | None
+
+         :param input_validator: A Pydantic model to use as a validator for the input to the task. If no validator is provided, defaults to an `EmptyModel`.
+         :type input_validator: Type[BaseModel]
+
+         :param on_events: A list of event triggers for the task - events which cause the task to be run. Defaults to an empty list.
+         :type on_events: list[str]
+
+         :param on_crons: A list of cron triggers for the task. Defaults to an empty list.
+         :type on_crons: list[str]
+
+         :param version: A version for the task. Default: None
+         :type version: str | None
+
+         :param sticky: A sticky strategy for the task. Default: None
+         :type sticky: StickyStrategy | None
+
+         :param default_priority: The priority of the task. Higher values will cause this task to have priority in scheduling. Default: 1
+         :type default_priority: int
+
+         :param concurrency: A concurrency object controlling the concurrency settings for this task.
+         :type concurrency: ConcurrencyExpression | None
+
+         :param schedule_timeout: The maximum time allowed for scheduling the task. Default: DEFAULT_SCHEDULE_TIMEOUT
+         :type schedule_timeout: Duration
+
+         :param execution_timeout: The maximum time allowed for executing the task. Default: DEFAULT_EXECUTION_TIMEOUT
+         :type execution_timeout: Duration
+
+         :param retries: The number of times to retry the task before failing. Default: 0
+         :type retries: int
+
+         :param rate_limits: A list of rate limit configurations for the task. Defaults to an empty list.
+         :type rate_limits: list[RateLimit]
+
+         :param desired_worker_labels: A dictionary of desired worker labels that determine to which worker the task should be assigned.
+         :type desired_worker_labels: dict[str, DesiredWorkerLabel]
+
+         :param backoff_factor: The backoff factor for controlling exponential backoff in retries. Default: None
+         :type backoff_factor: float | None
+
+         :param backoff_max_seconds: The maximum number of seconds to allow retries with exponential backoff to continue. Default: None
+         :type backoff_max_seconds: int | None
+
+         :returns: A decorator which creates a `Standalone` task object.
+         :rtype: Callable[[Callable[[TWorkflowInput, Context], R]], Standalone[TWorkflowInput, R]]
+         """
+
+         workflow = Workflow[TWorkflowInput](
+             WorkflowConfig(
                  name=name,
+                 version=version,
+                 description=description,
                  on_events=on_events,
                  on_crons=on_crons,
-                 version=version,
-                 timeout=timeout,
-                 schedule_timeout=schedule_timeout,
                  sticky=sticky,
-                 retries=retries,
-                 rate_limits=rate_limits,
-                 desired_worker_labels=desired_worker_labels,
                  concurrency=concurrency,
-                 on_failure=on_failure,
-                 default_priority=default_priority,
-                 input_validator=input_validator,
-                 backoff_factor=backoff_factor,
-                 backoff_max_seconds=backoff_max_seconds,
-             )
-
-         return inner
-
-     def worker(
-         self, name: str, max_runs: int | None = None, labels: dict[str, str | int] = {}
-     ) -> Worker:
-         try:
-             loop = asyncio.get_running_loop()
-         except RuntimeError:
-             loop = None
+                 input_validator=input_validator
+                 or cast(Type[TWorkflowInput], EmptyModel),
+             ),
+             self,
+         )

-         return Worker(
+         task_wrapper = workflow.task(
              name=name,
-             max_runs=max_runs,
-             labels=labels,
-             config=self._client.config,
-             debug=self._client.debug,
-             owned_loop=loop is None,
+             schedule_timeout=schedule_timeout,
+             execution_timeout=execution_timeout,
+             parents=[],
+             retries=retries,
+             rate_limits=rate_limits,
+             desired_worker_labels=desired_worker_labels,
+             backoff_factor=backoff_factor,
+             backoff_max_seconds=backoff_max_seconds,
+             concurrency=[concurrency] if concurrency else [],
          )

+         def inner(
+             func: Callable[[TWorkflowInput, Context], R]
+         ) -> Standalone[TWorkflowInput, R]:
+             created_task = task_wrapper(func)
+
+             return Standalone[TWorkflowInput, R](
+                 workflow=workflow,
+                 task=created_task,
+             )
+
+         return inner
+
      @overload
-     def declare_workflow(
+     def durable_task(
          self,
          *,
-         name: str = "",
+         name: str,
+         description: str | None = None,
+         input_validator: None = None,
          on_events: list[str] = [],
          on_crons: list[str] = [],
-         version: str = "",
-         timeout: str = "60m",
-         schedule_timeout: str = "5m",
+         version: str | None = None,
          sticky: StickyStrategy | None = None,
          default_priority: int = 1,
          concurrency: ConcurrencyExpression | None = None,
-         input_validator: None = None,
-     ) -> WorkflowDeclaration[EmptyModel]: ...
+         schedule_timeout: Duration = DEFAULT_SCHEDULE_TIMEOUT,
+         execution_timeout: Duration = DEFAULT_EXECUTION_TIMEOUT,
+         retries: int = 0,
+         rate_limits: list[RateLimit] = [],
+         desired_worker_labels: dict[str, DesiredWorkerLabel] = {},
+         backoff_factor: float | None = None,
+         backoff_max_seconds: int | None = None,
+     ) -> Callable[
+         [Callable[[EmptyModel, DurableContext], R]], Standalone[EmptyModel, R]
+     ]: ...

      @overload
-     def declare_workflow(
+     def durable_task(
          self,
          *,
-         name: str = "",
+         name: str,
+         description: str | None = None,
+         input_validator: Type[TWorkflowInput],
          on_events: list[str] = [],
          on_crons: list[str] = [],
-         version: str = "",
-         timeout: str = "60m",
-         schedule_timeout: str = "5m",
+         version: str | None = None,
          sticky: StickyStrategy | None = None,
          default_priority: int = 1,
          concurrency: ConcurrencyExpression | None = None,
-         input_validator: Type[TWorkflowInput],
-     ) -> WorkflowDeclaration[TWorkflowInput]: ...
+         schedule_timeout: Duration = DEFAULT_SCHEDULE_TIMEOUT,
+         execution_timeout: Duration = DEFAULT_EXECUTION_TIMEOUT,
+         retries: int = 0,
+         rate_limits: list[RateLimit] = [],
+         desired_worker_labels: dict[str, DesiredWorkerLabel] = {},
+         backoff_factor: float | None = None,
+         backoff_max_seconds: int | None = None,
+     ) -> Callable[
+         [Callable[[TWorkflowInput, DurableContext], R]], Standalone[TWorkflowInput, R]
+     ]: ...

-     def declare_workflow(
+     def durable_task(
          self,
          *,
-         name: str = "",
+         name: str,
+         description: str | None = None,
+         input_validator: Type[TWorkflowInput] | None = None,
          on_events: list[str] = [],
          on_crons: list[str] = [],
-         version: str = "",
-         timeout: str = "60m",
-         schedule_timeout: str = "5m",
+         version: str | None = None,
          sticky: StickyStrategy | None = None,
          default_priority: int = 1,
          concurrency: ConcurrencyExpression | None = None,
-         input_validator: Type[TWorkflowInput] | None = None,
-     ) -> WorkflowDeclaration[EmptyModel] | WorkflowDeclaration[TWorkflowInput]:
-         return WorkflowDeclaration[TWorkflowInput](
+         schedule_timeout: Duration = DEFAULT_SCHEDULE_TIMEOUT,
+         execution_timeout: Duration = DEFAULT_EXECUTION_TIMEOUT,
+         retries: int = 0,
+         rate_limits: list[RateLimit] = [],
+         desired_worker_labels: dict[str, DesiredWorkerLabel] = {},
+         backoff_factor: float | None = None,
+         backoff_max_seconds: int | None = None,
+     ) -> (
+         Callable[[Callable[[EmptyModel, DurableContext], R]], Standalone[EmptyModel, R]]
+         | Callable[
+             [Callable[[TWorkflowInput, DurableContext], R]],
+             Standalone[TWorkflowInput, R],
+         ]
+     ):
+         """
+         A decorator to transform a function into a standalone Hatchet _durable_ task that runs as part of a workflow.
+
+         :param name: The name of the task. If not specified, defaults to the name of the function being wrapped by the `task` decorator.
+         :type name: str
+
+         :param description: An optional description for the task. Default: None
+         :type description: str | None
+
+         :param input_validator: A Pydantic model to use as a validator for the input to the task. If no validator is provided, defaults to an `EmptyModel`.
+         :type input_validator: Type[BaseModel]
+
+         :param on_events: A list of event triggers for the task - events which cause the task to be run. Defaults to an empty list.
+         :type on_events: list[str]
+
+         :param on_crons: A list of cron triggers for the task. Defaults to an empty list.
+         :type on_crons: list[str]
+
+         :param version: A version for the task. Default: None
+         :type version: str | None
+
+         :param sticky: A sticky strategy for the task. Default: None
+         :type sticky: StickyStrategy | None
+
+         :param default_priority: The priority of the task. Higher values will cause this task to have priority in scheduling. Default: 1
+         :type default_priority: int
+
+         :param concurrency: A concurrency object controlling the concurrency settings for this task.
+         :type concurrency: ConcurrencyExpression | None
+
+         :param schedule_timeout: The maximum time allowed for scheduling the task. Default: DEFAULT_SCHEDULE_TIMEOUT
+         :type schedule_timeout: Duration
+
+         :param execution_timeout: The maximum time allowed for executing the task. Default: DEFAULT_EXECUTION_TIMEOUT
+         :type execution_timeout: Duration
+
+         :param retries: The number of times to retry the task before failing. Default: 0
+         :type retries: int
+
+         :param rate_limits: A list of rate limit configurations for the task. Defaults to an empty list.
+         :type rate_limits: list[RateLimit]
+
+         :param desired_worker_labels: A dictionary of desired worker labels that determine to which worker the task should be assigned.
+         :type desired_worker_labels: dict[str, DesiredWorkerLabel]
+
+         :param backoff_factor: The backoff factor for controlling exponential backoff in retries. Default: None
+         :type backoff_factor: float | None
+
+         :param backoff_max_seconds: The maximum number of seconds to allow retries with exponential backoff to continue. Default: None
+         :type backoff_max_seconds: int | None
+
+         :returns: A decorator which creates a `Standalone` task object.
+         :rtype: Callable[[Callable[[TWorkflowInput, Context], R]], Standalone[TWorkflowInput, R]]
+         """
+
+         workflow = Workflow[TWorkflowInput](
              WorkflowConfig(
                  name=name,
+                 version=version,
+                 description=description,
                  on_events=on_events,
                  on_crons=on_crons,
-                 version=version,
-                 timeout=timeout,
-                 schedule_timeout=schedule_timeout,
                  sticky=sticky,
-                 default_priority=default_priority,
                  concurrency=concurrency,
                  input_validator=input_validator
                  or cast(Type[TWorkflowInput], EmptyModel),
              ),
              self,
          )
+
+         task_wrapper = workflow.durable_task(
+             name=name,
+             schedule_timeout=schedule_timeout,
+             execution_timeout=execution_timeout,
+             parents=[],
+             retries=retries,
+             rate_limits=rate_limits,
+             desired_worker_labels=desired_worker_labels,
+             backoff_factor=backoff_factor,
+             backoff_max_seconds=backoff_max_seconds,
+             concurrency=[concurrency] if concurrency else [],
+         )
+
+         def inner(
+             func: Callable[[TWorkflowInput, DurableContext], R]
+         ) -> Standalone[TWorkflowInput, R]:
+             created_task = task_wrapper(func)
+
+             return Standalone[TWorkflowInput, R](
+                 workflow=workflow,
+                 task=created_task,
+             )
+
+         return inner
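
The hunks above replace the 1.0.0 `step` / `on_failure_step` / `declare_workflow` API with `Hatchet.workflow`, `Hatchet.task`, `Hatchet.durable_task`, and a `workflows=`-aware `Hatchet.worker`. The following is a minimal usage sketch based only on the signatures visible in this diff; the `from hatchet_sdk import Hatchet` import path, the defaults of `Workflow.task`, registering a decorated standalone task directly via `workflows=[...]`, and `parents=[...]` taking decorated task objects are assumptions rather than verified 1.0.1 behaviour.

from pydantic import BaseModel

from hatchet_sdk import Context, Hatchet  # Context is re-exported from the package root per the import change above

hatchet = Hatchet(debug=True)


class GreetInput(BaseModel):
    name: str


# `task` now takes keyword-only arguments and hands the wrapped function the
# validated input first and the Context second.
@hatchet.task(name="greet", input_validator=GreetInput)
def greet(input: GreetInput, ctx: Context) -> dict[str, str]:
    return {"greeting": f"Hello, {input.name}!"}


class ETLInput(BaseModel):
    source: str


# Multi-task workflow declared via Hatchet.workflow(); passing the decorated
# parent task object to `parents=` is assumed from the internal
# workflow.task(parents=[]) call shown in the diff.
etl = hatchet.workflow(name="etl", input_validator=ETLInput)


@etl.task(name="extract")
def extract(input: ETLInput, ctx: Context) -> dict[str, int]:
    return {"rows": 3}


@etl.task(name="load", parents=[extract])
def load(input: ETLInput, ctx: Context) -> dict[str, bool]:
    return {"loaded": True}


def main() -> None:
    # `worker` replaces max_runs with `slots` and accepts a `workflows` shorthand
    # for registration (assumed to accept standalone tasks as well as workflows).
    worker = hatchet.worker("example-worker", slots=10, workflows=[greet, etl])
    worker.start()


if __name__ == "__main__":
    main()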