hatchet-sdk 1.0.0__py3-none-any.whl → 1.0.0a1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of hatchet-sdk might be problematic. Click here for more details.

Files changed (65)
  1. hatchet_sdk/__init__.py +27 -16
  2. hatchet_sdk/client.py +13 -63
  3. hatchet_sdk/clients/admin.py +203 -124
  4. hatchet_sdk/clients/dispatcher/action_listener.py +42 -42
  5. hatchet_sdk/clients/dispatcher/dispatcher.py +18 -16
  6. hatchet_sdk/clients/durable_event_listener.py +327 -0
  7. hatchet_sdk/clients/rest/__init__.py +12 -1
  8. hatchet_sdk/clients/rest/api/log_api.py +258 -0
  9. hatchet_sdk/clients/rest/api/task_api.py +32 -6
  10. hatchet_sdk/clients/rest/api/workflow_runs_api.py +626 -0
  11. hatchet_sdk/clients/rest/models/__init__.py +12 -1
  12. hatchet_sdk/clients/rest/models/v1_log_line.py +94 -0
  13. hatchet_sdk/clients/rest/models/v1_log_line_level.py +39 -0
  14. hatchet_sdk/clients/rest/models/v1_log_line_list.py +110 -0
  15. hatchet_sdk/clients/rest/models/v1_task_summary.py +80 -64
  16. hatchet_sdk/clients/rest/models/v1_trigger_workflow_run_request.py +95 -0
  17. hatchet_sdk/clients/rest/models/v1_workflow_run_display_name.py +98 -0
  18. hatchet_sdk/clients/rest/models/v1_workflow_run_display_name_list.py +114 -0
  19. hatchet_sdk/clients/rest/models/workflow_run_shape_item_for_workflow_run_details.py +9 -4
  20. hatchet_sdk/clients/rest_client.py +21 -0
  21. hatchet_sdk/clients/run_event_listener.py +0 -1
  22. hatchet_sdk/context/context.py +85 -147
  23. hatchet_sdk/contracts/dispatcher_pb2_grpc.py +1 -1
  24. hatchet_sdk/contracts/events_pb2.py +2 -2
  25. hatchet_sdk/contracts/events_pb2_grpc.py +1 -1
  26. hatchet_sdk/contracts/v1/dispatcher_pb2.py +36 -0
  27. hatchet_sdk/contracts/v1/dispatcher_pb2.pyi +38 -0
  28. hatchet_sdk/contracts/v1/dispatcher_pb2_grpc.py +145 -0
  29. hatchet_sdk/contracts/v1/shared/condition_pb2.py +39 -0
  30. hatchet_sdk/contracts/v1/shared/condition_pb2.pyi +72 -0
  31. hatchet_sdk/contracts/v1/shared/condition_pb2_grpc.py +29 -0
  32. hatchet_sdk/contracts/v1/workflows_pb2.py +67 -0
  33. hatchet_sdk/contracts/v1/workflows_pb2.pyi +228 -0
  34. hatchet_sdk/contracts/v1/workflows_pb2_grpc.py +234 -0
  35. hatchet_sdk/contracts/workflows_pb2_grpc.py +1 -1
  36. hatchet_sdk/features/cron.py +3 -3
  37. hatchet_sdk/features/scheduled.py +2 -2
  38. hatchet_sdk/hatchet.py +427 -151
  39. hatchet_sdk/opentelemetry/instrumentor.py +8 -13
  40. hatchet_sdk/rate_limit.py +33 -39
  41. hatchet_sdk/runnables/contextvars.py +12 -0
  42. hatchet_sdk/runnables/standalone.py +194 -0
  43. hatchet_sdk/runnables/task.py +144 -0
  44. hatchet_sdk/runnables/types.py +138 -0
  45. hatchet_sdk/runnables/workflow.py +764 -0
  46. hatchet_sdk/utils/aio_utils.py +0 -79
  47. hatchet_sdk/utils/proto_enums.py +0 -7
  48. hatchet_sdk/utils/timedelta_to_expression.py +23 -0
  49. hatchet_sdk/utils/typing.py +2 -2
  50. hatchet_sdk/v0/clients/rest_client.py +9 -0
  51. hatchet_sdk/v0/worker/action_listener_process.py +18 -2
  52. hatchet_sdk/waits.py +120 -0
  53. hatchet_sdk/worker/action_listener_process.py +64 -30
  54. hatchet_sdk/worker/runner/run_loop_manager.py +35 -25
  55. hatchet_sdk/worker/runner/runner.py +72 -49
  56. hatchet_sdk/worker/runner/utils/capture_logs.py +3 -11
  57. hatchet_sdk/worker/worker.py +155 -118
  58. hatchet_sdk/workflow_run.py +4 -5
  59. {hatchet_sdk-1.0.0.dist-info → hatchet_sdk-1.0.0a1.dist-info}/METADATA +1 -2
  60. {hatchet_sdk-1.0.0.dist-info → hatchet_sdk-1.0.0a1.dist-info}/RECORD +62 -42
  61. {hatchet_sdk-1.0.0.dist-info → hatchet_sdk-1.0.0a1.dist-info}/entry_points.txt +2 -0
  62. hatchet_sdk/semver.py +0 -30
  63. hatchet_sdk/worker/runner/utils/error_with_traceback.py +0 -6
  64. hatchet_sdk/workflow.py +0 -527
  65. {hatchet_sdk-1.0.0.dist-info → hatchet_sdk-1.0.0a1.dist-info}/WHEEL +0 -0
hatchet_sdk/hatchet.py CHANGED
@@ -1,46 +1,35 @@
1
1
  import asyncio
2
2
  import logging
3
- from typing import Any, Callable, Type, TypeVar, cast, overload
3
+ from typing import Any, Callable, Type, cast, overload
4
4
 
5
- from hatchet_sdk.client import Client, new_client, new_client_raw
5
+ from hatchet_sdk import Context, DurableContext
6
+ from hatchet_sdk.client import Client
6
7
  from hatchet_sdk.clients.admin import AdminClient
7
8
  from hatchet_sdk.clients.dispatcher.dispatcher import DispatcherClient
8
9
  from hatchet_sdk.clients.events import EventClient
9
10
  from hatchet_sdk.clients.rest_client import RestApi
10
11
  from hatchet_sdk.clients.run_event_listener import RunEventListenerClient
11
12
  from hatchet_sdk.config import ClientConfig
12
- from hatchet_sdk.context.context import Context
13
- from hatchet_sdk.contracts.workflows_pb2 import DesiredWorkerLabels
14
13
  from hatchet_sdk.features.cron import CronClient
15
14
  from hatchet_sdk.features.scheduled import ScheduledClient
16
15
  from hatchet_sdk.labels import DesiredWorkerLabel
17
16
  from hatchet_sdk.logger import logger
18
17
  from hatchet_sdk.rate_limit import RateLimit
19
- from hatchet_sdk.worker.worker import Worker
20
- from hatchet_sdk.workflow import (
18
+ from hatchet_sdk.runnables.standalone import Standalone
19
+ from hatchet_sdk.runnables.types import (
20
+ DEFAULT_EXECUTION_TIMEOUT,
21
+ DEFAULT_SCHEDULE_TIMEOUT,
21
22
  ConcurrencyExpression,
22
23
  EmptyModel,
23
- Step,
24
- StepType,
24
+ R,
25
25
  StickyStrategy,
26
- Task,
26
+ TaskDefaults,
27
27
  TWorkflowInput,
28
28
  WorkflowConfig,
29
- WorkflowDeclaration,
30
29
  )
31
-
32
- R = TypeVar("R")
33
-
34
-
35
- def transform_desired_worker_label(d: DesiredWorkerLabel) -> DesiredWorkerLabels:
36
- value = d.value
37
- return DesiredWorkerLabels(
38
- strValue=value if not isinstance(value, int) else None,
39
- intValue=value if isinstance(value, int) else None,
40
- required=d.required,
41
- weight=d.weight,
42
- comparator=d.comparator, # type: ignore[arg-type]
43
- )
30
+ from hatchet_sdk.runnables.workflow import BaseWorkflow, Workflow
31
+ from hatchet_sdk.utils.timedelta_to_expression import Duration
32
+ from hatchet_sdk.worker.worker import Worker
44
33
 
45
34
 
46
35
  class Hatchet:
@@ -63,16 +52,6 @@ class Hatchet:
63
52
  cron: CronClient
64
53
  scheduled: ScheduledClient
65
54
 
66
- @classmethod
67
- def from_environment(
68
- cls, defaults: ClientConfig = ClientConfig(), **kwargs: Any
69
- ) -> "Hatchet":
70
- return cls(client=new_client(defaults), **kwargs)
71
-
72
- @classmethod
73
- def from_config(cls, config: ClientConfig, **kwargs: Any) -> "Hatchet":
74
- return cls(client=new_client_raw(config), **kwargs)
75
-
76
55
  def __init__(
77
56
  self,
78
57
  debug: bool = False,
@@ -82,16 +61,20 @@ class Hatchet:
82
61
  """
83
62
  Initialize a new Hatchet instance.
84
63
 
85
- Args:
86
- debug (bool, optional): Enable debug logging. Defaults to False.
87
- client (Optional[Client], optional): A pre-configured Client instance. Defaults to None.
88
- config (ClientConfig, optional): Configuration for creating a new Client. Defaults to ClientConfig().
64
+ :param debug: Enable debug logging. Default: `False`
65
+ :type debug: bool
66
+
67
+ :param client: A pre-configured `Client` instance. Default: `None`.
68
+ :type client: Client | None
69
+
70
+ :param config: Configuration for creating a new Client. Defaults to ClientConfig()
71
+ :type config: ClientConfig
89
72
  """
90
73
 
91
74
  if debug:
92
75
  logger.setLevel(logging.DEBUG)
93
76
 
94
- self._client = client if client else new_client(config, debug)
77
+ self._client = client if client else Client(config=config, debug=debug)
95
78
  self.cron = CronClient(self._client)
96
79
  self.scheduled = ScheduledClient(self._client)
97
80
 
@@ -123,185 +106,478 @@ class Hatchet:
123
106
  def tenant_id(self) -> str:
124
107
  return self._client.config.tenant_id
125
108
 
126
- def step(
109
+ def worker(
110
+ self,
111
+ name: str,
112
+ slots: int = 100,
113
+ labels: dict[str, str | int] = {},
114
+ workflows: list[BaseWorkflow[Any]] = [],
115
+ ) -> Worker:
116
+ """
117
+ Create a Hatchet worker on which to run workflows.
118
+
119
+ :param name: The name of the worker.
120
+ :type name: str
121
+
122
+ :param slots: The number of workflow slots on the worker. In other words, the number of concurrent tasks the worker can run at any point in time. Default: 100
123
+ :type slots: int
124
+
125
+ :param labels: A dictionary of labels to assign to the worker. For more details, view examples on affinity and worker labels. Defaults to an empty dictionary (no labels)
126
+ :type labels: dict[str, str | int]
127
+
128
+ :param workflows: A list of workflows to register on the worker, as a shorthand for calling `register_workflow` on each or `register_workflows` on all of them. Defaults to an empty list
129
+ :type workflows: list[Workflow]
130
+
131
+
132
+ :returns: The created `Worker` object, which exposes an instance method `start` which can be called to start the worker.
133
+ :rtype: Worker
134
+ """
135
+
136
+ try:
137
+ loop = asyncio.get_running_loop()
138
+ except RuntimeError:
139
+ loop = None
140
+
141
+ return Worker(
142
+ name=name,
143
+ slots=slots,
144
+ labels=labels,
145
+ config=self._client.config,
146
+ debug=self._client.debug,
147
+ owned_loop=loop is None,
148
+ workflows=workflows,
149
+ )
150
+
151
+ @overload
152
+ def workflow(
153
+ self,
154
+ *,
155
+ name: str,
156
+ description: str | None = None,
157
+ input_validator: None = None,
158
+ on_events: list[str] = [],
159
+ on_crons: list[str] = [],
160
+ version: str | None = None,
161
+ sticky: StickyStrategy | None = None,
162
+ default_priority: int = 1,
163
+ concurrency: ConcurrencyExpression | None = None,
164
+ task_defaults: TaskDefaults = TaskDefaults(),
165
+ ) -> Workflow[EmptyModel]: ...
166
+
167
+ @overload
168
+ def workflow(
169
+ self,
170
+ *,
171
+ name: str,
172
+ description: str | None = None,
173
+ input_validator: Type[TWorkflowInput],
174
+ on_events: list[str] = [],
175
+ on_crons: list[str] = [],
176
+ version: str | None = None,
177
+ sticky: StickyStrategy | None = None,
178
+ default_priority: int = 1,
179
+ concurrency: ConcurrencyExpression | None = None,
180
+ task_defaults: TaskDefaults = TaskDefaults(),
181
+ ) -> Workflow[TWorkflowInput]: ...
182
+
183
+ def workflow(
127
184
  self,
128
- name: str = "",
129
- timeout: str = "60m",
130
- parents: list[str] = [],
185
+ *,
186
+ name: str,
187
+ description: str | None = None,
188
+ input_validator: Type[TWorkflowInput] | None = None,
189
+ on_events: list[str] = [],
190
+ on_crons: list[str] = [],
191
+ version: str | None = None,
192
+ sticky: StickyStrategy | None = None,
193
+ default_priority: int = 1,
194
+ concurrency: ConcurrencyExpression | None = None,
195
+ task_defaults: TaskDefaults = TaskDefaults(),
196
+ ) -> Workflow[EmptyModel] | Workflow[TWorkflowInput]:
197
+ """
198
+ Define a Hatchet workflow, which can then declare `task`s and be `run`, `schedule`d, and so on.
199
+
200
+ :param name: The name of the workflow.
201
+ :type name: str
202
+
203
+ :param description: A description for the workflow. Default: None
204
+ :type description: str | None
205
+
206
+ :param version: A version for the workflow. Default: None
207
+ :type version: str | None
208
+
209
+ :param input_validator: A Pydantic model to use as a validator for the `input` to the tasks in the workflow. If no validator is provided, defaults to an `EmptyModel` under the hood. The `EmptyModel` is a Pydantic model with no fields specified, and with the `extra` config option set to `"allow"`.
210
+ :type input_validator: Type[BaseModel]
211
+
212
+ :param on_events: A list of event triggers for the workflow - events which cause the workflow to be run. Defaults to an empty list, meaning the workflow will not be run on any event pushes.
213
+ :type on_events: list[str]
214
+
215
+ :param on_crons: A list of cron triggers for the workflow. Defaults to an empty list, meaning the workflow will not be run on any cron schedules.
216
+ :type on_crons: list[str]
217
+
218
+ :param sticky: A sticky strategy for the workflow. Default: `None`
219
+ :type sticky: StickyStategy
220
+
221
+ :param default_priority: The priority of the workflow. Higher values will cause this workflow to have priority in scheduling over other, lower priority ones. Default: `1`
222
+ :type default_priority: int
223
+
224
+ :param concurrency: A concurrency object controlling the concurrency settings for this workflow.
225
+ :type concurrency: ConcurrencyExpression | None
226
+
227
+ :param task_defaults: A `TaskDefaults` object controlling the default task settings for this workflow.
228
+ :type task_defaults: TaskDefaults
229
+
230
+ :returns: The created `Workflow` object, which can be used to declare tasks, run the workflow, and so on.
231
+ :rtype: Workflow
232
+ """
233
+
234
+ return Workflow[TWorkflowInput](
235
+ WorkflowConfig(
236
+ name=name,
237
+ version=version,
238
+ description=description,
239
+ on_events=on_events,
240
+ on_crons=on_crons,
241
+ sticky=sticky,
242
+ concurrency=concurrency,
243
+ input_validator=input_validator
244
+ or cast(Type[TWorkflowInput], EmptyModel),
245
+ task_defaults=task_defaults,
246
+ ),
247
+ self,
248
+ )
249
+
250
+ @overload
251
+ def task(
252
+ self,
253
+ *,
254
+ name: str,
255
+ description: str | None = None,
256
+ input_validator: None = None,
257
+ on_events: list[str] = [],
258
+ on_crons: list[str] = [],
259
+ version: str | None = None,
260
+ sticky: StickyStrategy | None = None,
261
+ default_priority: int = 1,
262
+ concurrency: ConcurrencyExpression | None = None,
263
+ schedule_timeout: Duration = DEFAULT_SCHEDULE_TIMEOUT,
264
+ execution_timeout: Duration = DEFAULT_EXECUTION_TIMEOUT,
131
265
  retries: int = 0,
132
266
  rate_limits: list[RateLimit] = [],
133
267
  desired_worker_labels: dict[str, DesiredWorkerLabel] = {},
134
268
  backoff_factor: float | None = None,
135
269
  backoff_max_seconds: int | None = None,
136
- ) -> Callable[[Callable[[Any, Context], R]], Step[R]]:
137
- def inner(func: Callable[[Any, Context], R]) -> Step[R]:
138
- return Step(
139
- fn=func,
140
- type=StepType.DEFAULT,
141
- name=name.lower() or str(func.__name__).lower(),
142
- timeout=timeout,
143
- parents=parents,
144
- retries=retries,
145
- rate_limits=[r for rate_limit in rate_limits if (r := rate_limit._req)],
146
- desired_worker_labels={
147
- key: transform_desired_worker_label(d)
148
- for key, d in desired_worker_labels.items()
149
- },
150
- backoff_factor=backoff_factor,
151
- backoff_max_seconds=backoff_max_seconds,
152
- )
153
-
154
- return inner
270
+ ) -> Callable[[Callable[[EmptyModel, Context], R]], Standalone[EmptyModel, R]]: ...
155
271
 
156
- def on_failure_step(
272
+ @overload
273
+ def task(
157
274
  self,
158
- name: str = "",
159
- timeout: str = "60m",
160
- parents: list[str] = [],
275
+ *,
276
+ name: str,
277
+ description: str | None = None,
278
+ input_validator: Type[TWorkflowInput],
279
+ on_events: list[str] = [],
280
+ on_crons: list[str] = [],
281
+ version: str | None = None,
282
+ sticky: StickyStrategy | None = None,
283
+ default_priority: int = 1,
284
+ concurrency: ConcurrencyExpression | None = None,
285
+ schedule_timeout: Duration = DEFAULT_SCHEDULE_TIMEOUT,
286
+ execution_timeout: Duration = DEFAULT_EXECUTION_TIMEOUT,
161
287
  retries: int = 0,
162
288
  rate_limits: list[RateLimit] = [],
163
289
  desired_worker_labels: dict[str, DesiredWorkerLabel] = {},
164
290
  backoff_factor: float | None = None,
165
291
  backoff_max_seconds: int | None = None,
166
- ) -> Callable[[Callable[[Any, Context], R]], Step[R]]:
167
- def inner(func: Callable[[Any, Context], R]) -> Step[R]:
168
- return Step(
169
- fn=func,
170
- type=StepType.ON_FAILURE,
171
- name=name.lower() or str(func.__name__).lower(),
172
- timeout=timeout,
173
- parents=parents,
174
- retries=retries,
175
- rate_limits=[r for rate_limit in rate_limits if (r := rate_limit._req)],
176
- desired_worker_labels={
177
- key: transform_desired_worker_label(d)
178
- for key, d in desired_worker_labels.items()
179
- },
180
- backoff_factor=backoff_factor,
181
- backoff_max_seconds=backoff_max_seconds,
182
- )
183
-
184
- return inner
292
+ ) -> Callable[
293
+ [Callable[[TWorkflowInput, Context], R]], Standalone[TWorkflowInput, R]
294
+ ]: ...
185
295
 
186
296
  def task(
187
297
  self,
188
- name: str = "",
298
+ *,
299
+ name: str,
300
+ description: str | None = None,
301
+ input_validator: Type[TWorkflowInput] | None = None,
189
302
  on_events: list[str] = [],
190
303
  on_crons: list[str] = [],
191
- version: str = "",
192
- timeout: str = "60m",
193
- schedule_timeout: str = "5m",
304
+ version: str | None = None,
194
305
  sticky: StickyStrategy | None = None,
306
+ default_priority: int = 1,
307
+ concurrency: ConcurrencyExpression | None = None,
308
+ schedule_timeout: Duration = DEFAULT_SCHEDULE_TIMEOUT,
309
+ execution_timeout: Duration = DEFAULT_EXECUTION_TIMEOUT,
195
310
  retries: int = 0,
196
311
  rate_limits: list[RateLimit] = [],
197
312
  desired_worker_labels: dict[str, DesiredWorkerLabel] = {},
198
- concurrency: ConcurrencyExpression | None = None,
199
- on_failure: Task[Any, Any] | None = None,
200
- default_priority: int = 1,
201
- input_validator: Type[TWorkflowInput] | None = None,
202
313
  backoff_factor: float | None = None,
203
314
  backoff_max_seconds: int | None = None,
204
- ) -> Callable[[Callable[[Context], R]], Task[R, TWorkflowInput]]:
205
- def inner(func: Callable[[Context], R]) -> Task[R, TWorkflowInput]:
206
- return Task[R, TWorkflowInput](
207
- func,
208
- hatchet=self,
315
+ ) -> (
316
+ Callable[[Callable[[EmptyModel, Context], R]], Standalone[EmptyModel, R]]
317
+ | Callable[
318
+ [Callable[[TWorkflowInput, Context], R]], Standalone[TWorkflowInput, R]
319
+ ]
320
+ ):
321
+ """
322
+ A decorator to transform a function into a standalone Hatchet task that runs as part of a workflow.
323
+
324
+ :param name: The name of the task. If not specified, defaults to the name of the function being wrapped by the `task` decorator.
325
+ :type name: str
326
+
327
+ :param description: An optional description for the task. Default: None
328
+ :type description: str | None
329
+
330
+ :param input_validator: A Pydantic model to use as a validator for the input to the task. If no validator is provided, defaults to an `EmptyModel`.
331
+ :type input_validator: Type[BaseModel]
332
+
333
+ :param on_events: A list of event triggers for the task - events which cause the task to be run. Defaults to an empty list.
334
+ :type on_events: list[str]
335
+
336
+ :param on_crons: A list of cron triggers for the task. Defaults to an empty list.
337
+ :type on_crons: list[str]
338
+
339
+ :param version: A version for the task. Default: None
340
+ :type version: str | None
341
+
342
+ :param sticky: A sticky strategy for the task. Default: None
343
+ :type sticky: StickyStrategy | None
344
+
345
+ :param default_priority: The priority of the task. Higher values will cause this task to have priority in scheduling. Default: 1
346
+ :type default_priority: int
347
+
348
+ :param concurrency: A concurrency object controlling the concurrency settings for this task.
349
+ :type concurrency: ConcurrencyExpression | None
350
+
351
+ :param schedule_timeout: The maximum time allowed for scheduling the task. Default: DEFAULT_SCHEDULE_TIMEOUT
352
+ :type schedule_timeout: Duration
353
+
354
+ :param execution_timeout: The maximum time allowed for executing the task. Default: DEFAULT_EXECUTION_TIMEOUT
355
+ :type execution_timeout: Duration
356
+
357
+ :param retries: The number of times to retry the task before failing. Default: 0
358
+ :type retries: int
359
+
360
+ :param rate_limits: A list of rate limit configurations for the task. Defaults to an empty list.
361
+ :type rate_limits: list[RateLimit]
362
+
363
+ :param desired_worker_labels: A dictionary of desired worker labels that determine to which worker the task should be assigned.
364
+ :type desired_worker_labels: dict[str, DesiredWorkerLabel]
365
+
366
+ :param backoff_factor: The backoff factor for controlling exponential backoff in retries. Default: None
367
+ :type backoff_factor: float | None
368
+
369
+ :param backoff_max_seconds: The maximum number of seconds to allow retries with exponential backoff to continue. Default: None
370
+ :type backoff_max_seconds: int | None
371
+
372
+ :returns: A decorator which creates a `Standalone` task object.
373
+ :rtype: Callable[[Callable[[TWorkflowInput, Context], R]], Standalone[TWorkflowInput, R]]
374
+ """
375
+
376
+ workflow = Workflow[TWorkflowInput](
377
+ WorkflowConfig(
209
378
  name=name,
379
+ version=version,
380
+ description=description,
210
381
  on_events=on_events,
211
382
  on_crons=on_crons,
212
- version=version,
213
- timeout=timeout,
214
- schedule_timeout=schedule_timeout,
215
383
  sticky=sticky,
216
- retries=retries,
217
- rate_limits=rate_limits,
218
- desired_worker_labels=desired_worker_labels,
219
384
  concurrency=concurrency,
220
- on_failure=on_failure,
221
- default_priority=default_priority,
222
- input_validator=input_validator,
223
- backoff_factor=backoff_factor,
224
- backoff_max_seconds=backoff_max_seconds,
225
- )
226
-
227
- return inner
228
-
229
- def worker(
230
- self, name: str, max_runs: int | None = None, labels: dict[str, str | int] = {}
231
- ) -> Worker:
232
- try:
233
- loop = asyncio.get_running_loop()
234
- except RuntimeError:
235
- loop = None
385
+ input_validator=input_validator
386
+ or cast(Type[TWorkflowInput], EmptyModel),
387
+ ),
388
+ self,
389
+ )
236
390
 
237
- return Worker(
391
+ task_wrapper = workflow.task(
238
392
  name=name,
239
- max_runs=max_runs,
240
- labels=labels,
241
- config=self._client.config,
242
- debug=self._client.debug,
243
- owned_loop=loop is None,
393
+ schedule_timeout=schedule_timeout,
394
+ execution_timeout=execution_timeout,
395
+ parents=[],
396
+ retries=retries,
397
+ rate_limits=rate_limits,
398
+ desired_worker_labels=desired_worker_labels,
399
+ backoff_factor=backoff_factor,
400
+ backoff_max_seconds=backoff_max_seconds,
401
+ concurrency=[concurrency] if concurrency else [],
244
402
  )
245
403
 
404
+ def inner(
405
+ func: Callable[[TWorkflowInput, Context], R]
406
+ ) -> Standalone[TWorkflowInput, R]:
407
+ created_task = task_wrapper(func)
408
+
409
+ return Standalone[TWorkflowInput, R](
410
+ workflow=workflow,
411
+ task=created_task,
412
+ )
413
+
414
+ return inner
415
+
246
416
  @overload
247
- def declare_workflow(
417
+ def durable_task(
248
418
  self,
249
419
  *,
250
- name: str = "",
420
+ name: str,
421
+ description: str | None = None,
422
+ input_validator: None = None,
251
423
  on_events: list[str] = [],
252
424
  on_crons: list[str] = [],
253
- version: str = "",
254
- timeout: str = "60m",
255
- schedule_timeout: str = "5m",
425
+ version: str | None = None,
256
426
  sticky: StickyStrategy | None = None,
257
427
  default_priority: int = 1,
258
428
  concurrency: ConcurrencyExpression | None = None,
259
- input_validator: None = None,
260
- ) -> WorkflowDeclaration[EmptyModel]: ...
429
+ schedule_timeout: Duration = DEFAULT_SCHEDULE_TIMEOUT,
430
+ execution_timeout: Duration = DEFAULT_EXECUTION_TIMEOUT,
431
+ retries: int = 0,
432
+ rate_limits: list[RateLimit] = [],
433
+ desired_worker_labels: dict[str, DesiredWorkerLabel] = {},
434
+ backoff_factor: float | None = None,
435
+ backoff_max_seconds: int | None = None,
436
+ ) -> Callable[
437
+ [Callable[[EmptyModel, DurableContext], R]], Standalone[EmptyModel, R]
438
+ ]: ...
261
439
 
262
440
  @overload
263
- def declare_workflow(
441
+ def durable_task(
264
442
  self,
265
443
  *,
266
- name: str = "",
444
+ name: str,
445
+ description: str | None = None,
446
+ input_validator: Type[TWorkflowInput],
267
447
  on_events: list[str] = [],
268
448
  on_crons: list[str] = [],
269
- version: str = "",
270
- timeout: str = "60m",
271
- schedule_timeout: str = "5m",
449
+ version: str | None = None,
272
450
  sticky: StickyStrategy | None = None,
273
451
  default_priority: int = 1,
274
452
  concurrency: ConcurrencyExpression | None = None,
275
- input_validator: Type[TWorkflowInput],
276
- ) -> WorkflowDeclaration[TWorkflowInput]: ...
453
+ schedule_timeout: Duration = DEFAULT_SCHEDULE_TIMEOUT,
454
+ execution_timeout: Duration = DEFAULT_EXECUTION_TIMEOUT,
455
+ retries: int = 0,
456
+ rate_limits: list[RateLimit] = [],
457
+ desired_worker_labels: dict[str, DesiredWorkerLabel] = {},
458
+ backoff_factor: float | None = None,
459
+ backoff_max_seconds: int | None = None,
460
+ ) -> Callable[
461
+ [Callable[[TWorkflowInput, DurableContext], R]], Standalone[TWorkflowInput, R]
462
+ ]: ...
277
463
 
278
- def declare_workflow(
464
+ def durable_task(
279
465
  self,
280
466
  *,
281
- name: str = "",
467
+ name: str,
468
+ description: str | None = None,
469
+ input_validator: Type[TWorkflowInput] | None = None,
282
470
  on_events: list[str] = [],
283
471
  on_crons: list[str] = [],
284
- version: str = "",
285
- timeout: str = "60m",
286
- schedule_timeout: str = "5m",
472
+ version: str | None = None,
287
473
  sticky: StickyStrategy | None = None,
288
474
  default_priority: int = 1,
289
475
  concurrency: ConcurrencyExpression | None = None,
290
- input_validator: Type[TWorkflowInput] | None = None,
291
- ) -> WorkflowDeclaration[EmptyModel] | WorkflowDeclaration[TWorkflowInput]:
292
- return WorkflowDeclaration[TWorkflowInput](
476
+ schedule_timeout: Duration = DEFAULT_SCHEDULE_TIMEOUT,
477
+ execution_timeout: Duration = DEFAULT_EXECUTION_TIMEOUT,
478
+ retries: int = 0,
479
+ rate_limits: list[RateLimit] = [],
480
+ desired_worker_labels: dict[str, DesiredWorkerLabel] = {},
481
+ backoff_factor: float | None = None,
482
+ backoff_max_seconds: int | None = None,
483
+ ) -> (
484
+ Callable[[Callable[[EmptyModel, DurableContext], R]], Standalone[EmptyModel, R]]
485
+ | Callable[
486
+ [Callable[[TWorkflowInput, DurableContext], R]],
487
+ Standalone[TWorkflowInput, R],
488
+ ]
489
+ ):
490
+ """
491
+ A decorator to transform a function into a standalone Hatchet _durable_ task that runs as part of a workflow.
492
+
493
+ :param name: The name of the task. If not specified, defaults to the name of the function being wrapped by the `task` decorator.
494
+ :type name: str
495
+
496
+ :param description: An optional description for the task. Default: None
497
+ :type description: str | None
498
+
499
+ :param input_validator: A Pydantic model to use as a validator for the input to the task. If no validator is provided, defaults to an `EmptyModel`.
500
+ :type input_validator: Type[BaseModel]
501
+
502
+ :param on_events: A list of event triggers for the task - events which cause the task to be run. Defaults to an empty list.
503
+ :type on_events: list[str]
504
+
505
+ :param on_crons: A list of cron triggers for the task. Defaults to an empty list.
506
+ :type on_crons: list[str]
507
+
508
+ :param version: A version for the task. Default: None
509
+ :type version: str | None
510
+
511
+ :param sticky: A sticky strategy for the task. Default: None
512
+ :type sticky: StickyStrategy | None
513
+
514
+ :param default_priority: The priority of the task. Higher values will cause this task to have priority in scheduling. Default: 1
515
+ :type default_priority: int
516
+
517
+ :param concurrency: A concurrency object controlling the concurrency settings for this task.
518
+ :type concurrency: ConcurrencyExpression | None
519
+
520
+ :param schedule_timeout: The maximum time allowed for scheduling the task. Default: DEFAULT_SCHEDULE_TIMEOUT
521
+ :type schedule_timeout: Duration
522
+
523
+ :param execution_timeout: The maximum time allowed for executing the task. Default: DEFAULT_EXECUTION_TIMEOUT
524
+ :type execution_timeout: Duration
525
+
526
+ :param retries: The number of times to retry the task before failing. Default: 0
527
+ :type retries: int
528
+
529
+ :param rate_limits: A list of rate limit configurations for the task. Defaults to an empty list.
530
+ :type rate_limits: list[RateLimit]
531
+
532
+ :param desired_worker_labels: A dictionary of desired worker labels that determine to which worker the task should be assigned.
533
+ :type desired_worker_labels: dict[str, DesiredWorkerLabel]
534
+
535
+ :param backoff_factor: The backoff factor for controlling exponential backoff in retries. Default: None
536
+ :type backoff_factor: float | None
537
+
538
+ :param backoff_max_seconds: The maximum number of seconds to allow retries with exponential backoff to continue. Default: None
539
+ :type backoff_max_seconds: int | None
540
+
541
+ :returns: A decorator which creates a `Standalone` task object.
542
+ :rtype: Callable[[Callable[[TWorkflowInput, Context], R]], Standalone[TWorkflowInput, R]]
543
+ """
544
+
545
+ workflow = Workflow[TWorkflowInput](
293
546
  WorkflowConfig(
294
547
  name=name,
548
+ version=version,
549
+ description=description,
295
550
  on_events=on_events,
296
551
  on_crons=on_crons,
297
- version=version,
298
- timeout=timeout,
299
- schedule_timeout=schedule_timeout,
300
552
  sticky=sticky,
301
- default_priority=default_priority,
302
553
  concurrency=concurrency,
303
554
  input_validator=input_validator
304
555
  or cast(Type[TWorkflowInput], EmptyModel),
305
556
  ),
306
557
  self,
307
558
  )
559
+
560
+ task_wrapper = workflow.durable_task(
561
+ name=name,
562
+ schedule_timeout=schedule_timeout,
563
+ execution_timeout=execution_timeout,
564
+ parents=[],
565
+ retries=retries,
566
+ rate_limits=rate_limits,
567
+ desired_worker_labels=desired_worker_labels,
568
+ backoff_factor=backoff_factor,
569
+ backoff_max_seconds=backoff_max_seconds,
570
+ concurrency=[concurrency] if concurrency else [],
571
+ )
572
+
573
+ def inner(
574
+ func: Callable[[TWorkflowInput, DurableContext], R]
575
+ ) -> Standalone[TWorkflowInput, R]:
576
+ created_task = task_wrapper(func)
577
+
578
+ return Standalone[TWorkflowInput, R](
579
+ workflow=workflow,
580
+ task=created_task,
581
+ )
582
+
583
+ return inner