hatchet-sdk 1.20.2__py3-none-any.whl → 1.21.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.


@@ -4,6 +4,8 @@ from datetime import timedelta
 from typing import TYPE_CHECKING, Any, cast
 from warnings import warn
 
+from pydantic import TypeAdapter
+
 from hatchet_sdk.clients.admin import AdminClient
 from hatchet_sdk.clients.dispatcher.dispatcher import (  # type: ignore[attr-defined]
     Action,
@@ -25,7 +27,13 @@ from hatchet_sdk.exceptions import TaskRunError
 from hatchet_sdk.features.runs import RunsClient
 from hatchet_sdk.logger import logger
 from hatchet_sdk.utils.timedelta_to_expression import Duration, timedelta_to_expr
-from hatchet_sdk.utils.typing import JSONSerializableMapping, LogLevel
+from hatchet_sdk.utils.typing import (
+    JSONSerializableMapping,
+    LogLevel,
+    classify_output_validator,
+    is_basemodel_validator,
+    is_dataclass_validator,
+)
 from hatchet_sdk.worker.runner.utils.capture_logs import AsyncLogSender, LogRecord
 
 if TYPE_CHECKING:
@@ -106,7 +114,21 @@ class Context:
             raise ValueError(f"Step output for '{task.name}' not found") from e
 
         if parent_step_data and (v := task.validators.step_output):
-            return cast(R, v.model_validate(parent_step_data))
+            validator = classify_output_validator(v)
+
+            if is_dataclass_validator(validator):
+                return cast(
+                    R,
+                    TypeAdapter(validator.validator_type).validate_python(
+                        parent_step_data
+                    ),
+                )
+
+            if is_basemodel_validator(validator):
+                return cast(
+                    R,
+                    validator.validator_type.model_validate(parent_step_data),
+                )
 
         return parent_step_data
 
hatchet_sdk/exceptions.py CHANGED
@@ -167,3 +167,7 @@ class LoopAlreadyRunningError(Exception):
 
 class IllegalTaskOutputError(Exception):
     pass
+
+
+class LifespanSetupError(Exception):
+    pass
hatchet_sdk/hatchet.py CHANGED
@@ -4,6 +4,8 @@ from collections.abc import Callable
 from datetime import timedelta
 from typing import Any, Concatenate, ParamSpec, cast, overload
 
+from pydantic import BaseModel
+
 from hatchet_sdk import Context, DurableContext
 from hatchet_sdk.client import Client
 from hatchet_sdk.clients.dispatcher.dispatcher import DispatcherClient
@@ -36,7 +38,7 @@ from hatchet_sdk.runnables.types import (
 )
 from hatchet_sdk.runnables.workflow import BaseWorkflow, Standalone, Workflow
 from hatchet_sdk.utils.timedelta_to_expression import Duration
-from hatchet_sdk.utils.typing import CoroutineLike
+from hatchet_sdk.utils.typing import CoroutineLike, DataclassInstance
 from hatchet_sdk.worker.worker import LifespanFn, Worker
 
 P = ParamSpec("P")
@@ -302,8 +304,10 @@ class Hatchet:
                 on_crons=on_crons or [],
                 sticky=sticky,
                 concurrency=concurrency,
-                input_validator=input_validator
-                or cast(type[TWorkflowInput], EmptyModel),
+                input_validator=cast(
+                    type[BaseModel] | type[DataclassInstance],
+                    input_validator or EmptyModel,
+                ),
                 task_defaults=task_defaults,
                 default_priority=default_priority,
                 default_filters=default_filters or [],
@@ -449,8 +453,10 @@ class Hatchet:
                 on_crons=on_crons or [],
                 sticky=sticky,
                 default_priority=default_priority,
-                input_validator=input_validator
-                or cast(type[TWorkflowInput], EmptyModel),
+                input_validator=cast(
+                    type[BaseModel] | type[DataclassInstance],
+                    input_validator or EmptyModel,
+                ),
                 default_filters=default_filters or [],
             ),
             self,
@@ -633,8 +639,10 @@ class Hatchet:
                 on_events=on_events or [],
                 on_crons=on_crons or [],
                 sticky=sticky,
-                input_validator=input_validator
-                or cast(type[TWorkflowInput], EmptyModel),
+                input_validator=cast(
+                    type[BaseModel] | type[DataclassInstance],
+                    input_validator or EmptyModel,
+                ),
                 default_priority=default_priority,
                 default_filters=default_filters or [],
             ),
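
Taken together with the runnables and runner changes below, widening `input_validator` here means a task's input (and return value) can now be a plain dataclass instead of a Pydantic `BaseModel`. A minimal sketch of what this enables, assuming the `@hatchet.task(...)` decorator keeps its `name`/`input_validator` parameters; the task, class, and field names are invented for illustration:

```python
from dataclasses import dataclass

from hatchet_sdk import Context, Hatchet

hatchet = Hatchet()


@dataclass
class OrderInput:
    # Plain dataclass used where a pydantic.BaseModel was previously required.
    order_id: str
    amount: int


@dataclass
class OrderResult:
    accepted: bool


@hatchet.task(name="process-order", input_validator=OrderInput)
def process_order(input: OrderInput, ctx: Context) -> OrderResult:
    # The worker serializes the dataclass return value with dataclasses.asdict,
    # and result extraction re-validates it via pydantic's TypeAdapter.
    return OrderResult(accepted=input.amount > 0)
```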
@@ -1,5 +1,6 @@
 import asyncio
 from collections.abc import Callable
+from dataclasses import asdict, is_dataclass
 from inspect import Parameter, iscoroutinefunction, signature
 from typing import (
     TYPE_CHECKING,
@@ -37,7 +38,6 @@ from hatchet_sdk.contracts.v1.workflows_pb2 import (
 from hatchet_sdk.exceptions import InvalidDependencyError
 from hatchet_sdk.runnables.types import (
     ConcurrencyExpression,
-    EmptyModel,
     R,
     StepType,
     TWorkflowInput,
@@ -289,11 +289,14 @@ class Task(Generic[TWorkflowInput, R]):
 
         additional_metadata = additional_metadata or {}
         parent_outputs = parent_outputs or {}
+        serialized_input: dict[str, Any] = {}
 
-        if input is None:
-            input = cast(TWorkflowInput, EmptyModel())
+        if is_dataclass(input):
+            serialized_input = asdict(input)
+        elif isinstance(input, BaseModel):
+            serialized_input = input.model_dump()
 
-        action_payload = ActionPayload(input=input.model_dump(), parents=parent_outputs)
+        action_payload = ActionPayload(input=serialized_input, parents=parent_outputs)
 
         action = Action(
             tenant_id=self.workflow.client.config.tenant_id,
@@ -4,15 +4,19 @@ from collections.abc import Callable, Mapping
 from enum import Enum
 from typing import Any, ParamSpec, TypeGuard, TypeVar
 
-from pydantic import BaseModel, ConfigDict, Field
+from pydantic import BaseModel, ConfigDict, Field, SkipValidation
 
 from hatchet_sdk.context.context import Context, DurableContext
 from hatchet_sdk.contracts.v1.workflows_pb2 import Concurrency
 from hatchet_sdk.contracts.v1.workflows_pb2 import DefaultFilter as DefaultFilterProto
 from hatchet_sdk.utils.timedelta_to_expression import Duration
-from hatchet_sdk.utils.typing import AwaitableLike, JSONSerializableMapping
+from hatchet_sdk.utils.typing import (
+    AwaitableLike,
+    DataclassInstance,
+    JSONSerializableMapping,
+)
 
-ValidTaskReturnType = BaseModel | Mapping[str, Any] | None
+ValidTaskReturnType = BaseModel | Mapping[str, Any] | DataclassInstance | None
 
 R = TypeVar("R", bound=ValidTaskReturnType)
 P = ParamSpec("P")
@@ -56,7 +60,7 @@ class ConcurrencyExpression(BaseModel):
     )
 
 
-TWorkflowInput = TypeVar("TWorkflowInput", bound=BaseModel)
+TWorkflowInput = TypeVar("TWorkflowInput", bound=BaseModel | DataclassInstance)
 
 
 class TaskDefaults(BaseModel):
@@ -93,7 +97,9 @@ class WorkflowConfig(BaseModel):
     on_crons: list[str] = Field(default_factory=list)
     sticky: StickyStrategy | None = None
     concurrency: ConcurrencyExpression | list[ConcurrencyExpression] | None = None
-    input_validator: type[BaseModel] = EmptyModel
+    input_validator: SkipValidation[type[BaseModel] | type[DataclassInstance]] = (
+        EmptyModel
+    )
     default_priority: int | None = None
 
     task_defaults: TaskDefaults = TaskDefaults()
@@ -1,5 +1,6 @@
 import asyncio
 from collections.abc import Callable
+from dataclasses import asdict
 from datetime import datetime, timedelta
 from functools import cached_property
 from typing import (
@@ -16,7 +17,7 @@ from typing import (
 )
 
 from google.protobuf import timestamp_pb2
-from pydantic import BaseModel, model_validator
+from pydantic import BaseModel, ConfigDict, SkipValidation, TypeAdapter, model_validator
 
 from hatchet_sdk.clients.admin import (
     ScheduleTriggerWorkflowOptions,
@@ -52,8 +53,12 @@ from hatchet_sdk.utils.proto_enums import convert_python_enum_to_proto
 from hatchet_sdk.utils.timedelta_to_expression import Duration
 from hatchet_sdk.utils.typing import (
     CoroutineLike,
+    DataclassInstance,
     JSONSerializableMapping,
-    is_basemodel_subclass,
+    classify_output_validator,
+    is_basemodel_validator,
+    is_dataclass_validator,
+    is_no_validator,
 )
 from hatchet_sdk.workflow_run import WorkflowRunRef
 
@@ -130,7 +135,8 @@ def transform_desired_worker_label(d: DesiredWorkerLabel) -> DesiredWorkerLabels
 
 
 class TypedTriggerWorkflowRunConfig(BaseModel, Generic[TWorkflowInput]):
-    input: TWorkflowInput
+    model_config = ConfigDict(arbitrary_types_allowed=True)
+    input: SkipValidation[TWorkflowInput]
     options: TriggerWorkflowOptions
 
 
@@ -217,10 +223,26 @@ class BaseWorkflow(Generic[TWorkflowInput]):
         )
 
     def _get_workflow_input(self, ctx: Context) -> TWorkflowInput:
-        return cast(
-            TWorkflowInput,
-            self.config.input_validator.model_validate(ctx.workflow_input),
-        )
+        validator = classify_output_validator(self.config.input_validator)
+
+        if is_dataclass_validator(validator):
+            return cast(
+                TWorkflowInput,
+                TypeAdapter(validator.validator_type).validate_python(
+                    ctx.workflow_input
+                ),
+            )
+
+        if is_basemodel_validator(validator):
+            return cast(
+                TWorkflowInput,
+                validator.validator_type.model_validate(ctx.workflow_input),
+            )
+
+        ## impossible to reach here since the input validator has to be either a BaseModel or dataclass
+
+        self.client.config.logger.error("input validator is of an unknown type")
+        return cast(TWorkflowInput, EmptyModel())
 
     @property
     def input_validator(self) -> type[TWorkflowInput]:
@@ -271,11 +293,16 @@ class BaseWorkflow(Generic[TWorkflowInput]):
         if not input:
             return {}
 
-        if isinstance(input, BaseModel):
-            return input.model_dump(mode="json")
+        validator = classify_output_validator(self.config.input_validator)
+
+        if is_dataclass_validator(validator):
+            return asdict(cast(DataclassInstance, input))
+
+        if is_basemodel_validator(validator):
+            return cast(BaseModel, input).model_dump(mode="json")
 
         raise ValueError(
-            f"Input must be a BaseModel or `None`, got {type(input)} instead."
+            f"Input must be a BaseModel or dataclass, got {type(input)} instead."
         )
 
     @cached_property
@@ -1203,12 +1230,26 @@ class Standalone(BaseWorkflow[TWorkflowInput], Generic[TWorkflowInput, R]):
 
         return_type = get_type_hints(self._task.fn).get("return")
 
-        self._output_validator = (
-            return_type if is_basemodel_subclass(return_type) else None
-        )
+        self._output_validator = self.get_output_validator(return_type)
 
         self.config = self._workflow.config
 
+    def get_output_validator(
+        self, return_type: Any | None
+    ) -> type[BaseModel] | type[DataclassInstance] | None:
+        validator = classify_output_validator(return_type)
+
+        if is_basemodel_validator(validator):
+            return validator.validator_type
+
+        if is_dataclass_validator(validator):
+            return validator.validator_type
+
+        if is_no_validator(validator):
+            return None
+
+        raise TypeError(f"Unhandled validator type: {validator}")
+
     @overload
     def _extract_result(self, result: dict[str, Any]) -> R: ...
 
@@ -1223,10 +1264,21 @@ class Standalone(BaseWorkflow[TWorkflowInput], Generic[TWorkflowInput, R]):
 
         output = result.get(self._task.name)
 
-        if not self._output_validator:
+        validator = classify_output_validator(self._output_validator)
+
+        if is_basemodel_validator(validator):
+            return cast(R, validator.validator_type.model_validate(output))
+
+        if is_dataclass_validator(validator):
+            return cast(
+                R,
+                TypeAdapter(validator.validator_type).validate_python(output),
+            )
+
+        if is_no_validator(validator):
             return cast(R, output)
 
-        return cast(R, self._output_validator.model_validate(output))
+        raise TypeError(f"Unhandled validator type: {validator}")
 
     def run(
         self,
@@ -1,9 +1,15 @@
 import sys
 from collections.abc import Awaitable, Coroutine, Generator
+from dataclasses import Field as DataclassField
+from dataclasses import dataclass, is_dataclass
 from enum import Enum
-from typing import Any, Literal, TypeAlias, TypeGuard, TypeVar
+from typing import Any, ClassVar, Literal, Protocol, TypeAlias, TypeGuard, TypeVar
 
-from pydantic import BaseModel
+from pydantic import BaseModel, SkipValidation
+
+
+class DataclassInstance(Protocol):
+    __dataclass_fields__: ClassVar[dict[str, DataclassField[Any]]]
 
 
 def is_basemodel_subclass(model: Any) -> TypeGuard[type[BaseModel]]:
@@ -13,9 +19,55 @@ def is_basemodel_subclass(model: Any) -> TypeGuard[type[BaseModel]]:
     return False
 
 
+@dataclass
+class PydanticModelValidator:
+    validator_type: type[BaseModel]
+    kind: Literal["basemodel"] = "basemodel"
+
+
+@dataclass
+class DataclassValidator:
+    validator_type: type[DataclassInstance]
+    kind: Literal["dataclass"] = "dataclass"
+
+
+@dataclass
+class NoValidator:
+    kind: Literal["none"] = "none"
+
+
+OutputValidator = PydanticModelValidator | DataclassValidator | NoValidator
+
+
+def is_basemodel_validator(
+    validator: OutputValidator,
+) -> TypeGuard[PydanticModelValidator]:
+    return validator.kind == "basemodel"
+
+
+def is_dataclass_validator(validator: OutputValidator) -> TypeGuard[DataclassValidator]:
+    return validator.kind == "dataclass"
+
+
+def is_no_validator(validator: OutputValidator) -> TypeGuard[NoValidator]:
+    return validator.kind == "none"
+
+
+def classify_output_validator(return_type: Any | None) -> OutputValidator:
+    if is_basemodel_subclass(return_type):
+        return PydanticModelValidator(validator_type=return_type)
+
+    if is_dataclass(return_type) and isinstance(return_type, type):
+        return DataclassValidator(validator_type=return_type)
+
+    return NoValidator()
+
+
 class TaskIOValidator(BaseModel):
-    workflow_input: type[BaseModel] | None = None
-    step_output: type[BaseModel] | None = None
+    workflow_input: SkipValidation[type[BaseModel] | type[DataclassInstance] | None] = (
+        None
+    )
+    step_output: SkipValidation[type[BaseModel] | type[DataclassInstance] | None] = None
 
 
 JSONSerializableMapping = dict[str, Any]
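
The classification helpers added in this hunk drive the branching seen in the context, workflow, and runner hunks: a type is sorted into a `PydanticModelValidator`, a `DataclassValidator`, or a `NoValidator` wrapper, and `TypeGuard` helpers narrow on the wrapper's `kind`. A small sketch of that behavior, exercising only the code added above (the module path matches the imports in the context hunk; the model and dataclass names are invented):

```python
from dataclasses import dataclass

from pydantic import BaseModel

from hatchet_sdk.utils.typing import (
    classify_output_validator,
    is_basemodel_validator,
    is_dataclass_validator,
    is_no_validator,
)


class ModelOutput(BaseModel):
    ok: bool


@dataclass
class DataclassOutput:
    ok: bool


# A BaseModel subclass is wrapped in PydanticModelValidator (kind="basemodel").
assert is_basemodel_validator(classify_output_validator(ModelOutput))

# A dataclass *type* is wrapped in DataclassValidator (kind="dataclass").
assert is_dataclass_validator(classify_output_validator(DataclassOutput))

# Anything else, including None or a dataclass *instance*, falls through to NoValidator.
assert is_no_validator(classify_output_validator(None))
assert is_no_validator(classify_output_validator(DataclassOutput(ok=True)))
```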
@@ -4,6 +4,7 @@ import functools
 import json
 from collections.abc import Callable
 from concurrent.futures import ThreadPoolExecutor
+from dataclasses import asdict, is_dataclass
 from enum import Enum
 from multiprocessing import Queue
 from textwrap import dedent
@@ -48,6 +49,7 @@ from hatchet_sdk.runnables.contextvars import (
 from hatchet_sdk.runnables.task import Task
 from hatchet_sdk.runnables.types import R, TWorkflowInput
 from hatchet_sdk.utils.serde import remove_null_unicode_character
+from hatchet_sdk.utils.typing import DataclassInstance
 from hatchet_sdk.worker.action_listener_process import ActionEvent
 from hatchet_sdk.worker.runner.utils.capture_logs import (
     AsyncLogSender,
@@ -479,10 +481,12 @@ class Runner:
 
         if isinstance(output, BaseModel):
             output = output.model_dump(mode="json")
+        elif is_dataclass(output):
+            output = asdict(cast(DataclassInstance, output))
 
         if not isinstance(output, dict):
             raise IllegalTaskOutputError(
-                f"Tasks must return either a dictionary or a Pydantic BaseModel which can be serialized to a JSON object. Got object of type {type(output)} instead."
+                f"Tasks must return either a dictionary, a Pydantic BaseModel, or a dataclass which can be serialized to a JSON object. Got object of type {type(output)} instead."
             )
 
         if output is None:
@@ -24,7 +24,7 @@ from pydantic import BaseModel
 from hatchet_sdk.client import Client
 from hatchet_sdk.config import ClientConfig
 from hatchet_sdk.contracts.v1.workflows_pb2 import CreateWorkflowVersionRequest
-from hatchet_sdk.exceptions import LoopAlreadyRunningError
+from hatchet_sdk.exceptions import LifespanSetupError, LoopAlreadyRunningError
 from hatchet_sdk.logger import logger
 from hatchet_sdk.runnables.action import Action
 from hatchet_sdk.runnables.contextvars import task_count
@@ -273,12 +273,18 @@ class Worker:
                 "no actions registered, register workflows or actions before starting worker"
             )
 
-        if self.config.healthcheck.enabled:
-            await self._start_health_server()
-
         lifespan_context = None
         if self.lifespan:
-            lifespan_context = await self._setup_lifespan()
+            try:
+                lifespan_context = await self._setup_lifespan()
+            except LifespanSetupError as e:
+                logger.exception("lifespan setup failed")
+                if self.loop:
+                    self.loop.stop()
+                raise e
+
+        if self.config.healthcheck.enabled:
+            await self._start_health_server()
 
         if self.has_any_non_durable:
             self.action_listener_process = self._start_action_listener(is_durable=False)
@@ -328,8 +334,8 @@ class Worker:
 
         self.lifespan_stack = AsyncExitStack()
 
-        lifespan_gen = self.lifespan()
         try:
+            lifespan_gen = self.lifespan()
             context = await anext(lifespan_gen)
             await self.lifespan_stack.enter_async_context(
                 _create_async_context_manager(lifespan_gen)
@@ -337,10 +343,16 @@ class Worker:
             return context
         except StopAsyncIteration:
             return None
+        except Exception as e:
+            raise LifespanSetupError("An error occurred during lifespan setup") from e
 
     async def _cleanup_lifespan(self) -> None:
-        if self.lifespan_stack is not None:
-            await self.lifespan_stack.aclose()
+        try:
+            if self.lifespan_stack is not None:
+                await self.lifespan_stack.aclose()
+        except Exception as e:
+            logger.exception("error during lifespan cleanup")
+            raise LifespanSetupError("An error occurred during lifespan cleanup") from e
 
     def _start_action_listener(
         self, is_durable: bool
@@ -472,7 +484,10 @@ class Worker:
         ):
             self.durable_action_listener_process.kill()
 
-        await self._cleanup_lifespan()
+        try:
+            await self._cleanup_lifespan()
+        except LifespanSetupError:
+            logger.exception("lifespan cleanup failed")
 
         await self._close()
         if self.loop and self.owned_loop:
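
With these worker changes, an exception raised while a lifespan generator starts up or tears down is wrapped in the new `LifespanSetupError`, logged, and (on startup) aborts the worker instead of being silently swallowed; the health server now also starts only after the lifespan has been set up. A hedged sketch of a lifespan affected by this, assuming the `lifespan=` keyword on `hatchet.worker(...)` is unchanged; `connect_to_database` is a stand-in defined here purely for illustration:

```python
from collections.abc import AsyncGenerator

from hatchet_sdk import Hatchet

hatchet = Hatchet()


async def connect_to_database() -> dict[str, bool]:
    # Stand-in for real resource acquisition; illustration only.
    return {"connected": True}


async def lifespan() -> AsyncGenerator[dict[str, object], None]:
    # An exception raised before the first yield is now re-raised by the worker
    # as LifespanSetupError, logged, and stops startup.
    db = await connect_to_database()
    try:
        yield {"db": db}
    finally:
        # Exceptions raised during teardown are wrapped in LifespanSetupError
        # and logged when the worker shuts down.
        db.clear()


worker = hatchet.worker("example-worker", lifespan=lifespan)
```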
@@ -1,7 +1,7 @@
 Metadata-Version: 2.1
 Name: hatchet-sdk
-Version: 1.20.2
-Summary:
+Version: 1.21.0
+Summary: This is the official Python SDK for Hatchet, a distributed, fault-tolerant task queue. The SDK allows you to easily integrate Hatchet's task scheduling and workflow orchestration capabilities into your Python applications.
 License: MIT
 Author: Alexander Belanger
 Author-email: alexander@hatchet.run
@@ -266,7 +266,7 @@ hatchet_sdk/conditions.py,sha256=CnhpkXgVXM3wc0kAX8KZQA6tp8NFAbdzAN2xFbw7Hb0,452
 hatchet_sdk/config.py,sha256=QVHFVPmqNxc7PFb2Mvll1BGDEmIo9h587N44W7KoYxE,5349
 hatchet_sdk/connection.py,sha256=XCBY9-UxaN3blakgZ59AhDpjb1ilLOOlmNNM6QaDtMM,2961
 hatchet_sdk/context/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-hatchet_sdk/context/context.py,sha256=KE-GsaosBar8USXSj1lkWHdsZXfXFf6tI9RwGTOF-aM,16278
+hatchet_sdk/context/context.py,sha256=8RivrpS0h4BHSXvscjFj933zHNk8OUBDnp1j3CagOu0,16862
 hatchet_sdk/context/worker_context.py,sha256=3lGkOYmDixeuSmqxXbsYav2gErcjP8cDa2m0t0iomjI,884
 hatchet_sdk/contracts/dispatcher_pb2.py,sha256=W9aGh-wctZhLjUXUdeQTxH4qArsw6D0kIAWM9SVCX5o,14786
 hatchet_sdk/contracts/dispatcher_pb2.pyi,sha256=9Qoz88G-btdlTuxvk4knqfnYdcIXy3oR9DTh6MwIdP4,18923
@@ -286,7 +286,7 @@ hatchet_sdk/contracts/v1/workflows_pb2_grpc.py,sha256=XytYpV2kJQZT8iAs14z4SWsv-9
 hatchet_sdk/contracts/workflows_pb2.py,sha256=daEsUwZnlDQ5GGLJ8WHgLdI1Tgr3lBXxGV1mJ6go0nE,11812
 hatchet_sdk/contracts/workflows_pb2.pyi,sha256=WJ3b45pWvoNmmWTWjBJt61IiAoVn61F62AG5OrRsnd8,15538
 hatchet_sdk/contracts/workflows_pb2_grpc.py,sha256=2V8E72DlJx5qlH2yiQpVCu5cQbKUba5X7T1yNrQDF_s,10819
-hatchet_sdk/exceptions.py,sha256=4NC_3CgJhZVF4RYJ6zAb2i4mGqFUL4TiKGSTz99bV-w,4656
+hatchet_sdk/exceptions.py,sha256=jROkq-ZJZteXgK30iqKwtDibqeVqoUG04c_7L9Y2kP4,4704
 hatchet_sdk/features/cel.py,sha256=Uefvm2Du3SJCHiHsp12-URPxXJLe40uv0wK7guFucsE,4002
 hatchet_sdk/features/cron.py,sha256=k6Y-JJBPaf2Dtx-fwvNA2j7lTzHLBZpwVMA_u-p6Lvw,9723
 hatchet_sdk/features/filters.py,sha256=n6PPeRiqd5SOFlcx8V2strUaCGma9JPRAOLx44XpC0o,6443
@@ -299,7 +299,7 @@ hatchet_sdk/features/stubs.py,sha256=5NF43cgZKzh7qzYv_lLae4Xkh_zrz2wMj8M_OoTAAF8
 hatchet_sdk/features/tenant.py,sha256=xkhh5mRKCWbunk_S1iBmGR-DYR-F4mjxk8jLyYUyzNE,886
 hatchet_sdk/features/workers.py,sha256=DVdno28RmtlfhMJUkaPcOMHNKXCPV0RFrXtLqV6zWyE,2600
 hatchet_sdk/features/workflows.py,sha256=WTt58imAFRrEEB3M5hEEIBwNtrzdWbITFpgtsIqJNSM,4770
-hatchet_sdk/hatchet.py,sha256=8p5fjuI7VFlbYths0PyMDMfihcGSMmM4cBAjLm5JZHc,25702
+hatchet_sdk/hatchet.py,sha256=s-R9RQdFz-P8ABnotkcY_cM8XPMzAcFnPOZPKHX_Wlc,25961
 hatchet_sdk/labels.py,sha256=nATgxWE3lFxRTnfISEpoIRLGbMfAZsHF4lZTuG4Mfic,182
 hatchet_sdk/logger.py,sha256=5uOr52T4mImSQm1QvWT8HvZFK5WfPNh3Y1cBQZRFgUQ,333
 hatchet_sdk/metadata.py,sha256=XkRbhnghJJGCdVvF-uzyGBcNaTqpeQ3uiQvNNP1wyBc,107
@@ -308,9 +308,9 @@ hatchet_sdk/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 hatchet_sdk/rate_limit.py,sha256=ptFvHJU9rCzxfcITZEnRkKtJM-SY12WP84FzBwCKAPE,3277
 hatchet_sdk/runnables/action.py,sha256=B9fAKmpseENVjwqL-quHbBmoqbvPgwnypsFfFlDmyeY,4146
 hatchet_sdk/runnables/contextvars.py,sha256=jHrrewUlFPAT9f2u3VCsuSlDBtBoagEUtUzJOSmm4yk,1118
-hatchet_sdk/runnables/task.py,sha256=tNXsUFuua1_DYNeJHodwgOtm_4uX4-5dOp0apGq5h2g,15968
-hatchet_sdk/runnables/types.py,sha256=M23xSMTBPl12CXCCXZ0wbnqZ_sePB6CJKtOdipiNDlg,4362
-hatchet_sdk/runnables/workflow.py,sha256=-oz0q76N-NAi3Xon1VXkzDvZe0ZjbXR_O-3sTCGfokk,58876
+hatchet_sdk/runnables/task.py,sha256=iZjSiHRikh59xzicLibcEYaGNDS3oI8An3f0xhoWw74,16130
+hatchet_sdk/runnables/types.py,sha256=XHLHKAK_4dQlCc-M5nj83LCuAEjtRUrcUJH9P1L-5UM,4512
+hatchet_sdk/runnables/workflow.py,sha256=S_QHwLSVbd_AD9g2ZL9zJWRfwOGeVpDLupsuB99oJl0,60782
 hatchet_sdk/token.py,sha256=KjIiInwG5Kqd_FO4BSW1x_5Uc7PFbnzIVJqr50-ZldE,779
 hatchet_sdk/utils/aio.py,sha256=cu1rD_UZkShtfzi7iXMYwBBaCRdxJQTdUC0_mf8nU2E,499
 hatchet_sdk/utils/backoff.py,sha256=6B5Rb5nLKw_TqqgpJMYjIBV1PTTtbOMRZCveisVhg_I,353
@@ -320,15 +320,15 @@ hatchet_sdk/utils/opentelemetry.py,sha256=Ei_Xbb175O6hwM657uKES9MiHcJa0dyxjA5mzr
 hatchet_sdk/utils/proto_enums.py,sha256=v2gp_ZmIhPxURVXwz5lscllXwZXDl5XGXeL6gezw3o0,1241
 hatchet_sdk/utils/serde.py,sha256=5edZsFddc5KjfbBjHVizPKW6PGgzM5guaLQ5FAFrog8,1769
 hatchet_sdk/utils/timedelta_to_expression.py,sha256=YujnBnGn7lxtkUdKIeqmOiN_ZCGBpRPjCCSzcD3jxzA,644
-hatchet_sdk/utils/typing.py,sha256=zyRsfF-HO_aVhNx_vun-BRCbMWYDBps8aV0NczGUcho,1534
+hatchet_sdk/utils/typing.py,sha256=oHJa8D_-pMWb4OWIevy03hB90woyzRz7Rhs9iNMox7o,2998
 hatchet_sdk/worker/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 hatchet_sdk/worker/action_listener_process.py,sha256=B-geWWFL3fmCmONaSydOBYpc5T_ctpSzNog9NNPddqE,11528
 hatchet_sdk/worker/runner/run_loop_manager.py,sha256=BcdfxSvZdrxbeTZSUASwCTMKJe6pwLorHVKPTprkM2k,4176
-hatchet_sdk/worker/runner/runner.py,sha256=B546JN14g7RtRe7qeXs2cSgfosO3bJa8AkZ1a2NLe1k,19170
+hatchet_sdk/worker/runner/runner.py,sha256=sqCI99oTs8nfdiQGVzc9Rwik4lJOprN5doPSBnPIkSU,19380
 hatchet_sdk/worker/runner/utils/capture_logs.py,sha256=hNELuNHS0HmoMZJ7F7yIjZuahPEx9cOx9JZro618W74,4675
-hatchet_sdk/worker/worker.py,sha256=BP8A70hQxNvJ8VG8Osb5NTE4mwdcmEkiLwdtwkkNbuE,16868
+hatchet_sdk/worker/worker.py,sha256=bYr9wwl_YtmGlGMzBnc8H4E7peOTftJkUqf5HLmTLWs,17535
 hatchet_sdk/workflow_run.py,sha256=KcylcqRwKADtnzOTjoiVr1vdr7qTZFtDeD5aRS6A4Y8,2823
-hatchet_sdk-1.20.2.dist-info/METADATA,sha256=XmaVFuL278B4_a8Dn2U49u2EU5Jngd7AeSKoRLV-FPk,3343
-hatchet_sdk-1.20.2.dist-info/WHEEL,sha256=FMvqSimYX_P7y0a7UY-_Mc83r5zkBZsCYPm7Lr0Bsq4,88
-hatchet_sdk-1.20.2.dist-info/entry_points.txt,sha256=Un_76pcLse-ZGBlwebhQpnTPyQrripeHW8J7qmEpGOk,1400
-hatchet_sdk-1.20.2.dist-info/RECORD,,
+hatchet_sdk-1.21.0.dist-info/METADATA,sha256=6vsYh3ygnkf9CAuxTu0UCLyv5-JKP-T_kR__J9BlWZs,3565
+hatchet_sdk-1.21.0.dist-info/WHEEL,sha256=FMvqSimYX_P7y0a7UY-_Mc83r5zkBZsCYPm7Lr0Bsq4,88
+hatchet_sdk-1.21.0.dist-info/entry_points.txt,sha256=Un_76pcLse-ZGBlwebhQpnTPyQrripeHW8J7qmEpGOk,1400
+hatchet_sdk-1.21.0.dist-info/RECORD,,