hatchet-sdk 1.8.1__py3-none-any.whl → 1.9.0__py3-none-any.whl

This diff shows the content of publicly released versions of the package as published to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions exactly as they appear in the public registry.

Potentially problematic release.



hatchet_sdk/config.py CHANGED
@@ -64,13 +64,20 @@ class ClientConfig(BaseSettings):
      )

      worker_preset_labels: dict[str, str] = Field(default_factory=dict)
+
      enable_force_kill_sync_threads: bool = False
+     enable_thread_pool_monitoring: bool = False

      @model_validator(mode="after")
      def validate_token_and_tenant(self) -> "ClientConfig":
          if not self.token:
              raise ValueError("Token must be set")

+         if not self.token.startswith("ey"):
+             raise ValueError(
+                 f"Token must be a valid JWT. Hint: These are the first few characters of the token provided: {self.token[:5]}"
+             )
+
          if not self.tenant_id:
              self.tenant_id = get_tenant_id_from_jwt(self.token)

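The new startswith("ey") guard rejects values that cannot be JWTs before any tenant lookup happens (a JWT's base64url-encoded header always begins with "ey"). A minimal sketch of the same validation pattern in isolation, using a plain pydantic model with a hypothetical class name rather than the real pydantic-settings ClientConfig:

from pydantic import BaseModel, model_validator

class TokenCheckSketch(BaseModel):
    # Illustrative stand-in for ClientConfig.token; the real field is loaded from settings/env.
    token: str = ""

    @model_validator(mode="after")
    def validate_token(self) -> "TokenCheckSketch":
        if not self.token:
            raise ValueError("Token must be set")

        # JWTs are dot-separated base64url segments whose header always encodes to "ey..."
        if not self.token.startswith("ey"):
            raise ValueError(
                f"Token must be a valid JWT. Hint: first characters provided: {self.token[:5]}"
            )

        return self

TokenCheckSketch(token="eyJhbGciOiJIUzI1NiJ9.e30.sig")   # passes the prefix check
# TokenCheckSketch(token="abc123")                       # raises ValueError, hinting at "abc12"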
hatchet_sdk/context/context.py CHANGED
@@ -69,7 +69,7 @@ class Context:
      def trigger_data(self) -> JSONSerializableMapping:
          return self.data.triggers

-     def _task_output(self, task: "Task[TWorkflowInput, R]") -> "R":
+     def task_output(self, task: "Task[TWorkflowInput, R]") -> "R":
          from hatchet_sdk.runnables.types import R

          if self.was_skipped(task):
@@ -85,19 +85,6 @@ class Context:

          return parent_step_data

-     def task_output(self, task: "Task[TWorkflowInput, R]") -> "R":
-         from hatchet_sdk.runnables.types import R
-
-         ## If the task is async, we need to wrap its output in a coroutine
-         ## so that the type checker behaves right
-         async def _aio_output() -> "R":
-             return self._task_output(task)
-
-         if task.is_async_function:
-             return cast(R, _aio_output())
-
-         return self._task_output(task)
-
      def aio_task_output(self, task: "Task[TWorkflowInput, R]") -> "R":
          warn(
              "`aio_task_output` is deprecated. Use `task_output` instead.",
hatchet_sdk/hatchet.py CHANGED
@@ -32,6 +32,7 @@ from hatchet_sdk.runnables.types import (
  )
  from hatchet_sdk.runnables.workflow import BaseWorkflow, Workflow
  from hatchet_sdk.utils.timedelta_to_expression import Duration
+ from hatchet_sdk.utils.typing import CoroutineLike
  from hatchet_sdk.worker.worker import LifespanFn, Worker


@@ -300,7 +301,10 @@ class Hatchet:
          desired_worker_labels: dict[str, DesiredWorkerLabel] = {},
          backoff_factor: float | None = None,
          backoff_max_seconds: int | None = None,
-     ) -> Callable[[Callable[[EmptyModel, Context], R]], Standalone[EmptyModel, R]]: ...
+     ) -> Callable[
+         [Callable[[EmptyModel, Context], R | CoroutineLike[R]]],
+         Standalone[EmptyModel, R],
+     ]: ...

      @overload
      def task(
@@ -323,7 +327,8 @@ class Hatchet:
          backoff_factor: float | None = None,
          backoff_max_seconds: int | None = None,
      ) -> Callable[
-         [Callable[[TWorkflowInput, Context], R]], Standalone[TWorkflowInput, R]
+         [Callable[[TWorkflowInput, Context], R | CoroutineLike[R]]],
+         Standalone[TWorkflowInput, R],
      ]: ...

      def task(
@@ -346,9 +351,13 @@ class Hatchet:
          backoff_factor: float | None = None,
          backoff_max_seconds: int | None = None,
      ) -> (
-         Callable[[Callable[[EmptyModel, Context], R]], Standalone[EmptyModel, R]]
+         Callable[
+             [Callable[[EmptyModel, Context], R | CoroutineLike[R]]],
+             Standalone[EmptyModel, R],
+         ]
          | Callable[
-             [Callable[[TWorkflowInput, Context], R]], Standalone[TWorkflowInput, R]
+             [Callable[[TWorkflowInput, Context], R | CoroutineLike[R]]],
+             Standalone[TWorkflowInput, R],
          ]
      ):
          """
@@ -426,7 +435,7 @@ class Hatchet:
          )

          def inner(
-             func: Callable[[TWorkflowInput, Context], R]
+             func: Callable[[TWorkflowInput, Context], R | CoroutineLike[R]],
          ) -> Standalone[TWorkflowInput, R]:
              created_task = task_wrapper(func)

@@ -458,7 +467,8 @@ class Hatchet:
          backoff_factor: float | None = None,
          backoff_max_seconds: int | None = None,
      ) -> Callable[
-         [Callable[[EmptyModel, DurableContext], R]], Standalone[EmptyModel, R]
+         [Callable[[EmptyModel, DurableContext], R | CoroutineLike[R]]],
+         Standalone[EmptyModel, R],
      ]: ...

      @overload
@@ -482,7 +492,8 @@ class Hatchet:
          backoff_factor: float | None = None,
          backoff_max_seconds: int | None = None,
      ) -> Callable[
-         [Callable[[TWorkflowInput, DurableContext], R]], Standalone[TWorkflowInput, R]
+         [Callable[[TWorkflowInput, DurableContext], R | CoroutineLike[R]]],
+         Standalone[TWorkflowInput, R],
      ]: ...

      def durable_task(
@@ -505,9 +516,12 @@ class Hatchet:
          backoff_factor: float | None = None,
          backoff_max_seconds: int | None = None,
      ) -> (
-         Callable[[Callable[[EmptyModel, DurableContext], R]], Standalone[EmptyModel, R]]
+         Callable[
+             [Callable[[EmptyModel, DurableContext], R | CoroutineLike[R]]],
+             Standalone[EmptyModel, R],
+         ]
          | Callable[
-             [Callable[[TWorkflowInput, DurableContext], R]],
+             [Callable[[TWorkflowInput, DurableContext], R | CoroutineLike[R]]],
              Standalone[TWorkflowInput, R],
          ]
      ):
@@ -579,7 +593,7 @@ class Hatchet:
          )

          def inner(
-             func: Callable[[TWorkflowInput, DurableContext], R]
+             func: Callable[[TWorkflowInput, DurableContext], R | CoroutineLike[R]],
          ) -> Standalone[TWorkflowInput, R]:
              created_task = task_wrapper(func)

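Taken together with the Context.task_output change above, the widened overloads mean an async def can be decorated directly and its result read by a downstream task without awaiting: R is inferred as the plain return type, and the coroutine side is carried by CoroutineLike[R]. A rough sketch of how this reads in user code (workflow and task names are hypothetical, a configured HATCHET_CLIENT_TOKEN environment is assumed, and the decorator keywords follow the usual v1 API rather than anything shown in this diff):

from hatchet_sdk import Context, EmptyModel, Hatchet

hatchet = Hatchet()
wf = hatchet.workflow(name="example-workflow")

@wf.task()
async def fetch_value(input: EmptyModel, ctx: Context) -> dict:
    # R is inferred as dict, not Coroutine[Any, Any, dict]
    return {"value": 21}

@wf.task(parents=[fetch_value])
async def double_value(input: EmptyModel, ctx: Context) -> dict:
    # 1.8.x returned a coroutine here for async parents; 1.9.0 returns the dict itself.
    upstream = ctx.task_output(fetch_value)
    return {"doubled": upstream["value"] * 2}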
hatchet_sdk/runnables/task.py CHANGED
@@ -1,14 +1,5 @@
  from datetime import timedelta
- from typing import (
-     TYPE_CHECKING,
-     Any,
-     Awaitable,
-     Callable,
-     Generic,
-     Union,
-     cast,
-     get_type_hints,
- )
+ from typing import TYPE_CHECKING, Any, Callable, Generic, Union, cast, get_type_hints

  from hatchet_sdk.context.context import Context, DurableContext
  from hatchet_sdk.contracts.v1.shared.condition_pb2 import TaskConditions
@@ -27,7 +18,12 @@ from hatchet_sdk.runnables.types import (
      is_sync_fn,
  )
  from hatchet_sdk.utils.timedelta_to_expression import Duration, timedelta_to_expr
- from hatchet_sdk.utils.typing import TaskIOValidator, is_basemodel_subclass
+ from hatchet_sdk.utils.typing import (
+     AwaitableLike,
+     CoroutineLike,
+     TaskIOValidator,
+     is_basemodel_subclass,
+ )
  from hatchet_sdk.waits import (
      Action,
      Condition,
@@ -45,10 +41,10 @@ class Task(Generic[TWorkflowInput, R]):
      def __init__(
          self,
          _fn: Union[
-             Callable[[TWorkflowInput, Context], R]
-             | Callable[[TWorkflowInput, Context], Awaitable[R]],
-             Callable[[TWorkflowInput, DurableContext], R]
-             | Callable[[TWorkflowInput, DurableContext], Awaitable[R]],
+             Callable[[TWorkflowInput, Context], R | CoroutineLike[R]]
+             | Callable[[TWorkflowInput, Context], AwaitableLike[R]],
+             Callable[[TWorkflowInput, DurableContext], R | CoroutineLike[R]]
+             | Callable[[TWorkflowInput, DurableContext], AwaitableLike[R]],
          ],
          is_durable: bool,
          type: StepType,
hatchet_sdk/runnables/types.py CHANGED
@@ -1,17 +1,17 @@
  import asyncio
  from enum import Enum
- from typing import Any, Awaitable, Callable, ParamSpec, Type, TypeGuard, TypeVar, Union
+ from typing import Any, Callable, ParamSpec, Type, TypeGuard, TypeVar, Union

- from pydantic import BaseModel, ConfigDict, Field, model_validator
+ from pydantic import BaseModel, ConfigDict, Field

  from hatchet_sdk.context.context import Context, DurableContext
  from hatchet_sdk.contracts.v1.workflows_pb2 import Concurrency
  from hatchet_sdk.utils.timedelta_to_expression import Duration
- from hatchet_sdk.utils.typing import JSONSerializableMapping
+ from hatchet_sdk.utils.typing import AwaitableLike, JSONSerializableMapping

  ValidTaskReturnType = Union[BaseModel, JSONSerializableMapping, None]

- R = TypeVar("R", bound=Union[ValidTaskReturnType, Awaitable[ValidTaskReturnType]])
+ R = TypeVar("R", bound=ValidTaskReturnType)
  P = ParamSpec("P")


@@ -26,8 +26,6 @@ class StickyStrategy(str, Enum):

  class ConcurrencyLimitStrategy(str, Enum):
      CANCEL_IN_PROGRESS = "CANCEL_IN_PROGRESS"
-     DROP_NEWEST = "DROP_NEWEST"
-     QUEUE_NEWEST = "QUEUE_NEWEST"
      GROUP_ROUND_ROBIN = "GROUP_ROUND_ROBIN"
      CANCEL_NEWEST = "CANCEL_NEWEST"

@@ -82,31 +80,6 @@ class WorkflowConfig(BaseModel):

      task_defaults: TaskDefaults = TaskDefaults()

-     def _raise_for_invalid_expression(self, expr: str) -> None:
-         if not expr.startswith("input."):
-             return None
-
-         _, field = expr.split(".", maxsplit=2)
-
-         if field not in self.input_validator.model_fields.keys():
-             raise ValueError(
-                 f"The concurrency expression provided relies on the `{field}` field, which was not present in `{self.input_validator.__name__}`."
-             )
-
-     @model_validator(mode="after")
-     def validate_concurrency_expression(self) -> "WorkflowConfig":
-         if not self.concurrency:
-             return self
-
-         if isinstance(self.concurrency, list):
-             for item in self.concurrency:
-                 self._raise_for_invalid_expression(item.expression)
-
-         if isinstance(self.concurrency, ConcurrencyExpression):
-             self._raise_for_invalid_expression(self.concurrency.expression)
-
-         return self
-

  class StepType(str, Enum):
      DEFAULT = "default"
@@ -114,7 +87,7 @@ class StepType(str, Enum):
      ON_SUCCESS = "on_success"


- AsyncFunc = Callable[[TWorkflowInput, Context], Awaitable[R]]
+ AsyncFunc = Callable[[TWorkflowInput, Context], AwaitableLike[R]]
  SyncFunc = Callable[[TWorkflowInput, Context], R]
  TaskFunc = Union[AsyncFunc[TWorkflowInput, R], SyncFunc[TWorkflowInput, R]]

@@ -131,7 +104,7 @@ def is_sync_fn(
      return not asyncio.iscoroutinefunction(fn)


- DurableAsyncFunc = Callable[[TWorkflowInput, DurableContext], Awaitable[R]]
+ DurableAsyncFunc = Callable[[TWorkflowInput, DurableContext], AwaitableLike[R]]
  DurableSyncFunc = Callable[[TWorkflowInput, DurableContext], R]
  DurableTaskFunc = Union[
      DurableAsyncFunc[TWorkflowInput, R], DurableSyncFunc[TWorkflowInput, R]
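Two of these removals are visible to user code: DROP_NEWEST and QUEUE_NEWEST are no longer members of ConcurrencyLimitStrategy, and the client-side validator that checked input.-prefixed concurrency expressions against the input model's fields is gone, so such mistakes are no longer caught at registration time on the client. A sketch of a concurrency definition using one of the remaining members (assuming the usual top-level ConcurrencyExpression constructor; the expression field name is hypothetical):

from hatchet_sdk import ConcurrencyExpression, ConcurrencyLimitStrategy

# Remaining members after 1.9.0: CANCEL_IN_PROGRESS, GROUP_ROUND_ROBIN, CANCEL_NEWEST.
concurrency = ConcurrencyExpression(
    expression="input.user_id",   # no longer validated against the input model on the client
    max_runs=1,
    limit_strategy=ConcurrencyLimitStrategy.CANCEL_NEWEST,
)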
hatchet_sdk/runnables/workflow.py CHANGED
@@ -35,7 +35,7 @@ from hatchet_sdk.runnables.types import (
  )
  from hatchet_sdk.utils.proto_enums import convert_python_enum_to_proto
  from hatchet_sdk.utils.timedelta_to_expression import Duration
- from hatchet_sdk.utils.typing import JSONSerializableMapping
+ from hatchet_sdk.utils.typing import CoroutineLike, JSONSerializableMapping
  from hatchet_sdk.waits import Condition, OrGroup
  from hatchet_sdk.workflow_run import WorkflowRunRef

@@ -132,34 +132,6 @@ class BaseWorkflow(Generic[TWorkflowInput]):
      def _create_action_name(self, step: Task[TWorkflowInput, Any]) -> str:
          return self.service_name + ":" + step.name

-     def _raise_for_invalid_concurrency(
-         self, concurrency: ConcurrencyExpression
-     ) -> bool:
-         expr = concurrency.expression
-
-         if not expr.startswith("input."):
-             return True
-
-         _, field = expr.split(".", maxsplit=2)
-
-         if field not in self.config.input_validator.model_fields.keys():
-             raise ValueError(
-                 f"The concurrency expression provided relies on the `{field}` field, which was not present in `{self.config.input_validator.__name__}`."
-             )
-
-         return True
-
-     def _validate_priority(self, default_priority: int | None) -> int | None:
-         validated_priority = (
-             max(1, min(3, default_priority)) if default_priority else None
-         )
-         if validated_priority != default_priority:
-             logger.warning(
-                 "Warning: Default Priority Must be between 1 and 3 -- inclusively. Adjusted to be within the range."
-             )
-
-         return validated_priority
-
      def _is_leaf_task(self, task: Task[TWorkflowInput, Any]) -> bool:
          return not any(task in t.parents for t in self.tasks if task != t)

@@ -573,10 +545,7 @@ class Workflow(BaseWorkflow[TWorkflowInput]):
      def _parse_task_name(
          self,
          name: str | None,
-         func: (
-             Callable[[TWorkflowInput, Context], R]
-             | Callable[[TWorkflowInput, DurableContext], R]
-         ),
+         func: Callable[..., Any],
      ) -> str:
          non_null_name = name or func.__name__

@@ -597,7 +566,10 @@ class Workflow(BaseWorkflow[TWorkflowInput]):
          wait_for: list[Condition | OrGroup] = [],
          skip_if: list[Condition | OrGroup] = [],
          cancel_if: list[Condition | OrGroup] = [],
-     ) -> Callable[[Callable[[TWorkflowInput, Context], R]], Task[TWorkflowInput, R]]:
+     ) -> Callable[
+         [Callable[[TWorkflowInput, Context], R | CoroutineLike[R]]],
+         Task[TWorkflowInput, R],
+     ]:
          """
          A decorator to transform a function into a Hatchet task that runs as part of a workflow.

@@ -640,7 +612,7 @@ class Workflow(BaseWorkflow[TWorkflowInput]):
          )

          def inner(
-             func: Callable[[TWorkflowInput, Context], R]
+             func: Callable[[TWorkflowInput, Context], R | CoroutineLike[R]],
          ) -> Task[TWorkflowInput, R]:
              task = Task(
                  _fn=func,
@@ -687,7 +659,8 @@ class Workflow(BaseWorkflow[TWorkflowInput]):
          skip_if: list[Condition | OrGroup] = [],
          cancel_if: list[Condition | OrGroup] = [],
      ) -> Callable[
-         [Callable[[TWorkflowInput, DurableContext], R]], Task[TWorkflowInput, R]
+         [Callable[[TWorkflowInput, DurableContext], R | CoroutineLike[R]]],
+         Task[TWorkflowInput, R],
      ]:
          """
          A decorator to transform a function into a durable Hatchet task that runs as part of a workflow.
@@ -735,7 +708,7 @@ class Workflow(BaseWorkflow[TWorkflowInput]):
          )

          def inner(
-             func: Callable[[TWorkflowInput, DurableContext], R]
+             func: Callable[[TWorkflowInput, DurableContext], R | CoroutineLike[R]],
          ) -> Task[TWorkflowInput, R]:
              task = Task(
                  _fn=func,
@@ -776,7 +749,10 @@ class Workflow(BaseWorkflow[TWorkflowInput]):
          backoff_factor: float | None = None,
          backoff_max_seconds: int | None = None,
          concurrency: list[ConcurrencyExpression] = [],
-     ) -> Callable[[Callable[[TWorkflowInput, Context], R]], Task[TWorkflowInput, R]]:
+     ) -> Callable[
+         [Callable[[TWorkflowInput, Context], R | CoroutineLike[R]]],
+         Task[TWorkflowInput, R],
+     ]:
          """
          A decorator to transform a function into a Hatchet on-failure task that runs as the last step in a workflow that had at least one task fail.

@@ -800,7 +776,7 @@ class Workflow(BaseWorkflow[TWorkflowInput]):
          """

          def inner(
-             func: Callable[[TWorkflowInput, Context], R]
+             func: Callable[[TWorkflowInput, Context], R | CoroutineLike[R]],
          ) -> Task[TWorkflowInput, R]:
              task = Task(
                  is_durable=False,
@@ -836,7 +812,10 @@ class Workflow(BaseWorkflow[TWorkflowInput]):
          backoff_factor: float | None = None,
          backoff_max_seconds: int | None = None,
          concurrency: list[ConcurrencyExpression] = [],
-     ) -> Callable[[Callable[[TWorkflowInput, Context], R]], Task[TWorkflowInput, R]]:
+     ) -> Callable[
+         [Callable[[TWorkflowInput, Context], R | CoroutineLike[R]]],
+         Task[TWorkflowInput, R],
+     ]:
          """
          A decorator to transform a function into a Hatchet on-success task that runs as the last step in a workflow that had all upstream tasks succeed.

@@ -860,7 +839,7 @@ class Workflow(BaseWorkflow[TWorkflowInput]):
          """

          def inner(
-             func: Callable[[TWorkflowInput, Context], R]
+             func: Callable[[TWorkflowInput, Context], R | CoroutineLike[R]],
          ) -> Task[TWorkflowInput, R]:
              task = Task(
                  is_durable=False,
hatchet_sdk/utils/typing.py CHANGED
@@ -1,4 +1,15 @@
- from typing import Any, Mapping, Type, TypeGuard
+ import sys
+ from typing import (
+     Any,
+     Awaitable,
+     Coroutine,
+     Generator,
+     Mapping,
+     Type,
+     TypeAlias,
+     TypeGuard,
+     TypeVar,
+ )

  from pydantic import BaseModel

@@ -16,3 +27,13 @@ class TaskIOValidator(BaseModel):


  JSONSerializableMapping = Mapping[str, Any]
+
+
+ _T_co = TypeVar("_T_co", covariant=True)
+
+ if sys.version_info >= (3, 12):
+     AwaitableLike: TypeAlias = Awaitable[_T_co]  # noqa: Y047
+     CoroutineLike: TypeAlias = Coroutine[Any, Any, _T_co]  # noqa: Y047
+ else:
+     AwaitableLike: TypeAlias = Generator[Any, None, _T_co] | Awaitable[_T_co]
+     CoroutineLike: TypeAlias = Generator[Any, None, _T_co] | Coroutine[Any, Any, _T_co]
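On 3.12+ the aliases collapse to plain Awaitable/Coroutine; on older interpreters they also admit generator-shaped awaitables. Whatever the reason for the version split, the point of CoroutineLike in the signatures above is to let a single TypeVar R stand for the plain return type while the async case is expressed at the function-type level. A minimal, SDK-independent sketch of that pattern (names here are not from the SDK):

import asyncio
from typing import Any, Callable, Coroutine, TypeVar, Union

R = TypeVar("R")

# Same shape as the 3.12+ CoroutineLike branch above, spelled out with Union for the example.
def accepts(fn: Callable[[int], Union[R, Coroutine[Any, Any, R]]]) -> Callable[[int], Union[R, Coroutine[Any, Any, R]]]:
    return fn

@accepts
def doubled(x: int) -> int:              # R is inferred as int
    return x * 2

@accepts
async def doubled_async(x: int) -> int:  # returns Coroutine[Any, Any, int], matching the second arm
    return x * 2

print(doubled(2))                        # 4
print(asyncio.run(doubled_async(2)))     # 4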
hatchet_sdk/worker/action_listener_process.py CHANGED
@@ -291,6 +291,9 @@ class WorkerActionListenerProcess:
          self.event_queue.put(STOP_LOOP)

      async def exit_gracefully(self) -> None:
+         if self.listener:
+             self.listener.stop_signal = True
+
          await self.pause_task_assignment()

          if self.killing:
hatchet_sdk/worker/runner/runner.py CHANGED
@@ -105,6 +105,9 @@ class Runner:

          self.lifespan_context = lifespan_context

+         if self.config.enable_thread_pool_monitoring:
+             self.start_background_monitoring()
+
      def create_workflow_run_url(self, action: Action) -> str:
          return f"{self.config.server_url}/workflow-runs/{action.workflow_run_id}?tenant={action.tenant_id}"

@@ -270,6 +273,47 @@ class Runner:
          finally:
              self.cleanup_run_id(action.key)

+     async def log_thread_pool_status(self) -> None:
+         thread_pool_details = {
+             "max_workers": self.slots,
+             "total_threads": len(self.thread_pool._threads),
+             "idle_threads": self.thread_pool._idle_semaphore._value,
+             "active_threads": len(self.threads),
+             "pending_tasks": len(self.tasks),
+             "queue_size": self.thread_pool._work_queue.qsize(),
+             "threads_alive": sum(1 for t in self.thread_pool._threads if t.is_alive()),
+             "threads_daemon": sum(1 for t in self.thread_pool._threads if t.daemon),
+         }
+
+         logger.warning("Thread pool detailed status %s", thread_pool_details)
+
+     async def _start_monitoring(self) -> None:
+         logger.debug("Thread pool monitoring started")
+         try:
+             while True:
+                 await self.log_thread_pool_status()
+
+                 for key in self.threads.keys():
+                     if key not in self.tasks:
+                         logger.debug(f"Potential zombie thread found for key {key}")
+
+                 for key, task in self.tasks.items():
+                     if task.done() and key in self.threads:
+                         logger.debug(
+                             f"Task is done but thread still exists for key {key}"
+                         )
+
+                 await asyncio.sleep(60)
+         except asyncio.CancelledError:
+             logger.warning("Thread pool monitoring task cancelled")
+         except Exception as e:
+             logger.exception(f"Error in thread pool monitoring: {e}")
+
+     def start_background_monitoring(self) -> None:
+         loop = asyncio.get_event_loop()
+         self.monitoring_task = loop.create_task(self._start_monitoring())
+         logger.debug("Started thread pool monitoring background task")
+
      def cleanup_run_id(self, key: ActionKey) -> None:
          if key in self.tasks:
              del self.tasks[key]
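The monitoring loop is opt-in via the new enable_thread_pool_monitoring flag added to ClientConfig above; when enabled, it logs a pool snapshot roughly once a minute and flags thread/task bookkeeping mismatches. A sketch of turning it on, assuming the usual construction of the client from a ClientConfig (the worker name is illustrative, and the token is expected to come from HATCHET_CLIENT_TOKEN since ClientConfig is a settings model):

from hatchet_sdk import ClientConfig, Hatchet

hatchet = Hatchet(
    config=ClientConfig(
        enable_thread_pool_monitoring=True,    # new in 1.9.0; default is False
        enable_force_kill_sync_threads=False,  # pre-existing flag, shown for contrast
    )
)

worker = hatchet.worker("monitored-worker")  # the Runner behind this worker starts the background task
worker.start()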
@@ -419,23 +463,18 @@ class Runner:
          try:
              # call cancel to signal the context to stop
              if key in self.contexts:
-                 context = self.contexts.get(key)
-
-                 if context:
-                     context._set_cancellation_flag()
+                 self.contexts[key]._set_cancellation_flag()

              await asyncio.sleep(1)

              if key in self.tasks:
-                 future = self.tasks.get(key)
-
-                 if future:
-                     future.cancel()
+                 self.tasks[key].cancel()

              # check if thread is still running, if so, print a warning
              if key in self.threads:
-                 thread = self.threads.get(key)
-                 if thread and self.config.enable_force_kill_sync_threads:
+                 thread = self.threads[key]
+
+                 if self.config.enable_force_kill_sync_threads:
                      self.force_kill_thread(thread)
                      await asyncio.sleep(1)

hatchet_sdk-1.8.1.dist-info/METADATA → hatchet_sdk-1.9.0.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: hatchet-sdk
- Version: 1.8.1
+ Version: 1.9.0
  Summary:
  License: MIT
  Author: Alexander Belanger
hatchet_sdk-1.8.1.dist-info/RECORD → hatchet_sdk-1.9.0.dist-info/RECORD CHANGED
@@ -220,10 +220,10 @@ hatchet_sdk/clients/rest/models/workflow_workers_count.py,sha256=qhzqfvjjIDyARki
  hatchet_sdk/clients/rest/rest.py,sha256=zZHTzgl-NBdcK6XhG23m_s9RKRONGPPItzGe407s7GA,9262
  hatchet_sdk/clients/rest/tenacity_utils.py,sha256=n6QvwuGwinLQpiWNU5GxrDNhFBE8_wZdg3WNur21rJ0,1055
  hatchet_sdk/clients/v1/api_client.py,sha256=mJQUZ3cOxlFJiwWKK5F8jBxcpNZ7A2292HucrBqurbg,1205
- hatchet_sdk/config.py,sha256=jJA76BOvVdfOQHy6TKclAvr2qyblcM-Pz5J-hVAdpQ4,3588
+ hatchet_sdk/config.py,sha256=Jm-3ja29cpDf2MwfJAJSpL1qCLmYg_CPC29GhN-eUBY,3853
  hatchet_sdk/connection.py,sha256=B5gT5NL9BBB5-l9U_cN6pMlraQk880rEYMnqaK_dgL0,2590
  hatchet_sdk/context/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- hatchet_sdk/context/context.py,sha256=c45AadcE4mm-1ahSoj2khjcdUcYZzqYBQkndxbs2ock,9519
+ hatchet_sdk/context/context.py,sha256=CGFqfdaEflG4_um618o8Lo_WPu0UJCJBYZLV8PDibw8,9073
  hatchet_sdk/context/worker_context.py,sha256=OVcEWvdT_Kpd0nlg61VAPUgIPSFzSLs0aSrXWj-1GX4,974
  hatchet_sdk/contracts/dispatcher_pb2.py,sha256=wO-x-NiHjMuC55RArpcvQX7QAjpSrjq1n70mFVwOD0Q,14543
  hatchet_sdk/contracts/dispatcher_pb2.pyi,sha256=iOcGfGtoyvmT58yCrQDvst_o0VPbq1-9rir-0jH_140,18559
@@ -252,7 +252,7 @@ hatchet_sdk/features/runs.py,sha256=9kmn2AM1XVFZS9P_lR-hh6SXwW42Kh73l0WUBl5VJh8,
  hatchet_sdk/features/scheduled.py,sha256=1kNR8AxN1UlabNroU9TtVbptZEXfqTVE25Gxmh2lABs,8928
  hatchet_sdk/features/workers.py,sha256=vD6j7GCttu0fm23_XmBMdE0IuX4mUbL0adgMoC8Sk_E,2571
  hatchet_sdk/features/workflows.py,sha256=PFJsGXTHVfdDBDQ9WcQXcAzr7u-dN7vUpwbZ9fH0ZD8,3976
- hatchet_sdk/hatchet.py,sha256=HRa__owQMeSRhtXzEfObap3ZKxUeNq2FJKOZEUVZ_CI,21795
+ hatchet_sdk/hatchet.py,sha256=GYiIAOXUmtVCNCOvWkIOeAGfboxCdiiwPTmd6e5Q9s8,22171
  hatchet_sdk/labels.py,sha256=nATgxWE3lFxRTnfISEpoIRLGbMfAZsHF4lZTuG4Mfic,182
  hatchet_sdk/logger.py,sha256=5uOr52T4mImSQm1QvWT8HvZFK5WfPNh3Y1cBQZRFgUQ,333
  hatchet_sdk/metadata.py,sha256=XkRbhnghJJGCdVvF-uzyGBcNaTqpeQ3uiQvNNP1wyBc,107
@@ -261,14 +261,14 @@ hatchet_sdk/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  hatchet_sdk/rate_limit.py,sha256=TwbCuggiZaWpYuo4mjVLlE-z1OfQ2mRBiVvCSaG3lv4,3919
  hatchet_sdk/runnables/contextvars.py,sha256=6MDocAMmlyiRW37oQ1jyx10tAlJs-xgDjR3xPoPz05g,426
  hatchet_sdk/runnables/standalone.py,sha256=pCAIS40q9ltkK7K97ff79MzE73_k22ymZdrLdFdFwD8,15233
- hatchet_sdk/runnables/task.py,sha256=5VOgi413eH8Gz9_XBxFTfbfLITTpPJYwRB2ZXshysW8,7014
- hatchet_sdk/runnables/types.py,sha256=OBhqa6rvEaY4ypKtUpKHHyQxOXKYvxao_-Hknu5jVns,4802
- hatchet_sdk/runnables/workflow.py,sha256=Ucjguf3SxyJzYe7R427U76iUOLDoHQMzwOQ7g9xYuQA,39765
+ hatchet_sdk/runnables/task.py,sha256=AOpULMr3hqxn4W58Lh9oEvsXn_7PPB_c_sIqHRfQn5Q,7063
+ hatchet_sdk/runnables/types.py,sha256=5jf1c7_0QVUFh0bcXi4hIiaOdUiyhspU4LltoAFCwlM,3776
+ hatchet_sdk/runnables/workflow.py,sha256=6DjM8Qt4ALbxl3_3BQgxdt3FR8tqGCv_JzRIYtrQjzU,38920
  hatchet_sdk/token.py,sha256=KjIiInwG5Kqd_FO4BSW1x_5Uc7PFbnzIVJqr50-ZldE,779
  hatchet_sdk/utils/backoff.py,sha256=6B5Rb5nLKw_TqqgpJMYjIBV1PTTtbOMRZCveisVhg_I,353
  hatchet_sdk/utils/proto_enums.py,sha256=0UybwE3s7TcqmzoQSO8YnhgAKOS8WZXsyPchB8-eksw,1247
  hatchet_sdk/utils/timedelta_to_expression.py,sha256=kwuYZ51JdDdc3h9Sw4vgBFmJBMPkgbGJA4v9uO4_NGk,660
- hatchet_sdk/utils/typing.py,sha256=huflXWR7fvRfIFYdqQIrQmn9jtukzOWoTpW3AXGk5c0,427
+ hatchet_sdk/utils/typing.py,sha256=P6-Nd5K_Hk-VhEkGj6LYki_9ddw05rJtzRA56qGXHag,914
  hatchet_sdk/v0/__init__.py,sha256=YNh-0rPHS0rcphmykJ1N2NMfgvERF4oJpBtx3IH_E_M,9657
  hatchet_sdk/v0/client.py,sha256=G1RDZln9Og7tRQulogXkZw8TsVlx7f0VvmtFI_VAe6E,3495
  hatchet_sdk/v0/clients/admin.py,sha256=l6fW21p_3pROz8mVB2QOXX0Pg5poeLXcBNEm6Uids30,18071
@@ -502,13 +502,13 @@ hatchet_sdk/v0/workflow.py,sha256=d4o425efk7J3JgLIge34MW_A3pzwnwSRtwEOgIqM2pc,93
  hatchet_sdk/v0/workflow_run.py,sha256=jsEZprXshrSV7i_TtL5uoCL03D18zQ3NeJCq7mp97Dg,1752
  hatchet_sdk/waits.py,sha256=L2xZUcmrQX-pTVXWv1W8suMoYU_eA0uowpollauQmOM,3893
  hatchet_sdk/worker/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- hatchet_sdk/worker/action_listener_process.py,sha256=KxS7-wBpfKnsq0LNSvk-MG442Lh60iQMy3VpD1FW3mU,11703
+ hatchet_sdk/worker/action_listener_process.py,sha256=t6COI8KmYoYooFLMZY5KLNPQmJrIrs4luoVZxPnKN_I,11775
  hatchet_sdk/worker/runner/run_loop_manager.py,sha256=RNWKDCjR57nJ0LCoLUMi0_3pnmpqyo80mz_RaxHYGIc,3812
- hatchet_sdk/worker/runner/runner.py,sha256=z8ri-viK_avAfF6zgbVNBc-rztFDbxSwng3RHsof92w,17063
+ hatchet_sdk/worker/runner/runner.py,sha256=CdsWl0l4tFp8Yy35uLsR74jNsBs-fIHiDAJZwPSGrKg,18805
  hatchet_sdk/worker/runner/utils/capture_logs.py,sha256=nHRPSiDBqzhObM7i2X7t03OupVFnE7kQBdR2Ckgg-2w,2709
  hatchet_sdk/worker/worker.py,sha256=SfUeYYGfPDVa7Hr1Tdgrzn_A0T-e_apIzW26BhsiB70,16101
  hatchet_sdk/workflow_run.py,sha256=ZwH0HLFGFVXz6jbiqSv4w0Om2XuR52Tzzw6LH4y65jQ,2765
- hatchet_sdk-1.8.1.dist-info/METADATA,sha256=OGkuC1TmJprUiDEHQ9i1eDjhDS6SxMsaX7A22oApNSA,3635
- hatchet_sdk-1.8.1.dist-info/WHEEL,sha256=FMvqSimYX_P7y0a7UY-_Mc83r5zkBZsCYPm7Lr0Bsq4,88
- hatchet_sdk-1.8.1.dist-info/entry_points.txt,sha256=Un_76pcLse-ZGBlwebhQpnTPyQrripeHW8J7qmEpGOk,1400
- hatchet_sdk-1.8.1.dist-info/RECORD,,
+ hatchet_sdk-1.9.0.dist-info/METADATA,sha256=sxi4zDvyHWlKB65XhEpehELIj0J6-ugHiR9egq78fhU,3635
+ hatchet_sdk-1.9.0.dist-info/WHEEL,sha256=FMvqSimYX_P7y0a7UY-_Mc83r5zkBZsCYPm7Lr0Bsq4,88
+ hatchet_sdk-1.9.0.dist-info/entry_points.txt,sha256=Un_76pcLse-ZGBlwebhQpnTPyQrripeHW8J7qmEpGOk,1400
+ hatchet_sdk-1.9.0.dist-info/RECORD,,