hatchet-sdk 1.12.3__py3-none-any.whl → 1.14.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of hatchet-sdk might be problematic.

Files changed (83)
  1. hatchet_sdk/__init__.py +54 -40
  2. hatchet_sdk/clients/admin.py +18 -23
  3. hatchet_sdk/clients/dispatcher/action_listener.py +4 -3
  4. hatchet_sdk/clients/dispatcher/dispatcher.py +1 -4
  5. hatchet_sdk/clients/event_ts.py +2 -1
  6. hatchet_sdk/clients/events.py +16 -12
  7. hatchet_sdk/clients/listeners/durable_event_listener.py +4 -2
  8. hatchet_sdk/clients/listeners/pooled_listener.py +2 -2
  9. hatchet_sdk/clients/listeners/run_event_listener.py +7 -8
  10. hatchet_sdk/clients/listeners/workflow_listener.py +14 -6
  11. hatchet_sdk/clients/rest/api_response.py +3 -2
  12. hatchet_sdk/clients/rest/models/semaphore_slots.py +1 -1
  13. hatchet_sdk/clients/rest/models/v1_task_summary.py +5 -0
  14. hatchet_sdk/clients/rest/models/v1_workflow_run_details.py +11 -1
  15. hatchet_sdk/clients/rest/models/workflow_version.py +5 -0
  16. hatchet_sdk/clients/rest/tenacity_utils.py +6 -8
  17. hatchet_sdk/config.py +2 -0
  18. hatchet_sdk/connection.py +10 -4
  19. hatchet_sdk/context/context.py +170 -46
  20. hatchet_sdk/context/worker_context.py +4 -7
  21. hatchet_sdk/contracts/dispatcher_pb2.py +38 -38
  22. hatchet_sdk/contracts/dispatcher_pb2.pyi +4 -2
  23. hatchet_sdk/contracts/events_pb2.py +13 -13
  24. hatchet_sdk/contracts/events_pb2.pyi +4 -2
  25. hatchet_sdk/contracts/v1/workflows_pb2.py +1 -1
  26. hatchet_sdk/contracts/v1/workflows_pb2.pyi +2 -2
  27. hatchet_sdk/exceptions.py +103 -1
  28. hatchet_sdk/features/cron.py +2 -2
  29. hatchet_sdk/features/filters.py +12 -21
  30. hatchet_sdk/features/runs.py +4 -4
  31. hatchet_sdk/features/scheduled.py +8 -9
  32. hatchet_sdk/hatchet.py +65 -64
  33. hatchet_sdk/opentelemetry/instrumentor.py +20 -20
  34. hatchet_sdk/runnables/action.py +1 -2
  35. hatchet_sdk/runnables/contextvars.py +19 -0
  36. hatchet_sdk/runnables/task.py +37 -29
  37. hatchet_sdk/runnables/types.py +9 -8
  38. hatchet_sdk/runnables/workflow.py +57 -42
  39. hatchet_sdk/utils/proto_enums.py +4 -4
  40. hatchet_sdk/utils/timedelta_to_expression.py +2 -3
  41. hatchet_sdk/utils/typing.py +11 -17
  42. hatchet_sdk/v0/__init__.py +7 -7
  43. hatchet_sdk/v0/clients/admin.py +7 -7
  44. hatchet_sdk/v0/clients/dispatcher/action_listener.py +8 -8
  45. hatchet_sdk/v0/clients/dispatcher/dispatcher.py +9 -9
  46. hatchet_sdk/v0/clients/events.py +3 -3
  47. hatchet_sdk/v0/clients/rest/tenacity_utils.py +1 -1
  48. hatchet_sdk/v0/clients/run_event_listener.py +3 -3
  49. hatchet_sdk/v0/clients/workflow_listener.py +5 -5
  50. hatchet_sdk/v0/context/context.py +6 -6
  51. hatchet_sdk/v0/hatchet.py +4 -4
  52. hatchet_sdk/v0/opentelemetry/instrumentor.py +1 -1
  53. hatchet_sdk/v0/rate_limit.py +1 -1
  54. hatchet_sdk/v0/v2/callable.py +4 -4
  55. hatchet_sdk/v0/v2/concurrency.py +2 -2
  56. hatchet_sdk/v0/v2/hatchet.py +3 -3
  57. hatchet_sdk/v0/worker/action_listener_process.py +6 -6
  58. hatchet_sdk/v0/worker/runner/run_loop_manager.py +1 -1
  59. hatchet_sdk/v0/worker/runner/runner.py +10 -10
  60. hatchet_sdk/v0/worker/runner/utils/capture_logs.py +1 -1
  61. hatchet_sdk/v0/worker/worker.py +2 -2
  62. hatchet_sdk/v0/workflow.py +3 -3
  63. hatchet_sdk/waits.py +6 -5
  64. hatchet_sdk/worker/action_listener_process.py +33 -13
  65. hatchet_sdk/worker/runner/run_loop_manager.py +15 -11
  66. hatchet_sdk/worker/runner/runner.py +142 -80
  67. hatchet_sdk/worker/runner/utils/capture_logs.py +72 -31
  68. hatchet_sdk/worker/worker.py +30 -26
  69. hatchet_sdk/workflow_run.py +4 -2
  70. {hatchet_sdk-1.12.3.dist-info → hatchet_sdk-1.14.0.dist-info}/METADATA +1 -1
  71. {hatchet_sdk-1.12.3.dist-info → hatchet_sdk-1.14.0.dist-info}/RECORD +73 -83
  72. hatchet_sdk/v0/contracts/dispatcher_pb2.py +0 -102
  73. hatchet_sdk/v0/contracts/dispatcher_pb2.pyi +0 -387
  74. hatchet_sdk/v0/contracts/dispatcher_pb2_grpc.py +0 -621
  75. hatchet_sdk/v0/contracts/events_pb2.py +0 -46
  76. hatchet_sdk/v0/contracts/events_pb2.pyi +0 -87
  77. hatchet_sdk/v0/contracts/events_pb2_grpc.py +0 -274
  78. hatchet_sdk/v0/contracts/workflows_pb2.py +0 -80
  79. hatchet_sdk/v0/contracts/workflows_pb2.pyi +0 -312
  80. hatchet_sdk/v0/contracts/workflows_pb2_grpc.py +0 -277
  81. hatchet_sdk/v0/logger.py +0 -13
  82. {hatchet_sdk-1.12.3.dist-info → hatchet_sdk-1.14.0.dist-info}/WHEEL +0 -0
  83. {hatchet_sdk-1.12.3.dist-info → hatchet_sdk-1.14.0.dist-info}/entry_points.txt +0 -0
hatchet_sdk/hatchet.py CHANGED
@@ -1,8 +1,9 @@
  import asyncio
  import logging
+ from collections.abc import Callable
  from datetime import timedelta
  from functools import cached_property
- from typing import Any, Callable, Type, Union, cast, overload
+ from typing import Any, cast, overload

  from hatchet_sdk import Context, DurableContext
  from hatchet_sdk.client import Client
@@ -181,8 +182,8 @@ class Hatchet:
  name: str,
  slots: int = 100,
  durable_slots: int = 1_000,
- labels: dict[str, Union[str, int]] = {},
- workflows: list[BaseWorkflow[Any]] = [],
+ labels: dict[str, str | int] | None = None,
+ workflows: list[BaseWorkflow[Any]] | None = None,
  lifespan: LifespanFn | None = None,
  ) -> Worker:
  """
@@ -227,14 +228,14 @@ class Hatchet:
  name: str,
  description: str | None = None,
  input_validator: None = None,
- on_events: list[str] = [],
- on_crons: list[str] = [],
+ on_events: list[str] | None = None,
+ on_crons: list[str] | None = None,
  version: str | None = None,
  sticky: StickyStrategy | None = None,
  default_priority: int = 1,
  concurrency: ConcurrencyExpression | list[ConcurrencyExpression] | None = None,
  task_defaults: TaskDefaults = TaskDefaults(),
- default_filters: list[DefaultFilter] = [],
+ default_filters: list[DefaultFilter] | None = None,
  ) -> Workflow[EmptyModel]: ...

  @overload
@@ -243,15 +244,15 @@ class Hatchet:
  *,
  name: str,
  description: str | None = None,
- input_validator: Type[TWorkflowInput],
- on_events: list[str] = [],
- on_crons: list[str] = [],
+ input_validator: type[TWorkflowInput],
+ on_events: list[str] | None = None,
+ on_crons: list[str] | None = None,
  version: str | None = None,
  sticky: StickyStrategy | None = None,
  default_priority: int = 1,
  concurrency: ConcurrencyExpression | list[ConcurrencyExpression] | None = None,
  task_defaults: TaskDefaults = TaskDefaults(),
- default_filters: list[DefaultFilter] = [],
+ default_filters: list[DefaultFilter] | None = None,
  ) -> Workflow[TWorkflowInput]: ...

  def workflow(
@@ -259,15 +260,15 @@ class Hatchet:
  *,
  name: str,
  description: str | None = None,
- input_validator: Type[TWorkflowInput] | None = None,
- on_events: list[str] = [],
- on_crons: list[str] = [],
+ input_validator: type[TWorkflowInput] | None = None,
+ on_events: list[str] | None = None,
+ on_crons: list[str] | None = None,
  version: str | None = None,
  sticky: StickyStrategy | None = None,
  default_priority: int = 1,
  concurrency: ConcurrencyExpression | list[ConcurrencyExpression] | None = None,
  task_defaults: TaskDefaults = TaskDefaults(),
- default_filters: list[DefaultFilter] = [],
+ default_filters: list[DefaultFilter] | None = None,
  ) -> Workflow[EmptyModel] | Workflow[TWorkflowInput]:
  """
  Define a Hatchet workflow, which can then declare `task`s and be `run`, `schedule`d, and so on.
@@ -302,15 +303,15 @@ class Hatchet:
  name=name,
  version=version,
  description=description,
- on_events=on_events,
- on_crons=on_crons,
+ on_events=on_events or [],
+ on_crons=on_crons or [],
  sticky=sticky,
  concurrency=concurrency,
  input_validator=input_validator
- or cast(Type[TWorkflowInput], EmptyModel),
+ or cast(type[TWorkflowInput], EmptyModel),
  task_defaults=task_defaults,
  default_priority=default_priority,
- default_filters=default_filters,
+ default_filters=default_filters or [],
  ),
  self,
  )
@@ -322,8 +323,8 @@ class Hatchet:
  name: str | None = None,
  description: str | None = None,
  input_validator: None = None,
- on_events: list[str] = [],
- on_crons: list[str] = [],
+ on_events: list[str] | None = None,
+ on_crons: list[str] | None = None,
  version: str | None = None,
  sticky: StickyStrategy | None = None,
  default_priority: int = 1,
@@ -331,11 +332,11 @@ class Hatchet:
  schedule_timeout: Duration = timedelta(minutes=5),
  execution_timeout: Duration = timedelta(seconds=60),
  retries: int = 0,
- rate_limits: list[RateLimit] = [],
- desired_worker_labels: dict[str, DesiredWorkerLabel] = {},
+ rate_limits: list[RateLimit] | None = None,
+ desired_worker_labels: dict[str, DesiredWorkerLabel] | None = None,
  backoff_factor: float | None = None,
  backoff_max_seconds: int | None = None,
- default_filters: list[DefaultFilter] = [],
+ default_filters: list[DefaultFilter] | None = None,
  ) -> Callable[
  [Callable[[EmptyModel, Context], R | CoroutineLike[R]]],
  Standalone[EmptyModel, R],
@@ -347,9 +348,9 @@ class Hatchet:
  *,
  name: str | None = None,
  description: str | None = None,
- input_validator: Type[TWorkflowInput],
- on_events: list[str] = [],
- on_crons: list[str] = [],
+ input_validator: type[TWorkflowInput],
+ on_events: list[str] | None = None,
+ on_crons: list[str] | None = None,
  version: str | None = None,
  sticky: StickyStrategy | None = None,
  default_priority: int = 1,
@@ -357,11 +358,11 @@ class Hatchet:
  schedule_timeout: Duration = timedelta(minutes=5),
  execution_timeout: Duration = timedelta(seconds=60),
  retries: int = 0,
- rate_limits: list[RateLimit] = [],
- desired_worker_labels: dict[str, DesiredWorkerLabel] = {},
+ rate_limits: list[RateLimit] | None = None,
+ desired_worker_labels: dict[str, DesiredWorkerLabel] | None = None,
  backoff_factor: float | None = None,
  backoff_max_seconds: int | None = None,
- default_filters: list[DefaultFilter] = [],
+ default_filters: list[DefaultFilter] | None = None,
  ) -> Callable[
  [Callable[[TWorkflowInput, Context], R | CoroutineLike[R]]],
  Standalone[TWorkflowInput, R],
@@ -372,9 +373,9 @@ class Hatchet:
  *,
  name: str | None = None,
  description: str | None = None,
- input_validator: Type[TWorkflowInput] | None = None,
- on_events: list[str] = [],
- on_crons: list[str] = [],
+ input_validator: type[TWorkflowInput] | None = None,
+ on_events: list[str] | None = None,
+ on_crons: list[str] | None = None,
  version: str | None = None,
  sticky: StickyStrategy | None = None,
  default_priority: int = 1,
@@ -382,11 +383,11 @@ class Hatchet:
  schedule_timeout: Duration = timedelta(minutes=5),
  execution_timeout: Duration = timedelta(seconds=60),
  retries: int = 0,
- rate_limits: list[RateLimit] = [],
- desired_worker_labels: dict[str, DesiredWorkerLabel] = {},
+ rate_limits: list[RateLimit] | None = None,
+ desired_worker_labels: dict[str, DesiredWorkerLabel] | None = None,
  backoff_factor: float | None = None,
  backoff_max_seconds: int | None = None,
- default_filters: list[DefaultFilter] = [],
+ default_filters: list[DefaultFilter] | None = None,
  ) -> (
  Callable[
  [Callable[[EmptyModel, Context], R | CoroutineLike[R]]],
@@ -447,13 +448,13 @@ class Hatchet:
  name=inferred_name,
  version=version,
  description=description,
- on_events=on_events,
- on_crons=on_crons,
+ on_events=on_events or [],
+ on_crons=on_crons or [],
  sticky=sticky,
  default_priority=default_priority,
  input_validator=input_validator
- or cast(Type[TWorkflowInput], EmptyModel),
- default_filters=default_filters,
+ or cast(type[TWorkflowInput], EmptyModel),
+ default_filters=default_filters or [],
  ),
  self,
  )
@@ -471,8 +472,8 @@ class Hatchet:
  execution_timeout=execution_timeout,
  parents=[],
  retries=retries,
- rate_limits=rate_limits,
- desired_worker_labels=desired_worker_labels,
+ rate_limits=rate_limits or [],
+ desired_worker_labels=desired_worker_labels or {},
  backoff_factor=backoff_factor,
  backoff_max_seconds=backoff_max_seconds,
  concurrency=_concurrency,
@@ -494,8 +495,8 @@ class Hatchet:
  name: str | None = None,
  description: str | None = None,
  input_validator: None = None,
- on_events: list[str] = [],
- on_crons: list[str] = [],
+ on_events: list[str] | None = None,
+ on_crons: list[str] | None = None,
  version: str | None = None,
  sticky: StickyStrategy | None = None,
  default_priority: int = 1,
@@ -503,11 +504,11 @@ class Hatchet:
  schedule_timeout: Duration = timedelta(minutes=5),
  execution_timeout: Duration = timedelta(seconds=60),
  retries: int = 0,
- rate_limits: list[RateLimit] = [],
- desired_worker_labels: dict[str, DesiredWorkerLabel] = {},
+ rate_limits: list[RateLimit] | None = None,
+ desired_worker_labels: dict[str, DesiredWorkerLabel] | None = None,
  backoff_factor: float | None = None,
  backoff_max_seconds: int | None = None,
- default_filters: list[DefaultFilter] = [],
+ default_filters: list[DefaultFilter] | None = None,
  ) -> Callable[
  [Callable[[EmptyModel, DurableContext], R | CoroutineLike[R]]],
  Standalone[EmptyModel, R],
@@ -519,9 +520,9 @@ class Hatchet:
  *,
  name: str | None = None,
  description: str | None = None,
- input_validator: Type[TWorkflowInput],
- on_events: list[str] = [],
- on_crons: list[str] = [],
+ input_validator: type[TWorkflowInput],
+ on_events: list[str] | None = None,
+ on_crons: list[str] | None = None,
  version: str | None = None,
  sticky: StickyStrategy | None = None,
  default_priority: int = 1,
@@ -529,11 +530,11 @@ class Hatchet:
  schedule_timeout: Duration = timedelta(minutes=5),
  execution_timeout: Duration = timedelta(seconds=60),
  retries: int = 0,
- rate_limits: list[RateLimit] = [],
- desired_worker_labels: dict[str, DesiredWorkerLabel] = {},
+ rate_limits: list[RateLimit] | None = None,
+ desired_worker_labels: dict[str, DesiredWorkerLabel] | None = None,
  backoff_factor: float | None = None,
  backoff_max_seconds: int | None = None,
- default_filters: list[DefaultFilter] = [],
+ default_filters: list[DefaultFilter] | None = None,
  ) -> Callable[
  [Callable[[TWorkflowInput, DurableContext], R | CoroutineLike[R]]],
  Standalone[TWorkflowInput, R],
@@ -544,9 +545,9 @@ class Hatchet:
  *,
  name: str | None = None,
  description: str | None = None,
- input_validator: Type[TWorkflowInput] | None = None,
- on_events: list[str] = [],
- on_crons: list[str] = [],
+ input_validator: type[TWorkflowInput] | None = None,
+ on_events: list[str] | None = None,
+ on_crons: list[str] | None = None,
  version: str | None = None,
  sticky: StickyStrategy | None = None,
  default_priority: int = 1,
@@ -554,11 +555,11 @@ class Hatchet:
  schedule_timeout: Duration = timedelta(minutes=5),
  execution_timeout: Duration = timedelta(seconds=60),
  retries: int = 0,
- rate_limits: list[RateLimit] = [],
- desired_worker_labels: dict[str, DesiredWorkerLabel] = {},
+ rate_limits: list[RateLimit] | None = None,
+ desired_worker_labels: dict[str, DesiredWorkerLabel] | None = None,
  backoff_factor: float | None = None,
  backoff_max_seconds: int | None = None,
- default_filters: list[DefaultFilter] = [],
+ default_filters: list[DefaultFilter] | None = None,
  ) -> (
  Callable[
  [Callable[[EmptyModel, DurableContext], R | CoroutineLike[R]]],
@@ -618,13 +619,13 @@ class Hatchet:
  name=inferred_name,
  version=version,
  description=description,
- on_events=on_events,
- on_crons=on_crons,
+ on_events=on_events or [],
+ on_crons=on_crons or [],
  sticky=sticky,
  input_validator=input_validator
- or cast(Type[TWorkflowInput], EmptyModel),
+ or cast(type[TWorkflowInput], EmptyModel),
  default_priority=default_priority,
- default_filters=default_filters,
+ default_filters=default_filters or [],
  ),
  self,
  )
@@ -642,8 +643,8 @@ class Hatchet:
  execution_timeout=execution_timeout,
  parents=[],
  retries=retries,
- rate_limits=rate_limits,
- desired_worker_labels=desired_worker_labels,
+ rate_limits=rate_limits or [],
+ desired_worker_labels=desired_worker_labels or {},
  backoff_factor=backoff_factor,
  backoff_max_seconds=backoff_max_seconds,
  concurrency=_concurrency,
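
Nearly every change in hatchet.py above replaces a mutable default argument (`= []` or `= {}`) with a `None` default that is normalized in the body (`on_events or []`), avoiding the classic Python pitfall where a single shared list or dict persists across calls. A minimal sketch of that pitfall and the fix, using a hypothetical `register` helper rather than the real Hatchet API:

    def register_bad(on_events: list[str] = []) -> list[str]:
        # The same list object is reused for every call that omits the argument.
        on_events.append("user:created")
        return on_events

    def register_good(on_events: list[str] | None = None) -> list[str]:
        # A fresh list per call, mirroring the `on_events or []` normalization above.
        events = on_events or []
        events.append("user:created")
        return events

    register_bad()   # ["user:created"]
    register_bad()   # ["user:created", "user:created"]  (state leaks across calls)
    register_good()  # ["user:created"] every time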

hatchet_sdk/opentelemetry/instrumentor.py CHANGED
@@ -1,6 +1,7 @@
  import json
+ from collections.abc import Callable, Collection, Coroutine
  from importlib.metadata import version
- from typing import Any, Callable, Collection, Coroutine, Union, cast
+ from typing import Any, cast

  from hatchet_sdk.contracts import workflows_pb2 as v0_workflow_protos
  from hatchet_sdk.utils.typing import JSONSerializableMapping
@@ -23,10 +24,10 @@ try:
  TraceContextTextMapPropagator,
  )
  from wrapt import wrap_function_wrapper # type: ignore[import-untyped]
- except (RuntimeError, ImportError, ModuleNotFoundError):
+ except (RuntimeError, ImportError, ModuleNotFoundError) as e:
  raise ModuleNotFoundError(
  "To use the HatchetInstrumentor, you must install Hatchet's `otel` extra using (e.g.) `pip install hatchet-sdk[otel]`"
- )
+ ) from e

  import inspect
  from datetime import datetime
@@ -204,7 +205,7 @@ class HatchetInstrumentor(BaseInstrumentor): # type: ignore[misc]
  super().__init__()

  def instrumentation_dependencies(self) -> Collection[str]:
- return tuple()
+ return ()

  def _instrument(self, **kwargs: InstrumentKwargs) -> None:
  self._tracer = get_tracer(__name__, hatchet_sdk_version, self.tracer_provider)
@@ -394,11 +395,11 @@ class HatchetInstrumentor(BaseInstrumentor): # type: ignore[misc]
  options = PushEventOptions(
  **options.model_dump(exclude={"additional_metadata"}),
  additional_metadata=_inject_traceparent_into_metadata(
- dict(options.additional_metadata),
+ options.additional_metadata,
  ),
  )

- return wrapped(event_key, dict(payload), options)
+ return wrapped(event_key, payload, options)

  ## IMPORTANT: Keep these types in sync with the wrapped method's signature
  def _wrap_bulk_push_event(
@@ -432,7 +433,7 @@ class HatchetInstrumentor(BaseInstrumentor): # type: ignore[misc]
  BulkPushEventWithMetadata(
  **event.model_dump(exclude={"additional_metadata"}),
  additional_metadata=_inject_traceparent_into_metadata(
- dict(event.additional_metadata),
+ event.additional_metadata,
  ),
  )
  for event in bulk_events
@@ -494,7 +495,7 @@ class HatchetInstrumentor(BaseInstrumentor): # type: ignore[misc]
  options = TriggerWorkflowOptions(
  **options.model_dump(exclude={"additional_metadata"}),
  additional_metadata=_inject_traceparent_into_metadata(
- dict(options.additional_metadata),
+ options.additional_metadata,
  ),
  )

@@ -551,19 +552,18 @@ class HatchetInstrumentor(BaseInstrumentor): # type: ignore[misc]
  options = TriggerWorkflowOptions(
  **options.model_dump(exclude={"additional_metadata"}),
  additional_metadata=_inject_traceparent_into_metadata(
- dict(options.additional_metadata),
+ options.additional_metadata,
  ),
  )

  return await wrapped(workflow_name, payload, options)

- def _ts_to_iso(self, ts: Union[datetime, timestamp_pb2.Timestamp]) -> str:
+ def _ts_to_iso(self, ts: datetime | timestamp_pb2.Timestamp) -> str:
  if isinstance(ts, datetime):
  return ts.isoformat()
- elif isinstance(ts, timestamp_pb2.Timestamp):
+ if isinstance(ts, timestamp_pb2.Timestamp):
  return ts.ToJsonString()
- else:
- raise TypeError(f"Unsupported type for timestamp conversion: {type(ts)}")
+ raise TypeError(f"Unsupported type for timestamp conversion: {type(ts)}")

  ## IMPORTANT: Keep these types in sync with the wrapped method's signature
  def _wrap_schedule_workflow(
@@ -571,7 +571,7 @@ class HatchetInstrumentor(BaseInstrumentor): # type: ignore[misc]
  wrapped: Callable[
  [
  str,
- list[Union[datetime, timestamp_pb2.Timestamp]],
+ list[datetime | timestamp_pb2.Timestamp],
  JSONSerializableMapping,
  ScheduleTriggerWorkflowOptions,
  ],
@@ -580,14 +580,14 @@ class HatchetInstrumentor(BaseInstrumentor): # type: ignore[misc]
  instance: AdminClient,
  args: tuple[
  str,
- list[Union[datetime, timestamp_pb2.Timestamp]],
+ list[datetime | timestamp_pb2.Timestamp],
  JSONSerializableMapping,
  ScheduleTriggerWorkflowOptions,
  ],
  kwargs: dict[
  str,
  str
- | list[Union[datetime, timestamp_pb2.Timestamp]]
+ | list[datetime | timestamp_pb2.Timestamp]
  | JSONSerializableMapping
  | ScheduleTriggerWorkflowOptions,
  ],
@@ -595,7 +595,7 @@ class HatchetInstrumentor(BaseInstrumentor): # type: ignore[misc]
  params = self.extract_bound_args(wrapped, args, kwargs)

  workflow_name = cast(str, params[0])
- schedules = cast(list[Union[datetime, timestamp_pb2.Timestamp]], params[1])
+ schedules = cast(list[datetime | timestamp_pb2.Timestamp], params[1])
  input = cast(JSONSerializableMapping, params[2])
  options = cast(
  ScheduleTriggerWorkflowOptions,
@@ -633,7 +633,7 @@ class HatchetInstrumentor(BaseInstrumentor): # type: ignore[misc]
  options = ScheduleTriggerWorkflowOptions(
  **options.model_dump(exclude={"additional_metadata"}),
  additional_metadata=_inject_traceparent_into_metadata(
- dict(options.additional_metadata),
+ options.additional_metadata,
  ),
  )

@@ -673,7 +673,7 @@ class HatchetInstrumentor(BaseInstrumentor): # type: ignore[misc]
  options=TriggerWorkflowOptions(
  **config.options.model_dump(exclude={"additional_metadata"}),
  additional_metadata=_inject_traceparent_into_metadata(
- dict(config.options.additional_metadata),
+ config.options.additional_metadata,
  ),
  ),
  )
@@ -705,7 +705,7 @@ class HatchetInstrumentor(BaseInstrumentor): # type: ignore[misc]
  options=TriggerWorkflowOptions(
  **config.options.model_dump(exclude={"additional_metadata"}),
  additional_metadata=_inject_traceparent_into_metadata(
- dict(config.options.additional_metadata),
+ config.options.additional_metadata,
  ),
  ),
  )
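
One small but notable fix in the instrumentor diff above is the `raise ... from e` in the optional-dependency guard, which chains the original import failure onto the friendlier error instead of discarding it. A minimal sketch of the pattern, with a placeholder module name standing in for the real `otel` extra imports:

    try:
        import some_optional_extra  # hypothetical placeholder, not a real dependency
    except ImportError as e:
        # The original ImportError stays visible as __cause__ in the traceback.
        raise ModuleNotFoundError(
            "Install the optional extra to use this feature"
        ) from e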

hatchet_sdk/runnables/action.py CHANGED
@@ -121,5 +121,4 @@ class Action(BaseModel):
  """
  if self.action_type == ActionType.START_GET_GROUP_KEY:
  return f"{self.get_group_key_run_id}/{self.retry_count}"
- else:
- return f"{self.step_run_id}/{self.retry_count}"
+ return f"{self.step_run_id}/{self.retry_count}"

hatchet_sdk/runnables/contextvars.py CHANGED
@@ -1,4 +1,5 @@
  import asyncio
+ import threading
  from collections import Counter
  from contextvars import ContextVar

@@ -15,3 +16,21 @@ ctx_worker_id: ContextVar[str | None] = ContextVar("ctx_worker_id", default=None

  workflow_spawn_indices = Counter[ActionKey]()
  spawn_index_lock = asyncio.Lock()
+
+
+ class TaskCounter:
+     def __init__(self) -> None:
+         self._count = 0
+         self._lock = threading.Lock()
+
+     def increment(self) -> int:
+         with self._lock:
+             self._count += 1
+             return self._count
+
+     @property
+     def value(self) -> int:
+         return self._count
+
+
+ task_count = TaskCounter()
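
The `TaskCounter` introduced above gives the SDK a process-wide, thread-safe tally (increments are guarded by a `threading.Lock`), exposed through the module-level `task_count` instance. A rough usage sketch, assuming only what the diff shows; the `on_task_started` hook is illustrative and not part of the SDK:

    from hatchet_sdk.runnables.contextvars import task_count

    def on_task_started() -> None:
        # Safe to call from multiple worker threads at once.
        running_total = task_count.increment()
        print(f"tasks started so far: {running_total}")

    current = task_count.value  # plain read of the latest count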

hatchet_sdk/runnables/task.py CHANGED
@@ -1,5 +1,5 @@
- from datetime import timedelta
- from typing import TYPE_CHECKING, Any, Callable, Generic, Union, cast, get_type_hints
+ from collections.abc import Callable
+ from typing import TYPE_CHECKING, Any, Generic, cast, get_type_hints

  from hatchet_sdk.context.context import Context, DurableContext
  from hatchet_sdk.contracts.v1.shared.condition_pb2 import TaskConditions
@@ -40,28 +40,30 @@ if TYPE_CHECKING:
  class Task(Generic[TWorkflowInput, R]):
  def __init__(
  self,
- _fn: Union[
+ _fn: (
  Callable[[TWorkflowInput, Context], R | CoroutineLike[R]]
- | Callable[[TWorkflowInput, Context], AwaitableLike[R]],
- Callable[[TWorkflowInput, DurableContext], R | CoroutineLike[R]]
- | Callable[[TWorkflowInput, DurableContext], AwaitableLike[R]],
- ],
+ | Callable[[TWorkflowInput, Context], AwaitableLike[R]]
+ | (
+ Callable[[TWorkflowInput, DurableContext], R | CoroutineLike[R]]
+ | Callable[[TWorkflowInput, DurableContext], AwaitableLike[R]]
+ )
+ ),
  is_durable: bool,
  type: StepType,
  workflow: "Workflow[TWorkflowInput]",
  name: str,
- execution_timeout: Duration = timedelta(seconds=60),
- schedule_timeout: Duration = timedelta(minutes=5),
- parents: "list[Task[TWorkflowInput, Any]]" = [],
- retries: int = 0,
- rate_limits: list[CreateTaskRateLimit] = [],
- desired_worker_labels: dict[str, DesiredWorkerLabels] = {},
- backoff_factor: float | None = None,
- backoff_max_seconds: int | None = None,
- concurrency: list[ConcurrencyExpression] = [],
- wait_for: list[Condition | OrGroup] = [],
- skip_if: list[Condition | OrGroup] = [],
- cancel_if: list[Condition | OrGroup] = [],
+ execution_timeout: Duration,
+ schedule_timeout: Duration,
+ parents: "list[Task[TWorkflowInput, Any]] | None",
+ retries: int,
+ rate_limits: list[CreateTaskRateLimit] | None,
+ desired_worker_labels: dict[str, DesiredWorkerLabels] | None,
+ backoff_factor: float | None,
+ backoff_max_seconds: int | None,
+ concurrency: list[ConcurrencyExpression] | None,
+ wait_for: list[Condition | OrGroup] | None,
+ skip_if: list[Condition | OrGroup] | None,
+ cancel_if: list[Condition | OrGroup] | None,
  ) -> None:
  self.is_durable = is_durable

@@ -74,17 +76,17 @@ class Task(Generic[TWorkflowInput, R]):
  self.execution_timeout = execution_timeout
  self.schedule_timeout = schedule_timeout
  self.name = name
- self.parents = parents
+ self.parents = parents or []
  self.retries = retries
- self.rate_limits = rate_limits
- self.desired_worker_labels = desired_worker_labels
+ self.rate_limits = rate_limits or []
+ self.desired_worker_labels = desired_worker_labels or {}
  self.backoff_factor = backoff_factor
  self.backoff_max_seconds = backoff_max_seconds
- self.concurrency = concurrency
+ self.concurrency = concurrency or []

- self.wait_for = self._flatten_conditions(wait_for)
- self.skip_if = self._flatten_conditions(skip_if)
- self.cancel_if = self._flatten_conditions(cancel_if)
+ self.wait_for = self._flatten_conditions(wait_for or [])
+ self.skip_if = self._flatten_conditions(skip_if or [])
+ self.cancel_if = self._flatten_conditions(cancel_if or [])

  return_type = get_type_hints(_fn).get("return")

@@ -179,13 +181,19 @@ class Task(Generic[TWorkflowInput, R]):
  raise ValueError("Conditions must have unique readable data keys.")

  user_events = [
- c.to_proto() for c in conditions if isinstance(c, UserEventCondition)
+ c.to_proto(self.workflow.client.config)
+ for c in conditions
+ if isinstance(c, UserEventCondition)
  ]
  parent_overrides = [
- c.to_proto() for c in conditions if isinstance(c, ParentCondition)
+ c.to_proto(self.workflow.client.config)
+ for c in conditions
+ if isinstance(c, ParentCondition)
  ]
  sleep_conditions = [
- c.to_proto() for c in conditions if isinstance(c, SleepCondition)
+ c.to_proto(self.workflow.client.config)
+ for c in conditions
+ if isinstance(c, SleepCondition)
  ]

  return TaskConditions(

hatchet_sdk/runnables/types.py CHANGED
@@ -1,7 +1,8 @@
  import asyncio
  import json
+ from collections.abc import Callable
  from enum import Enum
- from typing import Any, Callable, ParamSpec, Type, TypeGuard, TypeVar, Union
+ from typing import Any, ParamSpec, TypeGuard, TypeVar

  from pydantic import BaseModel, ConfigDict, Field

@@ -11,7 +12,7 @@ from hatchet_sdk.contracts.v1.workflows_pb2 import DefaultFilter as DefaultFilte
  from hatchet_sdk.utils.timedelta_to_expression import Duration
  from hatchet_sdk.utils.typing import AwaitableLike, JSONSerializableMapping

- ValidTaskReturnType = Union[BaseModel, JSONSerializableMapping, None]
+ ValidTaskReturnType = BaseModel | JSONSerializableMapping | None

  R = TypeVar("R", bound=ValidTaskReturnType)
  P = ParamSpec("P")
@@ -78,7 +79,7 @@ class DefaultFilter(BaseModel):
  return DefaultFilterProto(
  expression=self.expression,
  scope=self.scope,
- payload=payload_json,
+ payload=payload_json.encode("utf-8"),
  )


@@ -92,7 +93,7 @@ class WorkflowConfig(BaseModel):
  on_crons: list[str] = Field(default_factory=list)
  sticky: StickyStrategy | None = None
  concurrency: ConcurrencyExpression | list[ConcurrencyExpression] | None = None
- input_validator: Type[BaseModel] = EmptyModel
+ input_validator: type[BaseModel] = EmptyModel
  default_priority: int | None = None

  task_defaults: TaskDefaults = TaskDefaults()
@@ -107,7 +108,7 @@ class StepType(str, Enum):

  AsyncFunc = Callable[[TWorkflowInput, Context], AwaitableLike[R]]
  SyncFunc = Callable[[TWorkflowInput, Context], R]
- TaskFunc = Union[AsyncFunc[TWorkflowInput, R], SyncFunc[TWorkflowInput, R]]
+ TaskFunc = AsyncFunc[TWorkflowInput, R] | SyncFunc[TWorkflowInput, R]


  def is_async_fn(
@@ -124,9 +125,9 @@ def is_sync_fn(

  DurableAsyncFunc = Callable[[TWorkflowInput, DurableContext], AwaitableLike[R]]
  DurableSyncFunc = Callable[[TWorkflowInput, DurableContext], R]
- DurableTaskFunc = Union[
- DurableAsyncFunc[TWorkflowInput, R], DurableSyncFunc[TWorkflowInput, R]
- ]
+ DurableTaskFunc = (
+ DurableAsyncFunc[TWorkflowInput, R] | DurableSyncFunc[TWorkflowInput, R]
+ )


  def is_durable_async_fn(
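
Most of the remaining edits in types.py swap `typing.Union[...]` and `typing.Type` for the PEP 604 / PEP 585 spellings `X | Y` and `type[X]`, including in module-level aliases such as `ValidTaskReturnType`, which suggests the SDK now assumes Python 3.10 or newer. A small self-contained sketch of the equivalence, with illustrative alias names:

    from typing import Union

    NewStyle = int | str          # PEP 604 union, evaluated at runtime
    OldStyle = Union[int, str]    # legacy spelling

    assert NewStyle == OldStyle            # both describe the same union
    assert isinstance("hello", NewStyle)   # | unions also work with isinstance()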