hatchet-sdk 0.46.1__py3-none-any.whl → 0.47.1__py3-none-any.whl

This diff compares two publicly available versions of the package as released to a supported registry. It is provided for informational purposes only and reflects the changes between those versions as they appear in the public registry.

Potentially problematic release: this version of hatchet-sdk might be problematic.

@@ -9,6 +9,7 @@ from pydantic import StrictInt
 from hatchet_sdk.clients.rest.api.event_api import EventApi
 from hatchet_sdk.clients.rest.api.log_api import LogApi
 from hatchet_sdk.clients.rest.api.step_run_api import StepRunApi
+from hatchet_sdk.clients.rest.api.worker_api import WorkerApi
 from hatchet_sdk.clients.rest.api.workflow_api import WorkflowApi
 from hatchet_sdk.clients.rest.api.workflow_run_api import WorkflowRunApi
 from hatchet_sdk.clients.rest.api.workflow_runs_api import WorkflowRunsApi
@@ -83,6 +84,7 @@ class AsyncRestApi:
         self._step_run_api = None
         self._event_api = None
         self._log_api = None
+        self._worker_api: WorkerApi | None = None

     @property
     def api_client(self):
@@ -102,6 +104,13 @@ class AsyncRestApi:
             self._workflow_run_api = WorkflowRunApi(self.api_client)
         return self._workflow_run_api

+    @property
+    def worker_api(self):
+        if self._worker_api is None:
+            self._worker_api = WorkerApi(self.api_client)
+
+        return self._worker_api
+
     @property
     def step_run_api(self):
         if self._step_run_api is None:
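The new worker_api property follows the same lazy-initialization pattern as the other accessors: the WorkerApi is only constructed on first access and then memoized. A usage sketch, assuming `client` is a hatchet_sdk Client instance (the attribute path mirrors the worker_update call site added later in this diff):

from hatchet_sdk.clients.rest.models.update_worker_request import UpdateWorkerRequest

async def pause_worker(client, worker_id: str) -> None:
    # Lazily constructs the WorkerApi on first access, then flags the worker
    # as paused so the engine stops assigning it new runs.
    await client.rest.aio.worker_api.worker_update(
        worker=worker_id,
        update_worker_request=UpdateWorkerRequest(isPaused=True),
    )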
@@ -75,6 +75,12 @@ class PooledWorkflowRunListener:
     interrupter: asyncio.Task = None

     def __init__(self, config: ClientConfig):
+        try:
+            asyncio.get_running_loop()
+        except RuntimeError:
+            loop = asyncio.new_event_loop()
+            asyncio.set_event_loop(loop)
+
         conn = new_conn(config, True)
         self.client = DispatcherStub(conn)
         self.token = config.token
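The new guard ensures an event loop exists when the listener is constructed from synchronous code: asyncio.get_running_loop() raises RuntimeError outside a running loop, in which case a fresh loop is created and installed for the current thread. The same pattern in isolation:

import asyncio

def ensure_event_loop() -> asyncio.AbstractEventLoop:
    # Return the currently running loop, or create and install one
    # for this thread if none is running.
    try:
        return asyncio.get_running_loop()
    except RuntimeError:
        loop = asyncio.new_event_loop()
        asyncio.set_event_loop(loop)
        return loop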
@@ -260,12 +266,10 @@ class PooledWorkflowRunListener:
             if self.curr_requester != 0:
                 self.requests.put_nowait(self.curr_requester)

-            listener = self.client.SubscribeToWorkflowRuns(
+            return self.client.SubscribeToWorkflowRuns(
                 self._request(),
                 metadata=get_metadata(self.token),
             )
-
-            return listener
         except grpc.RpcError as e:
             if e.code() == grpc.StatusCode.UNAVAILABLE:
                 retries = retries + 1
@@ -403,3 +403,44 @@ class Context(BaseContext):
             for step_run in job_run.step_runs
             if step_run.error and step_run.step
         ]
+
+    @tenacity_retry
+    def spawn_workflow(
+        self,
+        workflow_name: str,
+        input: dict[str, Any] = {},
+        key: str | None = None,
+        options: ChildTriggerWorkflowOptions | None = None,
+    ) -> WorkflowRunRef:
+        worker_id = self.worker.id()
+        trigger_options = self._prepare_workflow_options(key, options, worker_id)
+
+        return self.admin_client.run_workflow(workflow_name, input, trigger_options)
+
+    @tenacity_retry
+    def spawn_workflows(
+        self, child_workflow_runs: list[ChildWorkflowRunDict]
+    ) -> list[WorkflowRunRef]:
+
+        if len(child_workflow_runs) == 0:
+            raise Exception("no child workflows to spawn")
+
+        worker_id = self.worker.id()
+
+        bulk_trigger_workflow_runs: list[WorkflowRunDict] = []
+        for child_workflow_run in child_workflow_runs:
+            workflow_name = child_workflow_run["workflow_name"]
+            input = child_workflow_run["input"]
+
+            key = child_workflow_run.get("key")
+            options = child_workflow_run.get("options", {})
+
+            trigger_options = self._prepare_workflow_options(key, options, worker_id)
+
+            bulk_trigger_workflow_runs.append(
+                WorkflowRunDict(
+                    workflow_name=workflow_name, input=input, options=trigger_options
+                )
+            )
+
+        return self.admin_client.run_workflows(bulk_trigger_workflow_runs)
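These synchronous spawn helpers let a running step fan out child workflow runs, attaching the parent worker id to the trigger options. A usage sketch with illustrative workflow and step names (it assumes the SDK's usual @hatchet.workflow/@hatchet.step decorators and that "child-workflow" is registered on the same tenant):

from hatchet_sdk import Context, Hatchet

hatchet = Hatchet()

@hatchet.workflow(on_events=["parent:run"])
class Parent:
    @hatchet.step()
    def fan_out(self, context: Context):
        # Spawn several child runs in one bulk call; "key" and "options" are optional.
        refs = context.spawn_workflows(
            [
                {"workflow_name": "child-workflow", "input": {"index": i}, "key": f"child-{i}"}
                for i in range(3)
            ]
        )
        # Block on the children's results from this synchronous step.
        return {"results": [ref.sync_result() for ref in refs]}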
hatchet_sdk/loader.py CHANGED
@@ -42,6 +42,7 @@ class ClientConfig:
         worker_healthcheck_port: int | None = None,
         worker_healthcheck_enabled: bool | None = None,
         worker_preset_labels: dict[str, str] = {},
+        enable_force_kill_sync_threads: bool = False,
     ):
         self.tenant_id = tenant_id
         self.tls_config = tls_config
@@ -55,6 +56,7 @@ class ClientConfig:
         self.worker_healthcheck_port = worker_healthcheck_port
         self.worker_healthcheck_enabled = worker_healthcheck_enabled
         self.worker_preset_labels = worker_preset_labels
+        self.enable_force_kill_sync_threads = enable_force_kill_sync_threads

         if not self.logInterceptor:
             self.logInterceptor = getLogger()
@@ -174,6 +176,14 @@ class ConfigLoader:
                 "The `otel_exporter_otlp_*` fields are no longer supported as of SDK version `0.46.0`. Please see the documentation on OpenTelemetry at https://docs.hatchet.run/home/features/opentelemetry for more information on how to migrate to the new `HatchetInstrumentor`."
             )

+        enable_force_kill_sync_threads = bool(
+            get_config_value(
+                "enable_force_kill_sync_threads",
+                "HATCHET_CLIENT_ENABLE_FORCE_KILL_SYNC_THREADS",
+            )
+            == "True"
+            or False
+        )
         return ClientConfig(
             tenant_id=tenant_id,
             tls_config=tls_config,
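Note that the loader compares the value returned by get_config_value against the literal string "True", and the result flows into ClientConfig.enable_force_kill_sync_threads. Enabling the flag via the environment variable named above therefore looks like this:

import os

# As written, only the literal string "True" enables the flag.
os.environ["HATCHET_CLIENT_ENABLE_FORCE_KILL_SYNC_THREADS"] = "True"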
@@ -188,6 +198,7 @@ class ConfigLoader:
             worker_healthcheck_port=worker_healthcheck_port,
             worker_healthcheck_enabled=worker_healthcheck_enabled,
             worker_preset_labels=worker_preset_labels,
+            enable_force_kill_sync_threads=enable_force_kill_sync_threads,
         )

     def _load_tls_config(self, tls_data: Dict, host_port) -> ClientTLSConfig:
@@ -13,6 +13,7 @@ try:
         StatusCode,
         TracerProvider,
         get_tracer,
+        get_tracer_provider,
     )
     from opentelemetry.trace.propagation.tracecontext import (
         TraceContextTextMapPropagator,
@@ -43,49 +44,121 @@ hatchet_sdk_version = version("hatchet-sdk")

 InstrumentKwargs = TracerProvider | MeterProvider | None

+OTEL_TRACEPARENT_KEY = "traceparent"

-class HatchetInstrumentor(BaseInstrumentor):  # type: ignore[misc]
-    OTEL_TRACEPARENT_KEY = "traceparent"

-    def __init__(
-        self,
-        tracer_provider: TracerProvider,
-        meter_provider: MeterProvider = NoOpMeterProvider(),
-    ):
-        self.tracer_provider = tracer_provider
-        self.meter_provider = meter_provider
+def create_traceparent() -> str | None:
+    """
+    Creates and returns a W3C traceparent header value using OpenTelemetry's context propagation.

-        super().__init__()
+    The traceparent header is used to propagate context information across service boundaries
+    in distributed tracing systems. It follows the W3C Trace Context specification.

-    def create_traceparent(self) -> str | None:
-        carrier: dict[str, str] = {}
-        TraceContextTextMapPropagator().inject(carrier)
+    :returns: A W3C-formatted traceparent header value if successful, None if the context
+        injection fails or no active span exists.\n
+        Example: `00-4bf92f3577b34da6a3ce929d0e0e4736-00f067aa0ba902b7-01`
+    :rtype: str | None:
+    """

-        return carrier.get("traceparent")
+    carrier: dict[str, str] = {}
+    TraceContextTextMapPropagator().inject(carrier)

-    def parse_carrier_from_metadata(
-        self, metadata: dict[str, str] | None
-    ) -> Context | None:
-        if not metadata:
-            return None
+    return carrier.get("traceparent")

-        traceparent = metadata.get(self.OTEL_TRACEPARENT_KEY)

-        if not traceparent:
-            return None
+def parse_carrier_from_metadata(metadata: dict[str, str] | None) -> Context | None:
+    """
+    Parses OpenTelemetry trace context from a metadata dictionary.

-        return TraceContextTextMapPropagator().extract(
-            {self.OTEL_TRACEPARENT_KEY: traceparent}
-        )
+    Extracts the trace context from metadata using the W3C Trace Context format,
+    specifically looking for the `traceparent` header.
+
+    :param metadata: A dictionary containing metadata key-value pairs,
+        potentially including the `traceparent` header. Can be None.
+    :type metadata: dict[str, str] | None
+    :returns: The extracted OpenTelemetry Context object if a valid `traceparent`
+        is found in the metadata, otherwise None.
+    :rtype: Context | None
+
+    :Example:
+
+    >>> metadata = {"traceparent": "00-0af7651916cd43dd8448eb211c80319c-b7ad6b7169203331-01"}
+    >>> context = parse_carrier_from_metadata(metadata)
+    """
+
+    if not metadata:
+        return None
+
+    traceparent = metadata.get(OTEL_TRACEPARENT_KEY)
+
+    if not traceparent:
+        return None
+
+    return TraceContextTextMapPropagator().extract({OTEL_TRACEPARENT_KEY: traceparent})
+
+
+def inject_traceparent_into_metadata(
+    metadata: dict[str, str], traceparent: str | None = None
+) -> dict[str, str]:
+    """
+    Injects OpenTelemetry `traceparent` into a metadata dictionary.
+
+    Takes a metadata dictionary and an optional `traceparent` string,
+    returning a new metadata dictionary with the `traceparent` added under the
+    `OTEL_TRACEPARENT_KEY`. If no `traceparent` is provided, it attempts to create one.

-    def inject_traceparent_into_metadata(
-        self, metadata: dict[str, str], traceparent: str | None
-    ) -> dict[str, str]:
-        if traceparent:
-            metadata[self.OTEL_TRACEPARENT_KEY] = traceparent
+    :param metadata: The metadata dictionary to inject the `traceparent` into.
+    :type metadata: dict[str, str]
+    :param traceparent: The `traceparent` string to inject. If None, attempts to use
+        the current span.
+    :type traceparent: str | None, optional
+    :returns: A new metadata dictionary containing the original metadata plus
+        the injected `traceparent`, if one was available or could be created.
+    :rtype: dict[str, str]

+    :Example:
+
+    >>> metadata = {"key": "value"}
+    >>> new_metadata = inject_traceparent(metadata, "00-4bf92f3577b34da6a3ce929d0e0e4736-00f067aa0ba902b7-01")
+    >>> print(new_metadata)
+    {"key": "value", "traceparent": "00-4bf92f3577b34da6a3ce929d0e0e4736-00f067aa0ba902b7-01"}
+    """
+
+    if not traceparent:
+        traceparent = create_traceparent()
+
+    if not traceparent:
         return metadata

+    return {
+        **metadata,
+        OTEL_TRACEPARENT_KEY: traceparent,
+    }
+
+
+class HatchetInstrumentor(BaseInstrumentor):  # type: ignore[misc]
+    def __init__(
+        self,
+        tracer_provider: TracerProvider | None = None,
+        meter_provider: MeterProvider | None = None,
+    ):
+        """
+        Hatchet OpenTelemetry instrumentor.
+
+        The instrumentor provides an OpenTelemetry integration for Hatchet by setting up
+        tracing and metrics collection.
+
+        :param tracer_provider: TracerProvider | None: The OpenTelemetry TracerProvider to use.
+            If not provided, the global tracer provider will be used.
+        :param meter_provider: MeterProvider | None: The OpenTelemetry MeterProvider to use.
+            If not provided, a no-op meter provider will be used.
+        """
+
+        self.tracer_provider = tracer_provider or get_tracer_provider()
+        self.meter_provider = meter_provider or NoOpMeterProvider()
+
+        super().__init__()
+
     def instrumentation_dependencies(self) -> Collection[str]:
         return tuple()
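With the trace-context helpers promoted to module-level functions and the instrumentor's providers now defaulting to the global tracer provider and a no-op meter provider, they can be used directly. A minimal sketch (the import path follows hatchet_sdk/opentelemetry/instrumentor.py; the exact wiring is illustrative):

from hatchet_sdk.opentelemetry.instrumentor import (
    HatchetInstrumentor,
    create_traceparent,
    inject_traceparent_into_metadata,
    parse_carrier_from_metadata,
)

# Uses the global tracer provider and a no-op meter provider by default.
HatchetInstrumentor().instrument()

# Capture the current span's context (None if no span is active) ...
traceparent = create_traceparent()

# ... and propagate it through Hatchet metadata.
metadata = inject_traceparent_into_metadata({"key": "value"}, traceparent)

# On the receiving side, recover the OpenTelemetry Context (or None).
ctx = parse_carrier_from_metadata(metadata)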
@@ -154,7 +227,7 @@ class HatchetInstrumentor(BaseInstrumentor):  # type: ignore[misc]
         kwargs: Any,
     ) -> Exception | None:
         action = args[0]
-        traceparent = self.parse_carrier_from_metadata(action.additional_metadata)
+        traceparent = parse_carrier_from_metadata(action.additional_metadata)

         with self._tracer.start_as_current_span(
             "hatchet.start_step_run",
@@ -92,7 +92,7 @@ class EventLoopThread:
         self.loop = asyncio.new_event_loop()
         self.thread = Thread(target=self.run_loop_in_thread, args=(self.loop,))

-    def __enter__(self) -> asyncio.AbstractEventLoop:
+    def __enter__(self, *a, **kw) -> asyncio.AbstractEventLoop:
         """
         Starts the thread running the event loop when entering the context.

@@ -102,7 +102,7 @@ class EventLoopThread:
         self.thread.start()
         return self.loop

-    def __exit__(self) -> None:
+    def __exit__(self, *a, **kw) -> None:
         """
         Stops the event loop and joins the thread when exiting the context.
         """
@@ -8,12 +8,14 @@ from typing import Any, List, Mapping, Optional

 import grpc

+from hatchet_sdk.client import Client, new_client_raw
 from hatchet_sdk.clients.dispatcher.action_listener import Action
 from hatchet_sdk.clients.dispatcher.dispatcher import (
     ActionListener,
     GetActionListenerRequest,
     new_dispatcher,
 )
+from hatchet_sdk.clients.rest.models.update_worker_request import UpdateWorkerRequest
 from hatchet_sdk.contracts.dispatcher_pb2 import (
     GROUP_KEY_EVENT_TYPE_STARTED,
     STEP_EVENT_TYPE_STARTED,
@@ -41,10 +43,6 @@ BLOCKED_THREAD_WARNING = (
 )


-def noop_handler():
-    pass
-
-
 @dataclass
 class WorkerActionListenerProcess:
     name: str
@@ -70,9 +68,15 @@ class WorkerActionListenerProcess:
         if self.debug:
             logger.setLevel(logging.DEBUG)

+        self.client = new_client_raw(self.config, self.debug)
+
         loop = asyncio.get_event_loop()
-        loop.add_signal_handler(signal.SIGINT, noop_handler)
-        loop.add_signal_handler(signal.SIGTERM, noop_handler)
+        loop.add_signal_handler(
+            signal.SIGINT, lambda: asyncio.create_task(self.pause_task_assignment())
+        )
+        loop.add_signal_handler(
+            signal.SIGTERM, lambda: asyncio.create_task(self.pause_task_assignment())
+        )
         loop.add_signal_handler(
             signal.SIGQUIT, lambda: asyncio.create_task(self.exit_gracefully())
         )
@@ -249,7 +253,15 @@ class WorkerActionListenerProcess:

         self.event_queue.put(STOP_LOOP)

+    async def pause_task_assignment(self) -> None:
+        await self.client.rest.aio.worker_api.worker_update(
+            worker=self.listener.worker_id,
+            update_worker_request=UpdateWorkerRequest(isPaused=True),
+        )
+
     async def exit_gracefully(self, skip_unregister=False):
+        await self.pause_task_assignment()
+
         if self.killing:
             return
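With this change, SIGINT and SIGTERM no longer fall through to a no-op handler: the worker first marks itself paused so the engine stops assigning it new runs, while SIGQUIT still triggers the full graceful exit. The underlying asyncio pattern of scheduling a coroutine from a signal handler, as a standalone sketch (Unix-only, since add_signal_handler is not available on Windows):

import asyncio
import signal

async def pause() -> None:
    print("pausing task assignment...")

async def main() -> None:
    loop = asyncio.get_running_loop()
    # Signal handlers must be plain callables, so wrap the coroutine in
    # asyncio.create_task to schedule it on the running loop.
    loop.add_signal_handler(signal.SIGINT, lambda: asyncio.create_task(pause()))
    loop.add_signal_handler(signal.SIGTERM, lambda: asyncio.create_task(pause()))
    await asyncio.sleep(3600)

asyncio.run(main())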
@@ -3,12 +3,13 @@ import contextvars
 import ctypes
 import functools
 import json
+import time
 import traceback
 from concurrent.futures import ThreadPoolExecutor
 from enum import Enum
 from multiprocessing import Queue
 from threading import Thread, current_thread
-from typing import Any, Callable, Dict, Literal, Type, TypeVar, cast, overload
+from typing import Any, Callable, Dict, cast

 from pydantic import BaseModel
@@ -421,6 +422,11 @@ class Runner:

         # check if thread is still running, if so, print a warning
         if run_id in self.threads:
+            thread = self.threads.get(run_id)
+            if thread and self.client.config.enable_force_kill_sync_threads:
+                self.force_kill_thread(thread)
+                await asyncio.sleep(1)
+
             logger.warning(
                 f"Thread {self.threads[run_id].ident} with run id {run_id} is still running after cancellation. This could cause the thread pool to get blocked and prevent new tasks from running."
             )
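force_kill_thread itself is not shown in this diff. Given the existing ctypes import in runner.py, it plausibly relies on CPython's PyThreadState_SetAsyncExc mechanism for raising an exception inside another thread; the following is a hedged sketch of that general technique, not the SDK's actual implementation:

import ctypes
import threading

def force_kill_thread(thread: threading.Thread) -> None:
    # Best effort: schedule a SystemExit inside the target thread. The thread
    # only dies once it next executes Python bytecode, so blocking C calls or
    # I/O can delay or swallow the exception entirely.
    if thread.ident is None:
        return
    res = ctypes.pythonapi.PyThreadState_SetAsyncExc(
        ctypes.c_long(thread.ident), ctypes.py_object(SystemExit)
    )
    if res > 1:
        # More than one thread state was affected: undo and give up.
        ctypes.pythonapi.PyThreadState_SetAsyncExc(ctypes.c_long(thread.ident), None)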
@@ -32,16 +32,18 @@ class WorkflowRunRef:
         return self.workflow_listener.result(self.workflow_run_id)

     def sync_result(self) -> dict:
+        coro = self.workflow_listener.result(self.workflow_run_id)
         loop = get_active_event_loop()
+
         if loop is None:
-            with EventLoopThread() as loop:
-                coro = self.workflow_listener.result(self.workflow_run_id)
-                future = asyncio.run_coroutine_threadsafe(coro, loop)
-                return future.result()
+            loop = asyncio.new_event_loop()
+            asyncio.set_event_loop(loop)
+            try:
+                return loop.run_until_complete(coro)
+            finally:
+                asyncio.set_event_loop(None)
         else:
-            coro = self.workflow_listener.result(self.workflow_run_id)
-            future = asyncio.run_coroutine_threadsafe(coro, loop)
-            return future.result()
+            return loop.run_until_complete(coro)


 T = TypeVar("T")
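sync_result is the blocking counterpart to awaiting result(): when no loop is active it creates a temporary event loop, runs the coroutine to completion, and uninstalls the loop afterwards. An illustrative caller from plain synchronous code (hatchet.admin.run_workflow is assumed here to return the WorkflowRunRef shown above):

from hatchet_sdk import Hatchet

hatchet = Hatchet()

ref = hatchet.admin.run_workflow("my-workflow", {"n": 1})
print(ref.sync_result())  # blocks until the run finishes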
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: hatchet-sdk
-Version: 0.46.1
+Version: 0.47.1
 Summary:
 Author: Alexander Belanger
 Author-email: alexander@hatchet.run
@@ -13,7 +13,7 @@ Provides-Extra: otel
 Requires-Dist: aiohttp (>=3.10.5,<4.0.0)
 Requires-Dist: aiohttp-retry (>=2.8.3,<3.0.0)
 Requires-Dist: aiostream (>=0.5.2,<0.6.0)
-Requires-Dist: cel-python (>=0.1.5,<0.2.0)
+Requires-Dist: cel-python (>=0.2.0,<0.3.0)
 Requires-Dist: grpcio (>=1.64.1,!=1.68.*) ; python_version < "3.13"
 Requires-Dist: grpcio (>=1.69.0) ; python_version >= "3.13"
 Requires-Dist: grpcio-tools (>=1.64.1,!=1.68.*) ; python_version < "3.13"
@@ -186,12 +186,12 @@ hatchet_sdk/clients/rest/models/workflow_version_meta.py,sha256=TW4R7bAuYAg_LraN
 hatchet_sdk/clients/rest/models/workflow_workers_count.py,sha256=qhzqfvjjIDyARkiiLGluMIqEmqO-diHTsjlu0Doi0yg,2875
 hatchet_sdk/clients/rest/rest.py,sha256=G83F1k4g_ePzvXW95rApzvaRDQPcaxrj-JmZyq1LvGw,6606
 hatchet_sdk/clients/rest/tenacity_utils.py,sha256=gy500kHXQ-4ZrZH1biHGmavhfh0NarCJcaIfW-A9Qd0,1051
-hatchet_sdk/clients/rest_client.py,sha256=jEu9KNTaGgy64bqIIMV6r2st3EqAsyr5Vz77NipJtjU,21867
+hatchet_sdk/clients/rest_client.py,sha256=Jp4qRRsQdXzrWm3Fca6X2k6nMvECTQ87ux2jGyMgqfQ,22148
 hatchet_sdk/clients/run_event_listener.py,sha256=51WTg52_aISgYPOFPHJ21rb4IO6aEd7Ugp7FCf4HnfM,10184
-hatchet_sdk/clients/workflow_listener.py,sha256=Q_WJcGlZNHJGSpxzDac9wELjgxhP0vLaNTXRy_xnxc8,9466
+hatchet_sdk/clients/workflow_listener.py,sha256=HG7qwPaeUmsChYMhvj95IBZ6cuBDt3BDvsSi1h5PXcg,9596
 hatchet_sdk/connection.py,sha256=593aUGAj7Ouf00lcVwx_pmhdQ9NOC5ANT1Jrf8nwkHs,2165
 hatchet_sdk/context/__init__.py,sha256=Pl_seJ_SJpW34BBZp4KixuZ8GiRK9sJFfegf9u3m7zk,29
-hatchet_sdk/context/context.py,sha256=sRAih-dBqxkmQUxgo_IpQP2YIJJinzJ2C09RP-EM94o,13910
+hatchet_sdk/context/context.py,sha256=qQfeynqKOZGUguy1_zwIsv-kgCsIT4x-32-ki-IEdO0,15336
 hatchet_sdk/context/worker_context.py,sha256=OVcEWvdT_Kpd0nlg61VAPUgIPSFzSLs0aSrXWj-1GX4,974
 hatchet_sdk/contracts/dispatcher_pb2.py,sha256=B35F3XQQkk05UA84nuZOIFtiydgPbB8gA5FhvNvSqb0,14414
 hatchet_sdk/contracts/dispatcher_pb2.pyi,sha256=JLtc615N9vNDRtQoUVynclPBbgIsRhbikcrT8b7Z-TM,18336
@@ -206,15 +206,15 @@ hatchet_sdk/features/cron.py,sha256=4lKMH0MqiN8cHJk2jhF0Ueqs6z5ozwJzlOeSeaWqvO0,
 hatchet_sdk/features/scheduled.py,sha256=YhEbNWl8dWOH61rXVjAyu8iG1BZqpSkD4kgaxkKIHgY,9504
 hatchet_sdk/hatchet.py,sha256=L6O9cTeZtGqFUxO5H8knXKdiej0iAequ1Wl7vSNO9FY,10027
 hatchet_sdk/labels.py,sha256=Axfp1yUNowzE9mL8AQA1ADqwOaNmq3QP_45wb1Ed1aI,272
-hatchet_sdk/loader.py,sha256=m8NFnDU-NMZQ_ay2LzRI6QThix4Dv2zKCXY9GcAMlvo,8203
+hatchet_sdk/loader.py,sha256=esm-xksKKxXpkaBTYvXXKnq_DPJLoHmtSqGTiaFwSYw,8668
 hatchet_sdk/logger.py,sha256=5uOr52T4mImSQm1QvWT8HvZFK5WfPNh3Y1cBQZRFgUQ,333
 hatchet_sdk/metadata.py,sha256=M_Cb-CXRKitzVMQHeaHUtbY28ET__fAbyGX1YKaeN4I,80
-hatchet_sdk/opentelemetry/instrumentor.py,sha256=FKYxilEjZJvQg4BJFLveV-gg2r7-30S8_BP6hl5tIfA,11173
+hatchet_sdk/opentelemetry/instrumentor.py,sha256=0GHhefnrFB3UHi_vtnaI7aWBp7k3ams1_H5AnXhGIlw,14152
 hatchet_sdk/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 hatchet_sdk/rate_limit.py,sha256=IIzpe65i-518t9kQcZVEykDQ2VY8sOw2F7qlQ4wlAjw,4421
 hatchet_sdk/semver.py,sha256=PrgBL0TnyXl3p_OK1iSMk9Gpujfh5asQpJ4DHJLCW2k,998
 hatchet_sdk/token.py,sha256=Ap3jnbaPAL10F2G_D71wj7OpBcvrI3RuE0keqXx1lAE,698
-hatchet_sdk/utils/aio_utils.py,sha256=D92Z1lc84u8f1mVE3XzOv9XtRKfCIvwfdfL-Bo5fJA8,4336
+hatchet_sdk/utils/aio_utils.py,sha256=QP8D2WstVR-MAYV5I6KvrVGBOXH3_4oLrQ8wv0_c4K4,4356
 hatchet_sdk/utils/backoff.py,sha256=6B5Rb5nLKw_TqqgpJMYjIBV1PTTtbOMRZCveisVhg_I,353
 hatchet_sdk/utils/serialization.py,sha256=P2Uq0yxg-Cea5Lmf6IOh2r7W17MNF1Hv2qxSny6BUk8,451
 hatchet_sdk/utils/types.py,sha256=qhx1OoeXh77AN6s4SMaGpB3zK3hPm7ocv-23JFa6_wE,191
@@ -223,15 +223,15 @@ hatchet_sdk/v2/callable.py,sha256=DqBc3VS7WAk5qU9Ef1HnyAzdgxUq1hDzj-CwQYQrvcA,69
 hatchet_sdk/v2/concurrency.py,sha256=mRsbCj3G2kwkAJjHOg67y226mYdXVmd0uka3ypdRKUQ,1434
 hatchet_sdk/v2/hatchet.py,sha256=mQZoloWuRCIEwDwXeTaJL2kBjgs9YLHy7UPWxBD5070,7282
 hatchet_sdk/worker/__init__.py,sha256=1Ze1seDuXx5yD1IfHmqGFgK5qrRazVW4ZcDVGl-Pddw,61
-hatchet_sdk/worker/action_listener_process.py,sha256=tmlzDgyHWxGl8fJWE9NKqjvhqpGi9SMmOh5dFyiVL-Q,9979
+hatchet_sdk/worker/action_listener_process.py,sha256=QFGxxsSGdAp4PzcENcGRHNmhDHgfuP3omt9-kyolEyk,10566
 hatchet_sdk/worker/runner/run_loop_manager.py,sha256=nV7fhNxJKCcrBm0ci118aszF_7AxenBkOTIe1UsBEt4,3490
-hatchet_sdk/worker/runner/runner.py,sha256=ycua4H86hUi1Qds-L-BrB0rN2WyLmYoOlVGBsZLoihU,16519
+hatchet_sdk/worker/runner/runner.py,sha256=2aT5rumcisfzliI9ghKelA7LuZir-4tLzYk_N8a6S9c,16723
 hatchet_sdk/worker/runner/utils/capture_logs.py,sha256=s_BGxeykelVbusx6u31EPx3vv9c2BHkuBnYcaLW680E,2381
 hatchet_sdk/worker/runner/utils/error_with_traceback.py,sha256=Iih_s8JNqrinXETFJ3ZS88EhaTekfM6m5fqIP7QWoIM,181
 hatchet_sdk/worker/worker.py,sha256=7UPm3qTzNYSSm9QTNX6zBBMJqVA6nKFeCbAdqLLjUBs,13007
 hatchet_sdk/workflow.py,sha256=XRj5jcCQSvPQMXxBipf-ZlARua2E8Z9igRzGcQ5alkI,9375
-hatchet_sdk/workflow_run.py,sha256=BwK5cefvXXvyQ1Ednj_7LeejMwQJqWnvUC_FTBmJNxk,1805
-hatchet_sdk-0.46.1.dist-info/METADATA,sha256=FjCNXSY41R0blWhtaATiUVpNUQIo73rfXBCvrSD9XUY,1826
-hatchet_sdk-0.46.1.dist-info/WHEEL,sha256=FMvqSimYX_P7y0a7UY-_Mc83r5zkBZsCYPm7Lr0Bsq4,88
-hatchet_sdk-0.46.1.dist-info/entry_points.txt,sha256=Rxdx7OJUxSHsSjYLUI_JcInpYIFl9vPaMvbl5TGMzdA,1079
-hatchet_sdk-0.46.1.dist-info/RECORD,,
+hatchet_sdk/workflow_run.py,sha256=-RUhI48NOr1Z8ha9entmVu3yVlHBbr0LWZKVHUfPVzY,1743
+hatchet_sdk-0.47.1.dist-info/METADATA,sha256=47a2WMiEGAa1OL7_iL0s2PiOwP2uxMahlkZ0KRLfBv0,1826
+hatchet_sdk-0.47.1.dist-info/WHEEL,sha256=FMvqSimYX_P7y0a7UY-_Mc83r5zkBZsCYPm7Lr0Bsq4,88
+hatchet_sdk-0.47.1.dist-info/entry_points.txt,sha256=lyWn37JwpYymrqp6Awq-Var43vhDG6rfauPS6KQa8JY,1124
+hatchet_sdk-0.47.1.dist-info/RECORD,,
@@ -12,6 +12,7 @@ dynamic_rate_limit=examples.rate_limit.dynamic:main
 events=examples.events.worker:main
 existing_loop=examples.worker_existing_loop.worker:main
 fanout=examples.fanout.worker:main
+fanout_sync=examples.fanout_sync.worker:main
 logger=examples.logger.worker:main
 manual_trigger=examples.manual_trigger.worker:main
 on_failure=examples.on_failure.worker:main