hatchet-sdk 1.2.6__py3-none-any.whl → 1.3.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of hatchet-sdk might be problematic. See the registry's advisory page for more details.

Files changed (60)
  1. hatchet_sdk/__init__.py +7 -5
  2. hatchet_sdk/client.py +14 -6
  3. hatchet_sdk/clients/admin.py +57 -15
  4. hatchet_sdk/clients/dispatcher/action_listener.py +2 -2
  5. hatchet_sdk/clients/dispatcher/dispatcher.py +20 -7
  6. hatchet_sdk/clients/event_ts.py +25 -5
  7. hatchet_sdk/clients/listeners/durable_event_listener.py +125 -0
  8. hatchet_sdk/clients/listeners/pooled_listener.py +255 -0
  9. hatchet_sdk/clients/listeners/workflow_listener.py +62 -0
  10. hatchet_sdk/clients/rest/api/api_token_api.py +24 -24
  11. hatchet_sdk/clients/rest/api/default_api.py +64 -64
  12. hatchet_sdk/clients/rest/api/event_api.py +64 -64
  13. hatchet_sdk/clients/rest/api/github_api.py +8 -8
  14. hatchet_sdk/clients/rest/api/healthcheck_api.py +16 -16
  15. hatchet_sdk/clients/rest/api/log_api.py +16 -16
  16. hatchet_sdk/clients/rest/api/metadata_api.py +24 -24
  17. hatchet_sdk/clients/rest/api/rate_limits_api.py +8 -8
  18. hatchet_sdk/clients/rest/api/slack_api.py +16 -16
  19. hatchet_sdk/clients/rest/api/sns_api.py +24 -24
  20. hatchet_sdk/clients/rest/api/step_run_api.py +56 -56
  21. hatchet_sdk/clients/rest/api/task_api.py +56 -56
  22. hatchet_sdk/clients/rest/api/tenant_api.py +128 -128
  23. hatchet_sdk/clients/rest/api/user_api.py +96 -96
  24. hatchet_sdk/clients/rest/api/worker_api.py +24 -24
  25. hatchet_sdk/clients/rest/api/workflow_api.py +144 -144
  26. hatchet_sdk/clients/rest/api/workflow_run_api.py +48 -48
  27. hatchet_sdk/clients/rest/api/workflow_runs_api.py +40 -40
  28. hatchet_sdk/clients/rest/api_client.py +5 -8
  29. hatchet_sdk/clients/rest/configuration.py +7 -3
  30. hatchet_sdk/clients/rest/models/tenant_step_run_queue_metrics.py +2 -2
  31. hatchet_sdk/clients/rest/models/v1_task_summary.py +5 -0
  32. hatchet_sdk/clients/rest/models/v1_workflow_run.py +5 -0
  33. hatchet_sdk/clients/rest/rest.py +160 -111
  34. hatchet_sdk/clients/v1/api_client.py +2 -2
  35. hatchet_sdk/context/context.py +22 -21
  36. hatchet_sdk/features/cron.py +41 -40
  37. hatchet_sdk/features/logs.py +7 -6
  38. hatchet_sdk/features/metrics.py +19 -18
  39. hatchet_sdk/features/runs.py +88 -68
  40. hatchet_sdk/features/scheduled.py +42 -42
  41. hatchet_sdk/features/workers.py +17 -16
  42. hatchet_sdk/features/workflows.py +15 -14
  43. hatchet_sdk/hatchet.py +1 -1
  44. hatchet_sdk/runnables/standalone.py +12 -9
  45. hatchet_sdk/runnables/task.py +66 -2
  46. hatchet_sdk/runnables/types.py +8 -0
  47. hatchet_sdk/runnables/workflow.py +26 -125
  48. hatchet_sdk/waits.py +8 -8
  49. hatchet_sdk/worker/runner/run_loop_manager.py +4 -4
  50. hatchet_sdk/worker/runner/runner.py +22 -11
  51. hatchet_sdk/worker/worker.py +29 -25
  52. hatchet_sdk/workflow_run.py +58 -9
  53. {hatchet_sdk-1.2.6.dist-info → hatchet_sdk-1.3.1.dist-info}/METADATA +1 -1
  54. {hatchet_sdk-1.2.6.dist-info → hatchet_sdk-1.3.1.dist-info}/RECORD +57 -57
  55. hatchet_sdk/clients/durable_event_listener.py +0 -329
  56. hatchet_sdk/clients/workflow_listener.py +0 -288
  57. hatchet_sdk/utils/aio.py +0 -43
  58. /hatchet_sdk/clients/{run_event_listener.py → listeners/run_event_listener.py} +0 -0
  59. {hatchet_sdk-1.2.6.dist-info → hatchet_sdk-1.3.1.dist-info}/WHEEL +0 -0
  60. {hatchet_sdk-1.2.6.dist-info → hatchet_sdk-1.3.1.dist-info}/entry_points.txt +0 -0
hatchet_sdk/clients/workflow_listener.py DELETED
@@ -1,288 +0,0 @@
1
- import asyncio
2
- import json
3
- from collections.abc import AsyncIterator
4
- from typing import Any, cast
5
-
6
- import grpc
7
- import grpc.aio
8
- from grpc._cython import cygrpc # type: ignore[attr-defined]
9
-
10
- from hatchet_sdk.clients.event_ts import ThreadSafeEvent, read_with_interrupt
11
- from hatchet_sdk.config import ClientConfig
12
- from hatchet_sdk.connection import new_conn
13
- from hatchet_sdk.contracts.dispatcher_pb2 import (
14
- SubscribeToWorkflowRunsRequest,
15
- WorkflowRunEvent,
16
- )
17
- from hatchet_sdk.contracts.dispatcher_pb2_grpc import DispatcherStub
18
- from hatchet_sdk.logger import logger
19
- from hatchet_sdk.metadata import get_metadata
20
-
21
- DEFAULT_WORKFLOW_LISTENER_RETRY_INTERVAL = 3 # seconds
22
- DEFAULT_WORKFLOW_LISTENER_RETRY_COUNT = 5
23
- DEFAULT_WORKFLOW_LISTENER_INTERRUPT_INTERVAL = 1800 # 30 minutes
24
-
25
- DEDUPE_MESSAGE = "DUPLICATE_WORKFLOW_RUN"
26
-
27
-
28
- class _Subscription:
29
- def __init__(self, id: int, workflow_run_id: str):
30
- self.id = id
31
- self.workflow_run_id = workflow_run_id
32
- self.queue: asyncio.Queue[WorkflowRunEvent | None] = asyncio.Queue()
33
-
34
- async def __aiter__(self) -> "_Subscription":
35
- return self
36
-
37
- async def __anext__(self) -> WorkflowRunEvent | None:
38
- return await self.queue.get()
39
-
40
- async def get(self) -> WorkflowRunEvent:
41
- event = await self.queue.get()
42
-
43
- if event is None:
44
- raise StopAsyncIteration
45
-
46
- return event
47
-
48
- async def put(self, item: WorkflowRunEvent) -> None:
49
- await self.queue.put(item)
50
-
51
- async def close(self) -> None:
52
- await self.queue.put(None)
53
-
54
-
55
- class PooledWorkflowRunListener:
56
- def __init__(self, config: ClientConfig):
57
- self.token = config.token
58
- self.config = config
59
-
60
- # list of all active subscriptions, mapping from a subscription id to a workflow run id
61
- self.subscriptions_to_workflows: dict[int, str] = {}
62
-
63
- # list of workflow run ids mapped to an array of subscription ids
64
- self.workflows_to_subscriptions: dict[str, list[int]] = {}
65
-
66
- self.subscription_counter: int = 0
67
- self.subscription_counter_lock: asyncio.Lock = asyncio.Lock()
68
-
69
- self.requests: asyncio.Queue[SubscribeToWorkflowRunsRequest | int] = (
70
- asyncio.Queue()
71
- )
72
-
73
- self.listener: (
74
- grpc.aio.UnaryStreamCall[SubscribeToWorkflowRunsRequest, WorkflowRunEvent]
75
- | None
76
- ) = None
77
- self.listener_task: asyncio.Task[None] | None = None
78
-
79
- self.curr_requester: int = 0
80
-
81
- # events have keys of the format workflow_run_id + subscription_id
82
- self.events: dict[int, _Subscription] = {}
83
-
84
- self.interrupter: asyncio.Task[None] | None = None
85
-
86
- ## IMPORTANT: This needs to be created lazily so we don't require
87
- ## an event loop to instantiate the client.
88
- self.client: DispatcherStub | None = None
89
-
90
- async def _interrupter(self) -> None:
91
- """
92
- _interrupter runs in a separate thread and interrupts the listener according to a configurable duration.
93
- """
94
- await asyncio.sleep(DEFAULT_WORKFLOW_LISTENER_INTERRUPT_INTERVAL)
95
-
96
- if self.interrupt is not None:
97
- self.interrupt.set()
98
-
99
- async def _init_producer(self) -> None:
100
- try:
101
- if not self.listener:
102
- while True:
103
- try:
104
- self.listener = await self._retry_subscribe()
105
-
106
- logger.debug("Workflow run listener connected.")
107
-
108
- # spawn an interrupter task
109
- if self.interrupter is not None and not self.interrupter.done():
110
- self.interrupter.cancel()
111
-
112
- self.interrupter = asyncio.create_task(self._interrupter())
113
-
114
- while True:
115
- self.interrupt = ThreadSafeEvent()
116
- if self.listener is None:
117
- continue
118
-
119
- t = asyncio.create_task(
120
- read_with_interrupt(self.listener, self.interrupt)
121
- )
122
- await self.interrupt.wait()
123
-
124
- if not t.done():
125
- # print a warning
126
- logger.warning(
127
- "Interrupted read_with_interrupt task of workflow run listener"
128
- )
129
-
130
- t.cancel()
131
- self.listener.cancel()
132
-
133
- await asyncio.sleep(
134
- DEFAULT_WORKFLOW_LISTENER_RETRY_INTERVAL
135
- )
136
- break
137
-
138
- workflow_event: WorkflowRunEvent = t.result()
139
-
140
- if workflow_event is cygrpc.EOF:
141
- break
142
-
143
- # get a list of subscriptions for this workflow
144
- subscriptions = self.workflows_to_subscriptions.get(
145
- workflow_event.workflowRunId, []
146
- )
147
-
148
- for subscription_id in subscriptions:
149
- await self.events[subscription_id].put(workflow_event)
150
-
151
- except grpc.RpcError as e:
152
- logger.debug(f"grpc error in workflow run listener: {e}")
153
- await asyncio.sleep(DEFAULT_WORKFLOW_LISTENER_RETRY_INTERVAL)
154
- continue
155
-
156
- except Exception as e:
157
- logger.error(f"Error in workflow run listener: {e}")
158
-
159
- self.listener = None
160
-
161
- # close all subscriptions
162
- for subscription_id in self.events:
163
- await self.events[subscription_id].close()
164
-
165
- raise e
166
-
167
- async def _request(self) -> AsyncIterator[SubscribeToWorkflowRunsRequest]:
168
- self.curr_requester = self.curr_requester + 1
169
-
170
- # replay all existing subscriptions
171
- workflow_run_set = set(self.subscriptions_to_workflows.values())
172
-
173
- for workflow_run_id in workflow_run_set:
174
- yield SubscribeToWorkflowRunsRequest(
175
- workflowRunId=workflow_run_id,
176
- )
177
-
178
- while True:
179
- request = await self.requests.get()
180
-
181
- # if the request is an int which matches the current requester, then we should stop
182
- if request == self.curr_requester:
183
- break
184
-
185
- # if we've gotten an int that doesn't match the current requester, then we should ignore it
186
- if isinstance(request, int):
187
- continue
188
-
189
- yield request
190
- self.requests.task_done()
191
-
192
- def cleanup_subscription(self, subscription_id: int) -> None:
193
- workflow_run_id = self.subscriptions_to_workflows[subscription_id]
194
-
195
- if workflow_run_id in self.workflows_to_subscriptions:
196
- self.workflows_to_subscriptions[workflow_run_id].remove(subscription_id)
197
-
198
- del self.subscriptions_to_workflows[subscription_id]
199
- del self.events[subscription_id]
200
-
201
- async def subscribe(self, workflow_run_id: str) -> WorkflowRunEvent:
202
- subscription_id: int | None = None
203
-
204
- try:
205
- # create a new subscription id, place a mutex on the counter
206
- await self.subscription_counter_lock.acquire()
207
- self.subscription_counter += 1
208
- subscription_id = self.subscription_counter
209
- self.subscription_counter_lock.release()
210
-
211
- self.subscriptions_to_workflows[subscription_id] = workflow_run_id
212
-
213
- if workflow_run_id not in self.workflows_to_subscriptions:
214
- self.workflows_to_subscriptions[workflow_run_id] = [subscription_id]
215
- else:
216
- self.workflows_to_subscriptions[workflow_run_id].append(subscription_id)
217
-
218
- self.events[subscription_id] = _Subscription(
219
- subscription_id, workflow_run_id
220
- )
221
-
222
- await self.requests.put(
223
- SubscribeToWorkflowRunsRequest(
224
- workflowRunId=workflow_run_id,
225
- )
226
- )
227
-
228
- if not self.listener_task or self.listener_task.done():
229
- self.listener_task = asyncio.create_task(self._init_producer())
230
-
231
- return await self.events[subscription_id].get()
232
- except asyncio.CancelledError:
233
- raise
234
- finally:
235
- if subscription_id:
236
- self.cleanup_subscription(subscription_id)
237
-
238
- async def aio_result(self, workflow_run_id: str) -> dict[str, Any]:
239
- from hatchet_sdk.clients.admin import DedupeViolationErr
240
-
241
- event = await self.subscribe(workflow_run_id)
242
- errors = [result.error for result in event.results if result.error]
243
-
244
- if errors:
245
- if DEDUPE_MESSAGE in errors[0]:
246
- raise DedupeViolationErr(errors[0])
247
- else:
248
- raise Exception(f"Workflow Errors: {errors}")
249
-
250
- return {
251
- result.stepReadableId: json.loads(result.output)
252
- for result in event.results
253
- if result.output
254
- }
255
-
256
- async def _retry_subscribe(
257
- self,
258
- ) -> grpc.aio.UnaryStreamCall[SubscribeToWorkflowRunsRequest, WorkflowRunEvent]:
259
- retries = 0
260
- if self.client is None:
261
- conn = new_conn(self.config, True)
262
- self.client = DispatcherStub(conn)
263
-
264
- while retries < DEFAULT_WORKFLOW_LISTENER_RETRY_COUNT:
265
- try:
266
- if retries > 0:
267
- await asyncio.sleep(DEFAULT_WORKFLOW_LISTENER_RETRY_INTERVAL)
268
-
269
- # signal previous async iterator to stop
270
- if self.curr_requester != 0:
271
- self.requests.put_nowait(self.curr_requester)
272
-
273
- return cast(
274
- grpc.aio.UnaryStreamCall[
275
- SubscribeToWorkflowRunsRequest, WorkflowRunEvent
276
- ],
277
- self.client.SubscribeToWorkflowRuns(
278
- self._request(), # type: ignore[arg-type]
279
- metadata=get_metadata(self.token),
280
- ),
281
- )
282
- except grpc.RpcError as e:
283
- if e.code() == grpc.StatusCode.UNAVAILABLE:
284
- retries = retries + 1
285
- else:
286
- raise ValueError(f"gRPC error: {e}")
287
-
288
- raise ValueError("Failed to connect to workflow run listener")
hatchet_sdk/utils/aio.py DELETED
@@ -1,43 +0,0 @@
1
- import asyncio
2
- from concurrent.futures import ThreadPoolExecutor
3
- from typing import Callable, Coroutine, ParamSpec, TypeVar
4
-
5
- P = ParamSpec("P")
6
- R = TypeVar("R")
7
- Y = TypeVar("Y")
8
- S = TypeVar("S")
9
-
10
-
11
- def _run_async_function_do_not_use_directly(
12
- async_func: Callable[P, Coroutine[Y, S, R]],
13
- *args: P.args,
14
- **kwargs: P.kwargs,
15
- ) -> R:
16
- loop = asyncio.new_event_loop()
17
- asyncio.set_event_loop(loop)
18
- try:
19
- return loop.run_until_complete(async_func(*args, **kwargs))
20
- finally:
21
- loop.close()
22
-
23
-
24
- def run_async_from_sync(
25
- async_func: Callable[P, Coroutine[Y, S, R]],
26
- *args: P.args,
27
- **kwargs: P.kwargs,
28
- ) -> R:
29
- try:
30
- loop = asyncio.get_event_loop()
31
- except RuntimeError:
32
- loop = None
33
-
34
- if loop and loop.is_running():
35
- return loop.run_until_complete(async_func(*args, **kwargs))
36
- else:
37
- with ThreadPoolExecutor() as executor:
38
- future = executor.submit(
39
- lambda: _run_async_function_do_not_use_directly(
40
- async_func, *args, **kwargs
41
- )
42
- )
43
- return future.result()