hatchet-sdk 1.6.3__py3-none-any.whl → 1.6.5__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of hatchet-sdk might be problematic.
- hatchet_sdk/clients/dispatcher/action_listener.py +15 -0
- hatchet_sdk/clients/events.py +2 -0
- hatchet_sdk/context/context.py +5 -2
- hatchet_sdk/opentelemetry/instrumentor.py +4 -4
- hatchet_sdk/runnables/workflow.py +3 -3
- hatchet_sdk/worker/runner/runner.py +37 -43
- {hatchet_sdk-1.6.3.dist-info → hatchet_sdk-1.6.5.dist-info}/METADATA +1 -1
- {hatchet_sdk-1.6.3.dist-info → hatchet_sdk-1.6.5.dist-info}/RECORD +10 -10
- {hatchet_sdk-1.6.3.dist-info → hatchet_sdk-1.6.5.dist-info}/WHEEL +0 -0
- {hatchet_sdk-1.6.3.dist-info → hatchet_sdk-1.6.5.dist-info}/entry_points.txt +0 -0

hatchet_sdk/clients/dispatcher/action_listener.py CHANGED

@@ -88,6 +88,9 @@ class ActionType(str, Enum):
     START_GET_GROUP_KEY = "START_GET_GROUP_KEY"
 
 
+ActionKey = str
+
+
 class Action(BaseModel):
     worker_id: str
     tenant_id: str
@@ -141,6 +144,18 @@ class Action(BaseModel):
 
         return {k: v for k, v in attrs.items() if v}
 
+    @property
+    def key(self) -> ActionKey:
+        """
+        This key is used to uniquely identify a single step run by its id + retry count.
+        It's used when storing references to a task, a context, etc. in a dictionary so that
+        we can look up those items in the dictionary by a unique key.
+        """
+        if self.action_type == ActionType.START_GET_GROUP_KEY:
+            return f"{self.get_group_key_run_id}/{self.retry_count}"
+        else:
+            return f"{self.step_run_id}/{self.retry_count}"
+
 
 def parse_additional_metadata(additional_metadata: str) -> JSONSerializableMapping:
     try:
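
The new key property composes a run id with its retry count, so a retried step run gets its own dictionary entry instead of colliding with the original attempt. A minimal standalone sketch of that keying pattern (FakeAction and the tasks dict are illustrative, not part of hatchet_sdk):

from dataclasses import dataclass

ActionKey = str

@dataclass
class FakeAction:
    step_run_id: str
    retry_count: int

    @property
    def key(self) -> ActionKey:
        # A retry of the same step run yields a distinct key.
        return f"{self.step_run_id}/{self.retry_count}"

tasks: dict[ActionKey, str] = {}
tasks[FakeAction("step-123", 0).key] = "first attempt"
tasks[FakeAction("step-123", 1).key] = "retry"
assert len(tasks) == 2  # the retry did not overwrite the first attempt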

hatchet_sdk/clients/events.py CHANGED

@@ -149,6 +149,7 @@ class EventClient:
             ).events
         )
 
+    @tenacity_retry
     def log(self, message: str, step_run_id: str) -> None:
         request = PutLogRequest(
             stepRunId=step_run_id,
@@ -158,6 +159,7 @@ class EventClient:
 
         self.client.PutLog(request, metadata=get_metadata(self.token))
 
+    @tenacity_retry
     def stream(self, data: str | bytes, step_run_id: str) -> None:
         if isinstance(data, str):
             data_bytes = data.encode("utf-8")
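
The tenacity_retry decorator applied to log and stream is not shown in this diff. A plausible sketch of such a helper built on the tenacity library follows; the retry limits and the flaky_call example are assumptions, not the SDK's actual definition:

from tenacity import retry, stop_after_attempt, wait_exponential

# Hypothetical retry decorator in the spirit of the one applied above.
tenacity_retry = retry(
    reraise=True,  # surface the final exception to the caller
    stop=stop_after_attempt(5),  # give up after a handful of attempts
    wait=wait_exponential(multiplier=0.5, max=10),  # back off between attempts
)

@tenacity_retry
def flaky_call() -> None:
    ...  # e.g. a network call that may fail transiently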

hatchet_sdk/context/context.py CHANGED

@@ -127,15 +127,18 @@ class Context:
     def workflow_run_id(self) -> str:
         return self.action.workflow_run_id
 
+    def _set_cancellation_flag(self) -> None:
+        self.exit_flag = True
+
     def cancel(self) -> None:
         logger.debug("cancelling step...")
         self.runs_client.cancel(self.step_run_id)
-        self.
+        self._set_cancellation_flag()
 
     async def aio_cancel(self) -> None:
         logger.debug("cancelling step...")
         await self.runs_client.aio_cancel(self.step_run_id)
-        self.
+        self._set_cancellation_flag()
 
     # done returns true if the context has been cancelled
     def done(self) -> bool:
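
Setting the cancellation flag is only a signal; task code is expected to check it cooperatively, for example through Context.done() shown above. An illustrative loop follows; the work body and the sleep interval are made up, not from the SDK:

import time

def process_items(ctx, items: list[str]) -> list[str]:
    results = []
    for item in items:
        if ctx.done():  # the context was cancelled, so the exit flag is set
            break       # stop early instead of finishing every item
        results.append(item.upper())
        time.sleep(0.1)  # stand-in for a unit of real work
    return results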

hatchet_sdk/opentelemetry/instrumentor.py CHANGED

@@ -267,17 +267,17 @@ class HatchetInstrumentor(BaseInstrumentor): # type: ignore[misc]
     ## IMPORTANT: Keep these types in sync with the wrapped method's signature
     async def _wrap_handle_cancel_action(
         self,
-        wrapped: Callable[[
+        wrapped: Callable[[Action], Coroutine[None, None, Exception | None]],
         instance: Runner,
-        args: tuple[
+        args: tuple[Action],
         kwargs: Any,
     ) -> Exception | None:
-
+        action = args[0]
 
         with self._tracer.start_as_current_span(
             "hatchet.cancel_step_run",
             attributes={
-                "hatchet.step_run_id": step_run_id,
+                "hatchet.step_run_id": action.step_run_id,
             },
         ):
             return await wrapped(*args, **kwargs)
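
The (wrapped, instance, args, kwargs) signature above follows the wrapt convention used by OpenTelemetry instrumentors. A self-contained sketch of the same span-around-a-coroutine pattern; the tracer name, do_cancel, and the attribute key are placeholders, not Hatchet's:

import asyncio
from typing import Any
from opentelemetry import trace

tracer = trace.get_tracer("example.instrumentation")

async def do_cancel(run_key: str) -> None:
    await asyncio.sleep(0)  # stand-in for real cancellation work

async def wrap_do_cancel(wrapped: Any, instance: Any, args: tuple[str], kwargs: Any) -> Any:
    # Pull the argument out of args, as the wrapper above does with the Action.
    run_key = args[0]
    with tracer.start_as_current_span(
        "example.cancel",
        attributes={"example.run_key": run_key},
    ):
        return await wrapped(*args, **kwargs)

asyncio.run(wrap_do_cancel(do_cancel, None, ("step-123/0",), {}))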

hatchet_sdk/runnables/workflow.py CHANGED

@@ -765,7 +765,7 @@ class Workflow(BaseWorkflow[TWorkflowInput]):
         """
         A decorator to transform a function into a Hatchet on-success task that runs as the last step in a workflow that had all upstream tasks succeed.
 
-        :param name: The name of the on-success task. If not specified, defaults to the name of the function being wrapped by the `
+        :param name: The name of the on-success task. If not specified, defaults to the name of the function being wrapped by the `on_success_task` decorator.
 
         :param schedule_timeout: The maximum time to wait for the task to be scheduled. The run will be canceled if the task does not begin within this time.
 
@@ -803,8 +803,8 @@ class Workflow(BaseWorkflow[TWorkflowInput]):
             parents=[],
         )
 
-        if self.
-            raise ValueError("Only one on-
+        if self._on_success_task:
+            raise ValueError("Only one on-success task is allowed")
 
         self._on_success_task = task
 
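
The new guard raises as soon as a second on-success task is registered. A generic sketch of that single-registration pattern (HookRegistry and notify are illustrative, not the SDK's Workflow class):

from typing import Callable

class HookRegistry:
    def __init__(self) -> None:
        self._on_success: Callable[[], None] | None = None

    def on_success(self, fn: Callable[[], None]) -> Callable[[], None]:
        # Mirror the duplicate-registration check from the diff above.
        if self._on_success:
            raise ValueError("Only one on-success task is allowed")
        self._on_success = fn
        return fn

registry = HookRegistry()

@registry.on_success
def notify() -> None:
    print("workflow succeeded")

# Registering a second hook through registry.on_success would raise the same ValueError.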

hatchet_sdk/worker/runner/runner.py CHANGED

@@ -14,7 +14,7 @@ from pydantic import BaseModel
 
 from hatchet_sdk.client import Client
 from hatchet_sdk.clients.admin import AdminClient
-from hatchet_sdk.clients.dispatcher.action_listener import Action, ActionType
+from hatchet_sdk.clients.dispatcher.action_listener import Action, ActionKey, ActionType
 from hatchet_sdk.clients.dispatcher.dispatcher import DispatcherClient
 from hatchet_sdk.clients.events import EventClient
 from hatchet_sdk.clients.listeners.durable_event_listener import DurableEventListener
@@ -69,15 +69,15 @@ class Runner:
         self.config = config
 
         self.slots = slots
-        self.tasks: dict[
-        self.contexts: dict[
+        self.tasks: dict[ActionKey, asyncio.Task[Any]] = {}  # Store run ids and futures
+        self.contexts: dict[ActionKey, Context] = {}  # Store run ids and contexts
         self.action_registry = action_registry
 
         self.event_queue = event_queue
 
         # The thread pool is used for synchronous functions which need to run concurrently
         self.thread_pool = ThreadPoolExecutor(max_workers=slots)
-        self.threads: Dict[
+        self.threads: Dict[ActionKey, Thread] = {}  # Store run ids and threads
 
         self.killing = False
         self.handle_kill = handle_kill
@@ -118,9 +118,9 @@ class Runner:
                 logger.info(log)
                 asyncio.create_task(self.handle_start_step_run(action))
             case ActionType.CANCEL_STEP_RUN:
-                log = f"cancel: step run: {action.action_id}/{action.step_run_id}"
+                log = f"cancel: step run: {action.action_id}/{action.step_run_id}/{action.retry_count}"
                 logger.info(log)
-                asyncio.create_task(self.handle_cancel_action(action
+                asyncio.create_task(self.handle_cancel_action(action))
             case ActionType.START_GET_GROUP_KEY:
                 log = f"run: get group key: {action.action_id}/{action.get_group_key_run_id}"
                 logger.info(log)
@@ -129,11 +129,9 @@ class Runner:
                 log = f"unknown action type: {action.action_type}"
                 logger.error(log)
 
-    def step_run_callback(
-        self, action: Action, action_task: "Task[TWorkflowInput, R]"
-    ) -> Callable[[asyncio.Task[Any]], None]:
+    def step_run_callback(self, action: Action) -> Callable[[asyncio.Task[Any]], None]:
         def inner_callback(task: asyncio.Task[Any]) -> None:
-            self.cleanup_run_id(action.
+            self.cleanup_run_id(action.key)
 
             errored = False
             cancelled = task.cancelled()
@@ -182,7 +180,7 @@ class Runner:
         self, action: Action
     ) -> Callable[[asyncio.Task[Any]], None]:
         def inner_callback(task: asyncio.Task[Any]) -> None:
-            self.cleanup_run_id(action.
+            self.cleanup_run_id(action.key)
 
             errored = False
             cancelled = task.cancelled()
@@ -227,9 +225,9 @@ class Runner:
         self, ctx: Context, task: Task[TWorkflowInput, R], action: Action
     ) -> R:
         if action.step_run_id:
-            self.threads[action.
+            self.threads[action.key] = current_thread()
         elif action.get_group_key_run_id:
-            self.threads[action.
+            self.threads[action.key] = current_thread()
 
         return task.call(ctx)
 
@@ -239,7 +237,6 @@ class Runner:
         ctx: Context,
         task: Task[TWorkflowInput, R],
         action: Action,
-        run_id: str,
     ) -> R:
         ctx_step_run_id.set(action.step_run_id)
         ctx_workflow_run_id.set(action.workflow_run_id)
@@ -271,17 +268,17 @@ class Runner:
             )
             raise e
         finally:
-            self.cleanup_run_id(
+            self.cleanup_run_id(action.key)
 
-    def cleanup_run_id(self,
-        if
-            del self.tasks[
+    def cleanup_run_id(self, key: ActionKey) -> None:
+        if key in self.tasks:
+            del self.tasks[key]
 
-        if
-            del self.threads[
+        if key in self.threads:
+            del self.threads[key]
 
-        if
-            del self.contexts[
+        if key in self.contexts:
+            del self.contexts[key]
 
     @overload
     def create_context(
@@ -321,7 +318,7 @@ class Runner:
             action, True if action_func.is_durable else False
         )
 
-        self.contexts[action.
+        self.contexts[action.key] = context
         self.event_queue.put(
             ActionEvent(
                 action=action,
@@ -333,13 +330,11 @@ class Runner:
 
         loop = asyncio.get_event_loop()
         task = loop.create_task(
-            self.async_wrapped_action_func(
-                context, action_func, action, action.step_run_id
-            )
+            self.async_wrapped_action_func(context, action_func, action)
         )
 
-        task.add_done_callback(self.step_run_callback(action
-        self.tasks[action.
+        task.add_done_callback(self.step_run_callback(action))
+        self.tasks[action.key] = task
 
         try:
             await task
@@ -358,7 +353,7 @@ class Runner:
         action_name = action.action_id
         context = self.create_context(action)
 
-        self.contexts[action.
+        self.contexts[action.key] = context
 
         # Find the corresponding action function from the registry
         action_func = self.action_registry.get(action_name)
@@ -376,13 +371,11 @@ class Runner:
 
         loop = asyncio.get_event_loop()
         task = loop.create_task(
-            self.async_wrapped_action_func(
-                context, action_func, action, action.get_group_key_run_id
-            )
+            self.async_wrapped_action_func(context, action_func, action)
         )
 
         task.add_done_callback(self.group_key_run_callback(action))
-        self.tasks[action.
+        self.tasks[action.key] = task
 
         try:
             await task
@@ -421,35 +414,36 @@ class Runner:
             logger.exception(f"Failed to terminate thread: {e}")
 
     ## IMPORTANT: Keep this method's signature in sync with the wrapper in the OTel instrumentor
-    async def handle_cancel_action(self,
+    async def handle_cancel_action(self, action: Action) -> None:
+        key = action.key
         try:
             # call cancel to signal the context to stop
-            if
-                context = self.contexts.get(
+            if key in self.contexts:
+                context = self.contexts.get(key)
 
                 if context:
-
+                    context._set_cancellation_flag()
 
             await asyncio.sleep(1)
 
-            if
-                future = self.tasks.get(
+            if key in self.tasks:
+                future = self.tasks.get(key)
 
                 if future:
                     future.cancel()
 
             # check if thread is still running, if so, print a warning
-            if
-                thread = self.threads.get(
+            if key in self.threads:
+                thread = self.threads.get(key)
                 if thread and self.config.enable_force_kill_sync_threads:
                     self.force_kill_thread(thread)
                     await asyncio.sleep(1)
 
                 logger.warning(
-                    f"Thread {self.threads[
+                    f"Thread {self.threads[key].ident} with key {key} is still running after cancellation. This could cause the thread pool to get blocked and prevent new tasks from running."
                 )
         finally:
-            self.cleanup_run_id(
+            self.cleanup_run_id(key)
 
     def serialize_output(self, output: Any) -> str:
         if isinstance(output, BaseModel):
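
The runner now keys every bookkeeping dict by the composite ActionKey and clears all of them through cleanup_run_id, either from a done-callback or when a cancel action arrives. A condensed, self-contained sketch of that pattern; MiniRunner, the sleep durations, and the run key literal are illustrative, not the SDK's implementation:

import asyncio
from collections.abc import Coroutine
from typing import Any

ActionKey = str  # "run_id/retry_count", as in the diff above

class MiniRunner:
    def __init__(self) -> None:
        self.tasks: dict[ActionKey, asyncio.Task[Any]] = {}

    def cleanup_run_id(self, key: ActionKey) -> None:
        # Drop the entry if it is still present.
        if key in self.tasks:
            del self.tasks[key]

    def start(self, key: ActionKey, coro: Coroutine[Any, Any, Any]) -> None:
        task = asyncio.get_event_loop().create_task(coro)
        # Clean up by key once the task finishes, errors, or is cancelled.
        task.add_done_callback(lambda _: self.cleanup_run_id(key))
        self.tasks[key] = task

    def cancel(self, key: ActionKey) -> None:
        future = self.tasks.get(key)
        if future:
            future.cancel()

async def main() -> None:
    runner = MiniRunner()
    runner.start("step-123/0", asyncio.sleep(10))
    pending = runner.tasks["step-123/0"]
    runner.cancel("step-123/0")
    try:
        await pending  # wait for the cancellation to take effect
    except asyncio.CancelledError:
        pass
    assert "step-123/0" not in runner.tasks  # the done-callback removed the entry

asyncio.run(main())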

{hatchet_sdk-1.6.3.dist-info → hatchet_sdk-1.6.5.dist-info}/RECORD CHANGED

@@ -1,10 +1,10 @@
 hatchet_sdk/__init__.py,sha256=LUj6VyGVSHCYTQTaoyiVhjyJLOfv6gMCmb-s4hRyISM,10031
 hatchet_sdk/client.py,sha256=tbOeMuaJmgpyYSQg8QUz_J4AdqRNvV9E0aEZpgsiZTE,2207
 hatchet_sdk/clients/admin.py,sha256=zZKqjqjiq6_D2S5INuJBXdkyh5Owk65FMz6V6PC5Ta4,17692
-hatchet_sdk/clients/dispatcher/action_listener.py,sha256=
+hatchet_sdk/clients/dispatcher/action_listener.py,sha256=IwI2DKXTaPjqekZW8i6LXw4GOTtI2YdqR8PLUIhxooA,16955
 hatchet_sdk/clients/dispatcher/dispatcher.py,sha256=IL-hDXG8Lzas9FieVuNr47E_3Gvpc-aL4Xu_l385Vp8,8140
 hatchet_sdk/clients/event_ts.py,sha256=MudFszIb9IcPKQYvBTzcatPkcWEy3nxbAtEQ0_NYxMg,2094
-hatchet_sdk/clients/events.py,sha256=
+hatchet_sdk/clients/events.py,sha256=4l_xiBZN7R9sxK7PzPHRj6C7Mrg1xCP14AzkiQyzKZY,5483
 hatchet_sdk/clients/listeners/durable_event_listener.py,sha256=jpqnbZsuouWk3XaOIYL9apaGtVk65eKKq66eBP9klBs,4085
 hatchet_sdk/clients/listeners/pooled_listener.py,sha256=1rodfIeqmHRF-u-PB6cBJbOU8NrvToLTyGigJMydpGo,8496
 hatchet_sdk/clients/listeners/run_event_listener.py,sha256=rIjBLRF7d7FBoEq7RKbmbOA84lX_hHSU26trwnthqV8,10230
@@ -221,7 +221,7 @@ hatchet_sdk/clients/v1/api_client.py,sha256=mJQUZ3cOxlFJiwWKK5F8jBxcpNZ7A2292Huc
 hatchet_sdk/config.py,sha256=jJA76BOvVdfOQHy6TKclAvr2qyblcM-Pz5J-hVAdpQ4,3588
 hatchet_sdk/connection.py,sha256=B5gT5NL9BBB5-l9U_cN6pMlraQk880rEYMnqaK_dgL0,2590
 hatchet_sdk/context/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-hatchet_sdk/context/context.py,sha256=
+hatchet_sdk/context/context.py,sha256=djzppHK3VINAtnrmPwLJhpIqzSMA5qKAVd6qB1r6gfM,9518
 hatchet_sdk/context/worker_context.py,sha256=OVcEWvdT_Kpd0nlg61VAPUgIPSFzSLs0aSrXWj-1GX4,974
 hatchet_sdk/contracts/dispatcher_pb2.py,sha256=wO-x-NiHjMuC55RArpcvQX7QAjpSrjq1n70mFVwOD0Q,14543
 hatchet_sdk/contracts/dispatcher_pb2.pyi,sha256=iOcGfGtoyvmT58yCrQDvst_o0VPbq1-9rir-0jH_140,18559
@@ -254,14 +254,14 @@ hatchet_sdk/hatchet.py,sha256=kIRcvHiLgyOc1OY1uytNatUZ8qaYA4beqoKEiqFcRpE,21873
 hatchet_sdk/labels.py,sha256=nATgxWE3lFxRTnfISEpoIRLGbMfAZsHF4lZTuG4Mfic,182
 hatchet_sdk/logger.py,sha256=5uOr52T4mImSQm1QvWT8HvZFK5WfPNh3Y1cBQZRFgUQ,333
 hatchet_sdk/metadata.py,sha256=XkRbhnghJJGCdVvF-uzyGBcNaTqpeQ3uiQvNNP1wyBc,107
-hatchet_sdk/opentelemetry/instrumentor.py,sha256=
+hatchet_sdk/opentelemetry/instrumentor.py,sha256=GbsMZ1c9s0VRE7wwq-Iz5U9sT9fl7ZdXJ_ddqOqO5B0,13815
 hatchet_sdk/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 hatchet_sdk/rate_limit.py,sha256=TwbCuggiZaWpYuo4mjVLlE-z1OfQ2mRBiVvCSaG3lv4,3919
 hatchet_sdk/runnables/contextvars.py,sha256=6MDocAMmlyiRW37oQ1jyx10tAlJs-xgDjR3xPoPz05g,426
 hatchet_sdk/runnables/standalone.py,sha256=2D31qNL7IgStrW0bO_794VDhvLxgyrahZalrlEu17WQ,11739
 hatchet_sdk/runnables/task.py,sha256=nZPclBbNaYRMlCLOYi91YU503MiTcGSp92Unh3_nKvs,7533
 hatchet_sdk/runnables/types.py,sha256=hF_8kcSWIVXgYspp67RwSfi-CFZJFTbb8mL8BHR8tG8,4889
-hatchet_sdk/runnables/workflow.py,sha256=
+hatchet_sdk/runnables/workflow.py,sha256=mH6ZfrHbLVe9kNIUVnQYdivESvqA9G-a9d9qmJUjqwc,33562
 hatchet_sdk/token.py,sha256=KjIiInwG5Kqd_FO4BSW1x_5Uc7PFbnzIVJqr50-ZldE,779
 hatchet_sdk/utils/backoff.py,sha256=6B5Rb5nLKw_TqqgpJMYjIBV1PTTtbOMRZCveisVhg_I,353
 hatchet_sdk/utils/proto_enums.py,sha256=0UybwE3s7TcqmzoQSO8YnhgAKOS8WZXsyPchB8-eksw,1247
@@ -502,11 +502,11 @@ hatchet_sdk/waits.py,sha256=L2xZUcmrQX-pTVXWv1W8suMoYU_eA0uowpollauQmOM,3893
 hatchet_sdk/worker/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 hatchet_sdk/worker/action_listener_process.py,sha256=KxS7-wBpfKnsq0LNSvk-MG442Lh60iQMy3VpD1FW3mU,11703
 hatchet_sdk/worker/runner/run_loop_manager.py,sha256=RNWKDCjR57nJ0LCoLUMi0_3pnmpqyo80mz_RaxHYGIc,3812
-hatchet_sdk/worker/runner/runner.py,sha256=
+hatchet_sdk/worker/runner/runner.py,sha256=z8ri-viK_avAfF6zgbVNBc-rztFDbxSwng3RHsof92w,17063
 hatchet_sdk/worker/runner/utils/capture_logs.py,sha256=nHRPSiDBqzhObM7i2X7t03OupVFnE7kQBdR2Ckgg-2w,2709
 hatchet_sdk/worker/worker.py,sha256=AdkYo167jNsQ3CAaxuBcqA8YjmnKEu33Lp9TKK15amg,16168
 hatchet_sdk/workflow_run.py,sha256=ZwH0HLFGFVXz6jbiqSv4w0Om2XuR52Tzzw6LH4y65jQ,2765
-hatchet_sdk-1.6.
-hatchet_sdk-1.6.
-hatchet_sdk-1.6.
-hatchet_sdk-1.6.
+hatchet_sdk-1.6.5.dist-info/METADATA,sha256=UDOxt6AocsMoaP8zNZsU2DakoXFWDdKpz-20Ti6XjTI,3635
+hatchet_sdk-1.6.5.dist-info/WHEEL,sha256=FMvqSimYX_P7y0a7UY-_Mc83r5zkBZsCYPm7Lr0Bsq4,88
+hatchet_sdk-1.6.5.dist-info/entry_points.txt,sha256=Un_76pcLse-ZGBlwebhQpnTPyQrripeHW8J7qmEpGOk,1400
+hatchet_sdk-1.6.5.dist-info/RECORD,,

{hatchet_sdk-1.6.3.dist-info → hatchet_sdk-1.6.5.dist-info}/WHEEL: file without changes
{hatchet_sdk-1.6.3.dist-info → hatchet_sdk-1.6.5.dist-info}/entry_points.txt: file without changes